@@ -1,1180 +1,1180 @@
1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
1 | # hgweb/hgweb_mod.py - Web interface for a repository. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms |
|
6 | # This software may be used and distributed according to the terms | |
7 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | # of the GNU General Public License, incorporated herein by reference. | |
8 |
|
8 | |||
9 | import os, mimetypes, re, zlib, mimetools, cStringIO, sys |
|
9 | import os, mimetypes, re, zlib, mimetools, cStringIO, sys | |
10 | import tempfile, urllib, bz2 |
|
10 | import tempfile, urllib, bz2 | |
11 | from mercurial.node import * |
|
11 | from mercurial.node import * | |
12 | from mercurial.i18n import gettext as _ |
|
12 | from mercurial.i18n import gettext as _ | |
13 | from mercurial import mdiff, ui, hg, util, archival, streamclone, patch |
|
13 | from mercurial import mdiff, ui, hg, util, archival, streamclone, patch | |
14 | from mercurial import revlog, templater |
|
14 | from mercurial import revlog, templater | |
15 | from common import get_mtime, staticfile, style_map, paritygen |
|
15 | from common import get_mtime, staticfile, style_map, paritygen | |
16 |
|
16 | |||
17 | def _up(p): |
|
17 | def _up(p): | |
18 | if p[0] != "/": |
|
18 | if p[0] != "/": | |
19 | p = "/" + p |
|
19 | p = "/" + p | |
20 | if p[-1] == "/": |
|
20 | if p[-1] == "/": | |
21 | p = p[:-1] |
|
21 | p = p[:-1] | |
22 | up = os.path.dirname(p) |
|
22 | up = os.path.dirname(p) | |
23 | if up == "/": |
|
23 | if up == "/": | |
24 | return "/" |
|
24 | return "/" | |
25 | return up + "/" |
|
25 | return up + "/" | |
26 |
|
26 | |||
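The _up helper above produces the parent-directory link used by the file and manifest pages; it always returns an absolute path ending in a slash. A minimal sketch of its behaviour (hypothetical inputs, not part of the file):

    # parent-directory URL for "up one level" links
    assert _up("foo/bar/baz.c") == "/foo/bar/"
    assert _up("/foo/bar/") == "/foo/"
    assert _up("/foo") == "/"
    assert _up("/") == "/"
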
27 | def revnavgen(pos, pagelen, limit, nodefunc): |
|
27 | def revnavgen(pos, pagelen, limit, nodefunc): | |
28 | def seq(factor, limit=None): |
|
28 | def seq(factor, limit=None): | |
29 | if limit: |
|
29 | if limit: | |
30 | yield limit |
|
30 | yield limit | |
31 | if limit >= 20 and limit <= 40: |
|
31 | if limit >= 20 and limit <= 40: | |
32 | yield 50 |
|
32 | yield 50 | |
33 | else: |
|
33 | else: | |
34 | yield 1 * factor |
|
34 | yield 1 * factor | |
35 | yield 3 * factor |
|
35 | yield 3 * factor | |
36 | for f in seq(factor * 10): |
|
36 | for f in seq(factor * 10): | |
37 | yield f |
|
37 | yield f | |
38 |
|
38 | |||
39 | def nav(**map): |
|
39 | def nav(**map): | |
40 | l = [] |
|
40 | l = [] | |
41 | last = 0 |
|
41 | last = 0 | |
42 | for f in seq(1, pagelen): |
|
42 | for f in seq(1, pagelen): | |
43 | if f < pagelen or f <= last: |
|
43 | if f < pagelen or f <= last: | |
44 | continue |
|
44 | continue | |
45 | if f > limit: |
|
45 | if f > limit: | |
46 | break |
|
46 | break | |
47 | last = f |
|
47 | last = f | |
48 | if pos + f < limit: |
|
48 | if pos + f < limit: | |
49 | l.append(("+%d" % f, hex(nodefunc(pos + f).node()))) |
|
49 | l.append(("+%d" % f, hex(nodefunc(pos + f).node()))) | |
50 | if pos - f >= 0: |
|
50 | if pos - f >= 0: | |
51 | l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node()))) |
|
51 | l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node()))) | |
52 |
|
52 | |||
53 | try: |
|
53 | try: | |
54 | yield {"label": "(0)", "node": hex(nodefunc('0').node())} |
|
54 | yield {"label": "(0)", "node": hex(nodefunc('0').node())} | |
55 |
|
55 | |||
56 | for label, node in l: |
|
56 | for label, node in l: | |
57 | yield {"label": label, "node": node} |
|
57 | yield {"label": label, "node": node} | |
58 |
|
58 | |||
59 | yield {"label": "tip", "node": "tip"} |
|
59 | yield {"label": "tip", "node": "tip"} | |
60 | except hg.RepoError: |
|
60 | except hg.RepoError: | |
61 | pass |
|
61 | pass | |
62 |
|
62 | |||
63 | return nav |
|
63 | return nav | |
64 |
|
64 | |||
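revnavgen builds the "(0) / -100 / -10 / +10 / +100 / tip" style navigation around the current revision, walking a roughly geometric sequence of offsets (pagelen, then 3x/10x steps) and stopping once an offset exceeds the revision count. A rough usage sketch, assuming a repo object with the usual changelog API:

    # hypothetical: navigation for a changelog page at revision 500
    pos = 500
    limit = repo.changelog.count()
    nav = revnavgen(pos, 10, limit, repo.changectx)
    # nav(**map) then yields dicts such as
    #   {"label": "(0)",  "node": <hex of rev 0>}
    #   {"label": "-100", "node": <hex of rev 400>}
    #   {"label": "+100", "node": <hex of rev 600>}
    #   {"label": "tip",  "node": "tip"}
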
65 | class hgweb(object): |
|
65 | class hgweb(object): | |
66 | def __init__(self, repo, name=None): |
|
66 | def __init__(self, repo, name=None): | |
67 | if type(repo) == type(""): |
|
67 | if type(repo) == type(""): | |
68 | self.repo = hg.repository(ui.ui(report_untrusted=False), repo) |
|
68 | self.repo = hg.repository(ui.ui(report_untrusted=False), repo) | |
69 | else: |
|
69 | else: | |
70 | self.repo = repo |
|
70 | self.repo = repo | |
71 |
|
71 | |||
72 | self.mtime = -1 |
|
72 | self.mtime = -1 | |
73 | self.reponame = name |
|
73 | self.reponame = name | |
74 | self.archives = 'zip', 'gz', 'bz2' |
|
74 | self.archives = 'zip', 'gz', 'bz2' | |
75 | self.stripecount = 1 |
|
75 | self.stripecount = 1 | |
76 | # a repo owner may set web.templates in .hg/hgrc to get any file |
|
76 | # a repo owner may set web.templates in .hg/hgrc to get any file | |
77 | # readable by the user running the CGI script |
|
77 | # readable by the user running the CGI script | |
78 | self.templatepath = self.config("web", "templates", |
|
78 | self.templatepath = self.config("web", "templates", | |
79 | templater.templatepath(), |
|
79 | templater.templatepath(), | |
80 | untrusted=False) |
|
80 | untrusted=False) | |
81 |
|
81 | |||
82 | # The CGI scripts are often run by a user different from the repo owner. |
|
82 | # The CGI scripts are often run by a user different from the repo owner. | |
83 | # Trust the settings from the .hg/hgrc files by default. |
|
83 | # Trust the settings from the .hg/hgrc files by default. | |
84 | def config(self, section, name, default=None, untrusted=True): |
|
84 | def config(self, section, name, default=None, untrusted=True): | |
85 | return self.repo.ui.config(section, name, default, |
|
85 | return self.repo.ui.config(section, name, default, | |
86 | untrusted=untrusted) |
|
86 | untrusted=untrusted) | |
87 |
|
87 | |||
88 | def configbool(self, section, name, default=False, untrusted=True): |
|
88 | def configbool(self, section, name, default=False, untrusted=True): | |
89 | return self.repo.ui.configbool(section, name, default, |
|
89 | return self.repo.ui.configbool(section, name, default, | |
90 | untrusted=untrusted) |
|
90 | untrusted=untrusted) | |
91 |
|
91 | |||
92 | def configlist(self, section, name, default=None, untrusted=True): |
|
92 | def configlist(self, section, name, default=None, untrusted=True): | |
93 | return self.repo.ui.configlist(section, name, default, |
|
93 | return self.repo.ui.configlist(section, name, default, | |
94 | untrusted=untrusted) |
|
94 | untrusted=untrusted) | |
95 |
|
95 | |||
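Because CGI requests usually run as the web server's user rather than the repository owner, these wrappers default to untrusted=True so that per-repository options in .hg/hgrc still take effect; only settings that could expose arbitrary files (like the template path above) insist on trusted config. Illustrative calls as they appear from inside hgweb methods:

    # repo owner's .hg/hgrc is honoured (untrusted lookup, the default here)
    desc = self.config("web", "description", "unknown")
    # template path is only taken from config the CGI user trusts
    tp = self.config("web", "templates", templater.templatepath(),
                     untrusted=False)
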
96 | def refresh(self): |
|
96 | def refresh(self): | |
97 | mtime = get_mtime(self.repo.root) |
|
97 | mtime = get_mtime(self.repo.root) | |
98 | if mtime != self.mtime: |
|
98 | if mtime != self.mtime: | |
99 | self.mtime = mtime |
|
99 | self.mtime = mtime | |
100 | self.repo = hg.repository(self.repo.ui, self.repo.root) |
|
100 | self.repo = hg.repository(self.repo.ui, self.repo.root) | |
101 | self.maxchanges = int(self.config("web", "maxchanges", 10)) |
|
101 | self.maxchanges = int(self.config("web", "maxchanges", 10)) | |
102 | self.stripecount = int(self.config("web", "stripes", 1)) |
|
102 | self.stripecount = int(self.config("web", "stripes", 1)) | |
103 | self.maxshortchanges = int(self.config("web", "maxshortchanges", 60)) |
|
103 | self.maxshortchanges = int(self.config("web", "maxshortchanges", 60)) | |
104 | self.maxfiles = int(self.config("web", "maxfiles", 10)) |
|
104 | self.maxfiles = int(self.config("web", "maxfiles", 10)) | |
105 | self.allowpull = self.configbool("web", "allowpull", True) |
|
105 | self.allowpull = self.configbool("web", "allowpull", True) | |
106 | self.encoding = self.config("web", "encoding", util._encoding) |
|
106 | self.encoding = self.config("web", "encoding", util._encoding) | |
107 |
|
107 | |||
108 | def archivelist(self, nodeid): |
|
108 | def archivelist(self, nodeid): | |
109 | allowed = self.configlist("web", "allow_archive") |
|
109 | allowed = self.configlist("web", "allow_archive") | |
110 | for i, spec in self.archive_specs.iteritems(): |
|
110 | for i, spec in self.archive_specs.iteritems(): | |
111 | if i in allowed or self.configbool("web", "allow" + i): |
|
111 | if i in allowed or self.configbool("web", "allow" + i): | |
112 | yield {"type" : i, "extension" : spec[2], "node" : nodeid} |
|
112 | yield {"type" : i, "extension" : spec[2], "node" : nodeid} | |
113 |
|
113 | |||
114 | def listfilediffs(self, files, changeset): |
|
114 | def listfilediffs(self, files, changeset): | |
115 | for f in files[:self.maxfiles]: |
|
115 | for f in files[:self.maxfiles]: | |
116 | yield self.t("filedifflink", node=hex(changeset), file=f) |
|
116 | yield self.t("filedifflink", node=hex(changeset), file=f) | |
117 | if len(files) > self.maxfiles: |
|
117 | if len(files) > self.maxfiles: | |
118 | yield self.t("fileellipses") |
|
118 | yield self.t("fileellipses") | |
119 |
|
119 | |||
120 | def siblings(self, siblings=[], hiderev=None, **args): |
|
120 | def siblings(self, siblings=[], hiderev=None, **args): | |
121 | siblings = [s for s in siblings if s.node() != nullid] |
|
121 | siblings = [s for s in siblings if s.node() != nullid] | |
122 | if len(siblings) == 1 and siblings[0].rev() == hiderev: |
|
122 | if len(siblings) == 1 and siblings[0].rev() == hiderev: | |
123 | return |
|
123 | return | |
124 | for s in siblings: |
|
124 | for s in siblings: | |
125 | d = {'node': hex(s.node()), 'rev': s.rev()} |
|
125 | d = {'node': hex(s.node()), 'rev': s.rev()} | |
126 | if hasattr(s, 'path'): |
|
126 | if hasattr(s, 'path'): | |
127 | d['file'] = s.path() |
|
127 | d['file'] = s.path() | |
128 | d.update(args) |
|
128 | d.update(args) | |
129 | yield d |
|
129 | yield d | |
130 |
|
130 | |||
131 | def renamelink(self, fl, node): |
|
131 | def renamelink(self, fl, node): | |
132 | r = fl.renamed(node) |
|
132 | r = fl.renamed(node) | |
133 | if r: |
|
133 | if r: | |
134 | return [dict(file=r[0], node=hex(r[1]))] |
|
134 | return [dict(file=r[0], node=hex(r[1]))] | |
135 | return [] |
|
135 | return [] | |
136 |
|
136 | |||
137 | def nodetagsdict(self, node): |
|
137 | def nodetagsdict(self, node): | |
138 | return [{"name": i} for i in self.repo.nodetags(node)] |
|
138 | return [{"name": i} for i in self.repo.nodetags(node)] | |
139 |
|
139 | |||
140 | def nodebranchdict(self, ctx): |
|
140 | def nodebranchdict(self, ctx): | |
141 | branches = [] |
|
141 | branches = [] | |
142 | branch = ctx.branch() |
|
142 | branch = ctx.branch() | |
143 | if self.repo.branchtags()[branch] == ctx.node(): |
|
143 | if self.repo.branchtags()[branch] == ctx.node(): | |
144 | branches.append({"name": branch}) |
|
144 | branches.append({"name": branch}) | |
145 | return branches |
|
145 | return branches | |
146 |
|
146 | |||
147 | def showtag(self, t1, node=nullid, **args): |
|
147 | def showtag(self, t1, node=nullid, **args): | |
148 | for t in self.repo.nodetags(node): |
|
148 | for t in self.repo.nodetags(node): | |
149 | yield self.t(t1, tag=t, **args) |
|
149 | yield self.t(t1, tag=t, **args) | |
150 |
|
150 | |||
151 | def diff(self, node1, node2, files): |
|
151 | def diff(self, node1, node2, files): | |
152 | def filterfiles(filters, files): |
|
152 | def filterfiles(filters, files): | |
153 | l = [x for x in files if x in filters] |
|
153 | l = [x for x in files if x in filters] | |
154 |
|
154 | |||
155 | for t in filters: |
|
155 | for t in filters: | |
156 | if t and t[-1] != os.sep: |
|
156 | if t and t[-1] != os.sep: | |
157 | t += os.sep |
|
157 | t += os.sep | |
158 | l += [x for x in files if x.startswith(t)] |
|
158 | l += [x for x in files if x.startswith(t)] | |
159 | return l |
|
159 | return l | |
160 |
|
160 | |||
161 | parity = paritygen(self.stripecount) |
|
161 | parity = paritygen(self.stripecount) | |
162 | def diffblock(diff, f, fn): |
|
162 | def diffblock(diff, f, fn): | |
163 | yield self.t("diffblock", |
|
163 | yield self.t("diffblock", | |
164 | lines=prettyprintlines(diff), |
|
164 | lines=prettyprintlines(diff), | |
165 | parity=parity.next(), |
|
165 | parity=parity.next(), | |
166 | file=f, |
|
166 | file=f, | |
167 | filenode=hex(fn or nullid)) |
|
167 | filenode=hex(fn or nullid)) | |
168 |
|
168 | |||
169 | def prettyprintlines(diff): |
|
169 | def prettyprintlines(diff): | |
170 | for l in diff.splitlines(1): |
|
170 | for l in diff.splitlines(1): | |
171 | if l.startswith('+'): |
|
171 | if l.startswith('+'): | |
172 | yield self.t("difflineplus", line=l) |
|
172 | yield self.t("difflineplus", line=l) | |
173 | elif l.startswith('-'): |
|
173 | elif l.startswith('-'): | |
174 | yield self.t("difflineminus", line=l) |
|
174 | yield self.t("difflineminus", line=l) | |
175 | elif l.startswith('@'): |
|
175 | elif l.startswith('@'): | |
176 | yield self.t("difflineat", line=l) |
|
176 | yield self.t("difflineat", line=l) | |
177 | else: |
|
177 | else: | |
178 | yield self.t("diffline", line=l) |
|
178 | yield self.t("diffline", line=l) | |
179 |
|
179 | |||
180 | r = self.repo |
|
180 | r = self.repo | |
181 | c1 = r.changectx(node1) |
|
181 | c1 = r.changectx(node1) | |
182 | c2 = r.changectx(node2) |
|
182 | c2 = r.changectx(node2) | |
183 | date1 = util.datestr(c1.date()) |
|
183 | date1 = util.datestr(c1.date()) | |
184 | date2 = util.datestr(c2.date()) |
|
184 | date2 = util.datestr(c2.date()) | |
185 |
|
185 | |||
186 | modified, added, removed, deleted, unknown = r.status(node1, node2)[:5] |
|
186 | modified, added, removed, deleted, unknown = r.status(node1, node2)[:5] | |
187 | if files: |
|
187 | if files: | |
188 | modified, added, removed = map(lambda x: filterfiles(files, x), |
|
188 | modified, added, removed = map(lambda x: filterfiles(files, x), | |
189 | (modified, added, removed)) |
|
189 | (modified, added, removed)) | |
190 |
|
190 | |||
191 | diffopts = patch.diffopts(self.repo.ui, untrusted=True) |
|
191 | diffopts = patch.diffopts(self.repo.ui, untrusted=True) | |
192 | for f in modified: |
|
192 | for f in modified: | |
193 | to = c1.filectx(f).data() |
|
193 | to = c1.filectx(f).data() | |
194 | tn = c2.filectx(f).data() |
|
194 | tn = c2.filectx(f).data() | |
195 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, |
|
195 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, | |
196 | opts=diffopts), f, tn) |
|
196 | opts=diffopts), f, tn) | |
197 | for f in added: |
|
197 | for f in added: | |
198 | to = None |
|
198 | to = None | |
199 | tn = c2.filectx(f).data() |
|
199 | tn = c2.filectx(f).data() | |
200 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, |
|
200 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, | |
201 | opts=diffopts), f, tn) |
|
201 | opts=diffopts), f, tn) | |
202 | for f in removed: |
|
202 | for f in removed: | |
203 | to = c1.filectx(f).data() |
|
203 | to = c1.filectx(f).data() | |
204 | tn = None |
|
204 | tn = None | |
205 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, |
|
205 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, | |
206 | opts=diffopts), f, tn) |
|
206 | opts=diffopts), f, tn) | |
207 |
|
207 | |||
208 | def changelog(self, ctx, shortlog=False): |
|
208 | def changelog(self, ctx, shortlog=False): | |
209 | def changelist(**map): |
|
209 | def changelist(**map): | |
210 | cl = self.repo.changelog |
|
210 | cl = self.repo.changelog | |
211 | l = [] # build a list in forward order for efficiency |
|
211 | l = [] # build a list in forward order for efficiency | |
212 | for i in xrange(start, end): |
|
212 | for i in xrange(start, end): | |
213 | ctx = self.repo.changectx(i) |
|
213 | ctx = self.repo.changectx(i) | |
214 | n = ctx.node() |
|
214 | n = ctx.node() | |
215 |
|
215 | |||
216 | l.insert(0, {"parity": parity.next(), |
|
216 | l.insert(0, {"parity": parity.next(), | |
217 | "author": ctx.user(), |
|
217 | "author": ctx.user(), | |
218 | "parent": self.siblings(ctx.parents(), i - 1), |
|
218 | "parent": self.siblings(ctx.parents(), i - 1), | |
219 | "child": self.siblings(ctx.children(), i + 1), |
|
219 | "child": self.siblings(ctx.children(), i + 1), | |
220 | "changelogtag": self.showtag("changelogtag",n), |
|
220 | "changelogtag": self.showtag("changelogtag",n), | |
221 | "desc": ctx.description(), |
|
221 | "desc": ctx.description(), | |
222 | "date": ctx.date(), |
|
222 | "date": ctx.date(), | |
223 | "files": self.listfilediffs(ctx.files(), n), |
|
223 | "files": self.listfilediffs(ctx.files(), n), | |
224 | "rev": i, |
|
224 | "rev": i, | |
225 | "node": hex(n), |
|
225 | "node": hex(n), | |
226 | "tags": self.nodetagsdict(n), |
|
226 | "tags": self.nodetagsdict(n), | |
227 | "branches": self.nodebranchdict(ctx)}) |
|
227 | "branches": self.nodebranchdict(ctx)}) | |
228 |
|
228 | |||
229 | for e in l: |
|
229 | for e in l: | |
230 | yield e |
|
230 | yield e | |
231 |
|
231 | |||
232 | maxchanges = shortlog and self.maxshortchanges or self.maxchanges |
|
232 | maxchanges = shortlog and self.maxshortchanges or self.maxchanges | |
233 | cl = self.repo.changelog |
|
233 | cl = self.repo.changelog | |
234 | count = cl.count() |
|
234 | count = cl.count() | |
235 | pos = ctx.rev() |
|
235 | pos = ctx.rev() | |
236 | start = max(0, pos - maxchanges + 1) |
|
236 | start = max(0, pos - maxchanges + 1) | |
237 | end = min(count, start + maxchanges) |
|
237 | end = min(count, start + maxchanges) | |
238 | pos = end - 1 |
|
238 | pos = end - 1 | |
239 | parity = paritygen(self.stripecount, offset=start-end) |
|
239 | parity = paritygen(self.stripecount, offset=start-end) | |
240 |
|
240 | |||
241 | changenav = revnavgen(pos, maxchanges, count, self.repo.changectx) |
|
241 | changenav = revnavgen(pos, maxchanges, count, self.repo.changectx) | |
242 |
|
242 | |||
243 | yield self.t(shortlog and 'shortlog' or 'changelog', |
|
243 | yield self.t(shortlog and 'shortlog' or 'changelog', | |
244 | changenav=changenav, |
|
244 | changenav=changenav, | |
245 | node=hex(cl.tip()), |
|
245 | node=hex(cl.tip()), | |
246 | rev=pos, changesets=count, entries=changelist, |
|
246 | rev=pos, changesets=count, entries=changelist, | |
247 | archives=self.archivelist("tip")) |
|
247 | archives=self.archivelist("tip")) | |
248 |
|
248 | |||
249 | def search(self, query): |
|
249 | def search(self, query): | |
250 |
|
250 | |||
251 | def changelist(**map): |
|
251 | def changelist(**map): | |
252 | cl = self.repo.changelog |
|
252 | cl = self.repo.changelog | |
253 | count = 0 |
|
253 | count = 0 | |
254 | qw = query.lower().split() |
|
254 | qw = query.lower().split() | |
255 |
|
255 | |||
256 | def revgen(): |
|
256 | def revgen(): | |
257 | for i in xrange(cl.count() - 1, 0, -100): |
|
257 | for i in xrange(cl.count() - 1, 0, -100): | |
258 | l = [] |
|
258 | l = [] | |
259 | for j in xrange(max(0, i - 100), i): |
|
259 | for j in xrange(max(0, i - 100), i): | |
260 | ctx = self.repo.changectx(j) |
|
260 | ctx = self.repo.changectx(j) | |
261 | l.append(ctx) |
|
261 | l.append(ctx) | |
262 | l.reverse() |
|
262 | l.reverse() | |
263 | for e in l: |
|
263 | for e in l: | |
264 | yield e |
|
264 | yield e | |
265 |
|
265 | |||
266 | for ctx in revgen(): |
|
266 | for ctx in revgen(): | |
267 | miss = 0 |
|
267 | miss = 0 | |
268 | for q in qw: |
|
268 | for q in qw: | |
269 | if not (q in ctx.user().lower() or |
|
269 | if not (q in ctx.user().lower() or | |
270 | q in ctx.description().lower() or |
|
270 | q in ctx.description().lower() or | |
271 | q in " ".join(ctx.files()).lower()): |
|
271 | q in " ".join(ctx.files()).lower()): | |
272 | miss = 1 |
|
272 | miss = 1 | |
273 | break |
|
273 | break | |
274 | if miss: |
|
274 | if miss: | |
275 | continue |
|
275 | continue | |
276 |
|
276 | |||
277 | count += 1 |
|
277 | count += 1 | |
278 | n = ctx.node() |
|
278 | n = ctx.node() | |
279 |
|
279 | |||
280 | yield self.t('searchentry', |
|
280 | yield self.t('searchentry', | |
281 | parity=parity.next(), |
|
281 | parity=parity.next(), | |
282 | author=ctx.user(), |
|
282 | author=ctx.user(), | |
283 | parent=self.siblings(ctx.parents()), |
|
283 | parent=self.siblings(ctx.parents()), | |
284 | child=self.siblings(ctx.children()), |
|
284 | child=self.siblings(ctx.children()), | |
285 | changelogtag=self.showtag("changelogtag",n), |
|
285 | changelogtag=self.showtag("changelogtag",n), | |
286 | desc=ctx.description(), |
|
286 | desc=ctx.description(), | |
287 | date=ctx.date(), |
|
287 | date=ctx.date(), | |
288 | files=self.listfilediffs(ctx.files(), n), |
|
288 | files=self.listfilediffs(ctx.files(), n), | |
289 | rev=ctx.rev(), |
|
289 | rev=ctx.rev(), | |
290 | node=hex(n), |
|
290 | node=hex(n), | |
291 | tags=self.nodetagsdict(n), |
|
291 | tags=self.nodetagsdict(n), | |
292 | branches=self.nodebranchdict(ctx)) |
|
292 | branches=self.nodebranchdict(ctx)) | |
293 |
|
293 | |||
294 | if count >= self.maxchanges: |
|
294 | if count >= self.maxchanges: | |
295 | break |
|
295 | break | |
296 |
|
296 | |||
297 | cl = self.repo.changelog |
|
297 | cl = self.repo.changelog | |
298 | parity = paritygen(self.stripecount) |
|
298 | parity = paritygen(self.stripecount) | |
299 |
|
299 | |||
300 | yield self.t('search', |
|
300 | yield self.t('search', | |
301 | query=query, |
|
301 | query=query, | |
302 | node=hex(cl.tip()), |
|
302 | node=hex(cl.tip()), | |
303 | entries=changelist, |
|
303 | entries=changelist, | |
304 | archives=self.archivelist("tip")) |
|
304 | archives=self.archivelist("tip")) | |
305 |
|
305 | |||
306 | def changeset(self, ctx): |
|
306 | def changeset(self, ctx): | |
307 | n = ctx.node() |
|
307 | n = ctx.node() | |
308 | parents = ctx.parents() |
|
308 | parents = ctx.parents() | |
309 | p1 = parents[0].node() |
|
309 | p1 = parents[0].node() | |
310 |
|
310 | |||
311 | files = [] |
|
311 | files = [] | |
312 | parity = paritygen(self.stripecount) |
|
312 | parity = paritygen(self.stripecount) | |
313 | for f in ctx.files(): |
|
313 | for f in ctx.files(): | |
314 | files.append(self.t("filenodelink", |
|
314 | files.append(self.t("filenodelink", | |
315 | node=hex(n), file=f, |
|
315 | node=hex(n), file=f, | |
316 | parity=parity.next())) |
|
316 | parity=parity.next())) | |
317 |
|
317 | |||
318 | def diff(**map): |
|
318 | def diff(**map): | |
319 | yield self.diff(p1, n, None) |
|
319 | yield self.diff(p1, n, None) | |
320 |
|
320 | |||
321 | yield self.t('changeset', |
|
321 | yield self.t('changeset', | |
322 | diff=diff, |
|
322 | diff=diff, | |
323 | rev=ctx.rev(), |
|
323 | rev=ctx.rev(), | |
324 | node=hex(n), |
|
324 | node=hex(n), | |
325 | parent=self.siblings(parents), |
|
325 | parent=self.siblings(parents), | |
326 | child=self.siblings(ctx.children()), |
|
326 | child=self.siblings(ctx.children()), | |
327 | changesettag=self.showtag("changesettag",n), |
|
327 | changesettag=self.showtag("changesettag",n), | |
328 | author=ctx.user(), |
|
328 | author=ctx.user(), | |
329 | desc=ctx.description(), |
|
329 | desc=ctx.description(), | |
330 | date=ctx.date(), |
|
330 | date=ctx.date(), | |
331 | files=files, |
|
331 | files=files, | |
332 | archives=self.archivelist(hex(n)), |
|
332 | archives=self.archivelist(hex(n)), | |
333 | tags=self.nodetagsdict(n), |
|
333 | tags=self.nodetagsdict(n), | |
334 | branches=self.nodebranchdict(ctx)) |
|
334 | branches=self.nodebranchdict(ctx)) | |
335 |
|
335 | |||
336 | def filelog(self, fctx): |
|
336 | def filelog(self, fctx): | |
337 | f = fctx.path() |
|
337 | f = fctx.path() | |
338 | fl = fctx.filelog() |
|
338 | fl = fctx.filelog() | |
339 | count = fl.count() |
|
339 | count = fl.count() | |
340 | pagelen = self.maxshortchanges |
|
340 | pagelen = self.maxshortchanges | |
341 | pos = fctx.filerev() |
|
341 | pos = fctx.filerev() | |
342 | start = max(0, pos - pagelen + 1) |
|
342 | start = max(0, pos - pagelen + 1) | |
343 | end = min(count, start + pagelen) |
|
343 | end = min(count, start + pagelen) | |
344 | pos = end - 1 |
|
344 | pos = end - 1 | |
345 | parity = paritygen(self.stripecount, offset=start-end) |
|
345 | parity = paritygen(self.stripecount, offset=start-end) | |
346 |
|
346 | |||
347 | def entries(**map): |
|
347 | def entries(**map): | |
348 | l = [] |
|
348 | l = [] | |
349 |
|
349 | |||
350 | for i in xrange(start, end): |
|
350 | for i in xrange(start, end): | |
351 | ctx = fctx.filectx(i) |
|
351 | ctx = fctx.filectx(i) | |
352 | n = fl.node(i) |
|
352 | n = fl.node(i) | |
353 |
|
353 | |||
354 | l.insert(0, {"parity": parity.next(), |
|
354 | l.insert(0, {"parity": parity.next(), | |
355 | "filerev": i, |
|
355 | "filerev": i, | |
356 | "file": f, |
|
356 | "file": f, | |
357 | "node": hex(ctx.node()), |
|
357 | "node": hex(ctx.node()), | |
358 | "author": ctx.user(), |
|
358 | "author": ctx.user(), | |
359 | "date": ctx.date(), |
|
359 | "date": ctx.date(), | |
360 | "rename": self.renamelink(fl, n), |
|
360 | "rename": self.renamelink(fl, n), | |
361 | "parent": self.siblings(fctx.parents()), |
|
361 | "parent": self.siblings(fctx.parents()), | |
362 | "child": self.siblings(fctx.children()), |
|
362 | "child": self.siblings(fctx.children()), | |
363 | "desc": ctx.description()}) |
|
363 | "desc": ctx.description()}) | |
364 |
|
364 | |||
365 | for e in l: |
|
365 | for e in l: | |
366 | yield e |
|
366 | yield e | |
367 |
|
367 | |||
368 | nodefunc = lambda x: fctx.filectx(fileid=x) |
|
368 | nodefunc = lambda x: fctx.filectx(fileid=x) | |
369 | nav = revnavgen(pos, pagelen, count, nodefunc) |
|
369 | nav = revnavgen(pos, pagelen, count, nodefunc) | |
370 | yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav, |
|
370 | yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav, | |
371 | entries=entries) |
|
371 | entries=entries) | |
372 |
|
372 | |||
373 | def filerevision(self, fctx): |
|
373 | def filerevision(self, fctx): | |
374 | f = fctx.path() |
|
374 | f = fctx.path() | |
375 | text = fctx.data() |
|
375 | text = fctx.data() | |
376 | fl = fctx.filelog() |
|
376 | fl = fctx.filelog() | |
377 | n = fctx.filenode() |
|
377 | n = fctx.filenode() | |
378 | parity = paritygen(self.stripecount) |
|
378 | parity = paritygen(self.stripecount) | |
379 |
|
379 | |||
380 | mt = mimetypes.guess_type(f)[0] |
|
380 | mt = mimetypes.guess_type(f)[0] | |
381 | rawtext = text |
|
381 | rawtext = text | |
382 | if util.binary(text): |
|
382 | if util.binary(text): | |
383 | mt = mt or 'application/octet-stream' |
|
383 | mt = mt or 'application/octet-stream' | |
384 | text = "(binary:%s)" % mt |
|
384 | text = "(binary:%s)" % mt | |
385 | mt = mt or 'text/plain' |
|
385 | mt = mt or 'text/plain' | |
386 |
|
386 | |||
387 | def lines(): |
|
387 | def lines(): | |
388 | for l, t in enumerate(text.splitlines(1)): |
|
388 | for l, t in enumerate(text.splitlines(1)): | |
389 | yield {"line": t, |
|
389 | yield {"line": t, | |
390 | "linenumber": "% 6d" % (l + 1), |
|
390 | "linenumber": "% 6d" % (l + 1), | |
391 | "parity": parity.next()} |
|
391 | "parity": parity.next()} | |
392 |
|
392 | |||
393 | yield self.t("filerevision", |
|
393 | yield self.t("filerevision", | |
394 | file=f, |
|
394 | file=f, | |
395 | path=_up(f), |
|
395 | path=_up(f), | |
396 | text=lines(), |
|
396 | text=lines(), | |
397 | raw=rawtext, |
|
397 | raw=rawtext, | |
398 | mimetype=mt, |
|
398 | mimetype=mt, | |
399 | rev=fctx.rev(), |
|
399 | rev=fctx.rev(), | |
400 | node=hex(fctx.node()), |
|
400 | node=hex(fctx.node()), | |
401 | author=fctx.user(), |
|
401 | author=fctx.user(), | |
402 | date=fctx.date(), |
|
402 | date=fctx.date(), | |
403 | desc=fctx.description(), |
|
403 | desc=fctx.description(), | |
404 | parent=self.siblings(fctx.parents()), |
|
404 | parent=self.siblings(fctx.parents()), | |
405 | child=self.siblings(fctx.children()), |
|
405 | child=self.siblings(fctx.children()), | |
406 | rename=self.renamelink(fl, n), |
|
406 | rename=self.renamelink(fl, n), | |
407 | permissions=fctx.manifest(). |
|
407 | permissions=fctx.manifest().flags(f)) | |
408 |
|
408 | |||
409 | def fileannotate(self, fctx): |
|
409 | def fileannotate(self, fctx): | |
410 | f = fctx.path() |
|
410 | f = fctx.path() | |
411 | n = fctx.filenode() |
|
411 | n = fctx.filenode() | |
412 | fl = fctx.filelog() |
|
412 | fl = fctx.filelog() | |
413 | parity = paritygen(self.stripecount) |
|
413 | parity = paritygen(self.stripecount) | |
414 |
|
414 | |||
415 | def annotate(**map): |
|
415 | def annotate(**map): | |
416 | last = None |
|
416 | last = None | |
417 | for f, l in fctx.annotate(follow=True): |
|
417 | for f, l in fctx.annotate(follow=True): | |
418 | fnode = f.filenode() |
|
418 | fnode = f.filenode() | |
419 | name = self.repo.ui.shortuser(f.user()) |
|
419 | name = self.repo.ui.shortuser(f.user()) | |
420 |
|
420 | |||
421 | if last != fnode: |
|
421 | if last != fnode: | |
422 | last = fnode |
|
422 | last = fnode | |
423 |
|
423 | |||
424 | yield {"parity": parity.next(), |
|
424 | yield {"parity": parity.next(), | |
425 | "node": hex(f.node()), |
|
425 | "node": hex(f.node()), | |
426 | "rev": f.rev(), |
|
426 | "rev": f.rev(), | |
427 | "author": name, |
|
427 | "author": name, | |
428 | "file": f.path(), |
|
428 | "file": f.path(), | |
429 | "line": l} |
|
429 | "line": l} | |
430 |
|
430 | |||
431 | yield self.t("fileannotate", |
|
431 | yield self.t("fileannotate", | |
432 | file=f, |
|
432 | file=f, | |
433 | annotate=annotate, |
|
433 | annotate=annotate, | |
434 | path=_up(f), |
|
434 | path=_up(f), | |
435 | rev=fctx.rev(), |
|
435 | rev=fctx.rev(), | |
436 | node=hex(fctx.node()), |
|
436 | node=hex(fctx.node()), | |
437 | author=fctx.user(), |
|
437 | author=fctx.user(), | |
438 | date=fctx.date(), |
|
438 | date=fctx.date(), | |
439 | desc=fctx.description(), |
|
439 | desc=fctx.description(), | |
440 | rename=self.renamelink(fl, n), |
|
440 | rename=self.renamelink(fl, n), | |
441 | parent=self.siblings(fctx.parents()), |
|
441 | parent=self.siblings(fctx.parents()), | |
442 | child=self.siblings(fctx.children()), |
|
442 | child=self.siblings(fctx.children()), | |
443 | permissions=fctx.manifest(). |
|
443 | permissions=fctx.manifest().flags(f)) | |
444 |
|
444 | |||
445 | def manifest(self, ctx, path): |
|
445 | def manifest(self, ctx, path): | |
446 | mf = ctx.manifest() |
|
446 | mf = ctx.manifest() | |
447 | node = ctx.node() |
|
447 | node = ctx.node() | |
448 |
|
448 | |||
449 | files = {} |
|
449 | files = {} | |
450 | parity = paritygen(self.stripecount) |
|
450 | parity = paritygen(self.stripecount) | |
451 |
|
451 | |||
452 | if path and path[-1] != "/": |
|
452 | if path and path[-1] != "/": | |
453 | path += "/" |
|
453 | path += "/" | |
454 | l = len(path) |
|
454 | l = len(path) | |
455 | abspath = "/" + path |
|
455 | abspath = "/" + path | |
456 |
|
456 | |||
457 | for f, n in mf.items(): |
|
457 | for f, n in mf.items(): | |
458 | if f[:l] != path: |
|
458 | if f[:l] != path: | |
459 | continue |
|
459 | continue | |
460 | remain = f[l:] |
|
460 | remain = f[l:] | |
461 | if "/" in remain: |
|
461 | if "/" in remain: | |
462 | short = remain[:remain.index("/") + 1] # bleah |
|
462 | short = remain[:remain.index("/") + 1] # bleah | |
463 | files[short] = (f, None) |
|
463 | files[short] = (f, None) | |
464 | else: |
|
464 | else: | |
465 | short = os.path.basename(remain) |
|
465 | short = os.path.basename(remain) | |
466 | files[short] = (f, n) |
|
466 | files[short] = (f, n) | |
467 |
|
467 | |||
468 | def filelist(**map): |
|
468 | def filelist(**map): | |
469 | fl = files.keys() |
|
469 | fl = files.keys() | |
470 | fl.sort() |
|
470 | fl.sort() | |
471 | for f in fl: |
|
471 | for f in fl: | |
472 | full, fnode = files[f] |
|
472 | full, fnode = files[f] | |
473 | if not fnode: |
|
473 | if not fnode: | |
474 | continue |
|
474 | continue | |
475 |
|
475 | |||
476 | yield {"file": full, |
|
476 | yield {"file": full, | |
477 | "parity": parity.next(), |
|
477 | "parity": parity.next(), | |
478 | "basename": f, |
|
478 | "basename": f, | |
479 | "size": ctx.filectx(full).size(), |
|
479 | "size": ctx.filectx(full).size(), | |
480 | "permissions": mf. |
|
480 | "permissions": mf.flags(full)} | |
481 |
|
481 | |||
482 | def dirlist(**map): |
|
482 | def dirlist(**map): | |
483 | fl = files.keys() |
|
483 | fl = files.keys() | |
484 | fl.sort() |
|
484 | fl.sort() | |
485 | for f in fl: |
|
485 | for f in fl: | |
486 | full, fnode = files[f] |
|
486 | full, fnode = files[f] | |
487 | if fnode: |
|
487 | if fnode: | |
488 | continue |
|
488 | continue | |
489 |
|
489 | |||
490 | yield {"parity": parity.next(), |
|
490 | yield {"parity": parity.next(), | |
491 | "path": os.path.join(abspath, f), |
|
491 | "path": os.path.join(abspath, f), | |
492 | "basename": f[:-1]} |
|
492 | "basename": f[:-1]} | |
493 |
|
493 | |||
494 | yield self.t("manifest", |
|
494 | yield self.t("manifest", | |
495 | rev=ctx.rev(), |
|
495 | rev=ctx.rev(), | |
496 | node=hex(node), |
|
496 | node=hex(node), | |
497 | path=abspath, |
|
497 | path=abspath, | |
498 | up=_up(abspath), |
|
498 | up=_up(abspath), | |
499 | upparity=parity.next(), |
|
499 | upparity=parity.next(), | |
500 | fentries=filelist, |
|
500 | fentries=filelist, | |
501 | dentries=dirlist, |
|
501 | dentries=dirlist, | |
502 | archives=self.archivelist(hex(node)), |
|
502 | archives=self.archivelist(hex(node)), | |
503 | tags=self.nodetagsdict(node), |
|
503 | tags=self.nodetagsdict(node), | |
504 | branches=self.nodebranchdict(ctx)) |
|
504 | branches=self.nodebranchdict(ctx)) | |
505 |
|
505 | |||
506 | def tags(self): |
|
506 | def tags(self): | |
507 | i = self.repo.tagslist() |
|
507 | i = self.repo.tagslist() | |
508 | i.reverse() |
|
508 | i.reverse() | |
509 | parity = paritygen(self.stripecount) |
|
509 | parity = paritygen(self.stripecount) | |
510 |
|
510 | |||
511 | def entries(notip=False, **map): |
|
511 | def entries(notip=False, **map): | |
512 | for k, n in i: |
|
512 | for k, n in i: | |
513 | if notip and k == "tip": |
|
513 | if notip and k == "tip": | |
514 | continue |
|
514 | continue | |
515 | yield {"parity": parity.next(), |
|
515 | yield {"parity": parity.next(), | |
516 | "tag": k, |
|
516 | "tag": k, | |
517 | "date": self.repo.changectx(n).date(), |
|
517 | "date": self.repo.changectx(n).date(), | |
518 | "node": hex(n)} |
|
518 | "node": hex(n)} | |
519 |
|
519 | |||
520 | yield self.t("tags", |
|
520 | yield self.t("tags", | |
521 | node=hex(self.repo.changelog.tip()), |
|
521 | node=hex(self.repo.changelog.tip()), | |
522 | entries=lambda **x: entries(False, **x), |
|
522 | entries=lambda **x: entries(False, **x), | |
523 | entriesnotip=lambda **x: entries(True, **x)) |
|
523 | entriesnotip=lambda **x: entries(True, **x)) | |
524 |
|
524 | |||
525 | def summary(self): |
|
525 | def summary(self): | |
526 | i = self.repo.tagslist() |
|
526 | i = self.repo.tagslist() | |
527 | i.reverse() |
|
527 | i.reverse() | |
528 |
|
528 | |||
529 | def tagentries(**map): |
|
529 | def tagentries(**map): | |
530 | parity = paritygen(self.stripecount) |
|
530 | parity = paritygen(self.stripecount) | |
531 | count = 0 |
|
531 | count = 0 | |
532 | for k, n in i: |
|
532 | for k, n in i: | |
533 | if k == "tip": # skip tip |
|
533 | if k == "tip": # skip tip | |
534 | continue; |
|
534 | continue; | |
535 |
|
535 | |||
536 | count += 1 |
|
536 | count += 1 | |
537 | if count > 10: # limit to 10 tags |
|
537 | if count > 10: # limit to 10 tags | |
538 | break; |
|
538 | break; | |
539 |
|
539 | |||
540 | yield self.t("tagentry", |
|
540 | yield self.t("tagentry", | |
541 | parity=parity.next(), |
|
541 | parity=parity.next(), | |
542 | tag=k, |
|
542 | tag=k, | |
543 | node=hex(n), |
|
543 | node=hex(n), | |
544 | date=self.repo.changectx(n).date()) |
|
544 | date=self.repo.changectx(n).date()) | |
545 |
|
545 | |||
546 |
|
546 | |||
547 | def branches(**map): |
|
547 | def branches(**map): | |
548 | parity = paritygen(self.stripecount) |
|
548 | parity = paritygen(self.stripecount) | |
549 |
|
549 | |||
550 | b = self.repo.branchtags() |
|
550 | b = self.repo.branchtags() | |
551 | l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()] |
|
551 | l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()] | |
552 | l.sort() |
|
552 | l.sort() | |
553 |
|
553 | |||
554 | for r,n,t in l: |
|
554 | for r,n,t in l: | |
555 | ctx = self.repo.changectx(n) |
|
555 | ctx = self.repo.changectx(n) | |
556 |
|
556 | |||
557 | yield {'parity': parity.next(), |
|
557 | yield {'parity': parity.next(), | |
558 | 'branch': t, |
|
558 | 'branch': t, | |
559 | 'node': hex(n), |
|
559 | 'node': hex(n), | |
560 | 'date': ctx.date()} |
|
560 | 'date': ctx.date()} | |
561 |
|
561 | |||
562 | def changelist(**map): |
|
562 | def changelist(**map): | |
563 | parity = paritygen(self.stripecount, offset=start-end) |
|
563 | parity = paritygen(self.stripecount, offset=start-end) | |
564 | l = [] # build a list in forward order for efficiency |
|
564 | l = [] # build a list in forward order for efficiency | |
565 | for i in xrange(start, end): |
|
565 | for i in xrange(start, end): | |
566 | ctx = self.repo.changectx(i) |
|
566 | ctx = self.repo.changectx(i) | |
567 | n = ctx.node() |
|
567 | n = ctx.node() | |
568 | hn = hex(n) |
|
568 | hn = hex(n) | |
569 |
|
569 | |||
570 | l.insert(0, self.t( |
|
570 | l.insert(0, self.t( | |
571 | 'shortlogentry', |
|
571 | 'shortlogentry', | |
572 | parity=parity.next(), |
|
572 | parity=parity.next(), | |
573 | author=ctx.user(), |
|
573 | author=ctx.user(), | |
574 | desc=ctx.description(), |
|
574 | desc=ctx.description(), | |
575 | date=ctx.date(), |
|
575 | date=ctx.date(), | |
576 | rev=i, |
|
576 | rev=i, | |
577 | node=hn, |
|
577 | node=hn, | |
578 | tags=self.nodetagsdict(n), |
|
578 | tags=self.nodetagsdict(n), | |
579 | branches=self.nodebranchdict(ctx))) |
|
579 | branches=self.nodebranchdict(ctx))) | |
580 |
|
580 | |||
581 | yield l |
|
581 | yield l | |
582 |
|
582 | |||
583 | cl = self.repo.changelog |
|
583 | cl = self.repo.changelog | |
584 | count = cl.count() |
|
584 | count = cl.count() | |
585 | start = max(0, count - self.maxchanges) |
|
585 | start = max(0, count - self.maxchanges) | |
586 | end = min(count, start + self.maxchanges) |
|
586 | end = min(count, start + self.maxchanges) | |
587 |
|
587 | |||
588 | yield self.t("summary", |
|
588 | yield self.t("summary", | |
589 | desc=self.config("web", "description", "unknown"), |
|
589 | desc=self.config("web", "description", "unknown"), | |
590 | owner=(self.config("ui", "username") or # preferred |
|
590 | owner=(self.config("ui", "username") or # preferred | |
591 | self.config("web", "contact") or # deprecated |
|
591 | self.config("web", "contact") or # deprecated | |
592 | self.config("web", "author", "unknown")), # also |
|
592 | self.config("web", "author", "unknown")), # also | |
593 | lastchange=cl.read(cl.tip())[2], |
|
593 | lastchange=cl.read(cl.tip())[2], | |
594 | tags=tagentries, |
|
594 | tags=tagentries, | |
595 | branches=branches, |
|
595 | branches=branches, | |
596 | shortlog=changelist, |
|
596 | shortlog=changelist, | |
597 | node=hex(cl.tip()), |
|
597 | node=hex(cl.tip()), | |
598 | archives=self.archivelist("tip")) |
|
598 | archives=self.archivelist("tip")) | |
599 |
|
599 | |||
600 | def filediff(self, fctx): |
|
600 | def filediff(self, fctx): | |
601 | n = fctx.node() |
|
601 | n = fctx.node() | |
602 | path = fctx.path() |
|
602 | path = fctx.path() | |
603 | parents = fctx.parents() |
|
603 | parents = fctx.parents() | |
604 | p1 = parents and parents[0].node() or nullid |
|
604 | p1 = parents and parents[0].node() or nullid | |
605 |
|
605 | |||
606 | def diff(**map): |
|
606 | def diff(**map): | |
607 | yield self.diff(p1, n, [path]) |
|
607 | yield self.diff(p1, n, [path]) | |
608 |
|
608 | |||
609 | yield self.t("filediff", |
|
609 | yield self.t("filediff", | |
610 | file=path, |
|
610 | file=path, | |
611 | node=hex(n), |
|
611 | node=hex(n), | |
612 | rev=fctx.rev(), |
|
612 | rev=fctx.rev(), | |
613 | parent=self.siblings(parents), |
|
613 | parent=self.siblings(parents), | |
614 | child=self.siblings(fctx.children()), |
|
614 | child=self.siblings(fctx.children()), | |
615 | diff=diff) |
|
615 | diff=diff) | |
616 |
|
616 | |||
617 | archive_specs = { |
|
617 | archive_specs = { | |
618 | 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None), |
|
618 | 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None), | |
619 | 'gz': ('application/x-tar', 'tgz', '.tar.gz', None), |
|
619 | 'gz': ('application/x-tar', 'tgz', '.tar.gz', None), | |
620 | 'zip': ('application/zip', 'zip', '.zip', None), |
|
620 | 'zip': ('application/zip', 'zip', '.zip', None), | |
621 | } |
|
621 | } | |
622 |
|
622 | |||
623 | def archive(self, req, key, type_): |
|
623 | def archive(self, req, key, type_): | |
624 | reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame)) |
|
624 | reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame)) | |
625 | cnode = self.repo.lookup(key) |
|
625 | cnode = self.repo.lookup(key) | |
626 | arch_version = key |
|
626 | arch_version = key | |
627 | if cnode == key or key == 'tip': |
|
627 | if cnode == key or key == 'tip': | |
628 | arch_version = short(cnode) |
|
628 | arch_version = short(cnode) | |
629 | name = "%s-%s" % (reponame, arch_version) |
|
629 | name = "%s-%s" % (reponame, arch_version) | |
630 | mimetype, artype, extension, encoding = self.archive_specs[type_] |
|
630 | mimetype, artype, extension, encoding = self.archive_specs[type_] | |
631 | headers = [('Content-type', mimetype), |
|
631 | headers = [('Content-type', mimetype), | |
632 | ('Content-disposition', 'attachment; filename=%s%s' % |
|
632 | ('Content-disposition', 'attachment; filename=%s%s' % | |
633 | (name, extension))] |
|
633 | (name, extension))] | |
634 | if encoding: |
|
634 | if encoding: | |
635 | headers.append(('Content-encoding', encoding)) |
|
635 | headers.append(('Content-encoding', encoding)) | |
636 | req.header(headers) |
|
636 | req.header(headers) | |
637 | archival.archive(self.repo, req.out, cnode, artype, prefix=name) |
|
637 | archival.archive(self.repo, req.out, cnode, artype, prefix=name) | |
638 |
|
638 | |||
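archive() looks up the requested revision, picks the archiver and extension from archive_specs, and streams the result as an attachment. For a hypothetical request with type_='gz' on a repository named hg-crew at tag 1.0:

    mimetype, artype, extension, encoding = self.archive_specs['gz']
    # ('application/x-tar', 'tgz', '.tar.gz', None)
    # headers sent:
    #   Content-type: application/x-tar
    #   Content-disposition: attachment; filename=hg-crew-1.0.tar.gz
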
639 | # add tags to things |
|
639 | # add tags to things | |
640 | # tags -> list of changesets corresponding to tags |
|
640 | # tags -> list of changesets corresponding to tags | |
641 | # find tag, changeset, file |
|
641 | # find tag, changeset, file | |
642 |
|
642 | |||
643 | def cleanpath(self, path): |
|
643 | def cleanpath(self, path): | |
644 | path = path.lstrip('/') |
|
644 | path = path.lstrip('/') | |
645 | return util.canonpath(self.repo.root, '', path) |
|
645 | return util.canonpath(self.repo.root, '', path) | |
646 |
|
646 | |||
647 | def run(self): |
|
647 | def run(self): | |
648 | if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): |
|
648 | if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): | |
649 | raise RuntimeError("This function is only intended to be called while running as a CGI script.") |
|
649 | raise RuntimeError("This function is only intended to be called while running as a CGI script.") | |
650 | import mercurial.hgweb.wsgicgi as wsgicgi |
|
650 | import mercurial.hgweb.wsgicgi as wsgicgi | |
651 | from request import wsgiapplication |
|
651 | from request import wsgiapplication | |
652 | def make_web_app(): |
|
652 | def make_web_app(): | |
653 | return self |
|
653 | return self | |
654 | wsgicgi.launch(wsgiapplication(make_web_app)) |
|
654 | wsgicgi.launch(wsgiapplication(make_web_app)) | |
655 |
|
655 | |||
656 | def run_wsgi(self, req): |
|
656 | def run_wsgi(self, req): | |
657 | def header(**map): |
|
657 | def header(**map): | |
658 | header_file = cStringIO.StringIO( |
|
658 | header_file = cStringIO.StringIO( | |
659 | ''.join(self.t("header", encoding=self.encoding, **map))) |
|
659 | ''.join(self.t("header", encoding=self.encoding, **map))) | |
660 | msg = mimetools.Message(header_file, 0) |
|
660 | msg = mimetools.Message(header_file, 0) | |
661 | req.header(msg.items()) |
|
661 | req.header(msg.items()) | |
662 | yield header_file.read() |
|
662 | yield header_file.read() | |
663 |
|
663 | |||
664 | def rawfileheader(**map): |
|
664 | def rawfileheader(**map): | |
665 | req.header([('Content-type', map['mimetype']), |
|
665 | req.header([('Content-type', map['mimetype']), | |
666 | ('Content-disposition', 'filename=%s' % map['file']), |
|
666 | ('Content-disposition', 'filename=%s' % map['file']), | |
667 | ('Content-length', str(len(map['raw'])))]) |
|
667 | ('Content-length', str(len(map['raw'])))]) | |
668 | yield '' |
|
668 | yield '' | |
669 |
|
669 | |||
670 | def footer(**map): |
|
670 | def footer(**map): | |
671 | yield self.t("footer", **map) |
|
671 | yield self.t("footer", **map) | |
672 |
|
672 | |||
673 | def motd(**map): |
|
673 | def motd(**map): | |
674 | yield self.config("web", "motd", "") |
|
674 | yield self.config("web", "motd", "") | |
675 |
|
675 | |||
676 | def expand_form(form): |
|
676 | def expand_form(form): | |
677 | shortcuts = { |
|
677 | shortcuts = { | |
678 | 'cl': [('cmd', ['changelog']), ('rev', None)], |
|
678 | 'cl': [('cmd', ['changelog']), ('rev', None)], | |
679 | 'sl': [('cmd', ['shortlog']), ('rev', None)], |
|
679 | 'sl': [('cmd', ['shortlog']), ('rev', None)], | |
680 | 'cs': [('cmd', ['changeset']), ('node', None)], |
|
680 | 'cs': [('cmd', ['changeset']), ('node', None)], | |
681 | 'f': [('cmd', ['file']), ('filenode', None)], |
|
681 | 'f': [('cmd', ['file']), ('filenode', None)], | |
682 | 'fl': [('cmd', ['filelog']), ('filenode', None)], |
|
682 | 'fl': [('cmd', ['filelog']), ('filenode', None)], | |
683 | 'fd': [('cmd', ['filediff']), ('node', None)], |
|
683 | 'fd': [('cmd', ['filediff']), ('node', None)], | |
684 | 'fa': [('cmd', ['annotate']), ('filenode', None)], |
|
684 | 'fa': [('cmd', ['annotate']), ('filenode', None)], | |
685 | 'mf': [('cmd', ['manifest']), ('manifest', None)], |
|
685 | 'mf': [('cmd', ['manifest']), ('manifest', None)], | |
686 | 'ca': [('cmd', ['archive']), ('node', None)], |
|
686 | 'ca': [('cmd', ['archive']), ('node', None)], | |
687 | 'tags': [('cmd', ['tags'])], |
|
687 | 'tags': [('cmd', ['tags'])], | |
688 | 'tip': [('cmd', ['changeset']), ('node', ['tip'])], |
|
688 | 'tip': [('cmd', ['changeset']), ('node', ['tip'])], | |
689 | 'static': [('cmd', ['static']), ('file', None)] |
|
689 | 'static': [('cmd', ['static']), ('file', None)] | |
690 | } |
|
690 | } | |
691 |
|
691 | |||
692 | for k in shortcuts.iterkeys(): |
|
692 | for k in shortcuts.iterkeys(): | |
693 | if form.has_key(k): |
|
693 | if form.has_key(k): | |
694 | for name, value in shortcuts[k]: |
|
694 | for name, value in shortcuts[k]: | |
695 | if value is None: |
|
695 | if value is None: | |
696 | value = form[k] |
|
696 | value = form[k] | |
697 | form[name] = value |
|
697 | form[name] = value | |
698 | del form[k] |
|
698 | del form[k] | |
699 |
|
699 | |||
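expand_form translates the short query aliases into the traditional cmd form. For example, with hypothetical values:

    form = {'cs': ['0a1b2c3d']}
    expand_form(form)
    # form is now {'cmd': ['changeset'], 'node': ['0a1b2c3d']}
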
700 | def rewrite_request(req): |
|
700 | def rewrite_request(req): | |
701 | '''translate new web interface to traditional format''' |
|
701 | '''translate new web interface to traditional format''' | |
702 |
|
702 | |||
703 | def spliturl(req): |
|
703 | def spliturl(req): | |
704 | def firstitem(query): |
|
704 | def firstitem(query): | |
705 | return query.split('&', 1)[0].split(';', 1)[0] |
|
705 | return query.split('&', 1)[0].split(';', 1)[0] | |
706 |
|
706 | |||
707 | def normurl(url): |
|
707 | def normurl(url): | |
708 | inner = '/'.join([x for x in url.split('/') if x]) |
|
708 | inner = '/'.join([x for x in url.split('/') if x]) | |
709 | tl = len(url) > 1 and url.endswith('/') and '/' or '' |
|
709 | tl = len(url) > 1 and url.endswith('/') and '/' or '' | |
710 |
|
710 | |||
711 | return '%s%s%s' % (url.startswith('/') and '/' or '', |
|
711 | return '%s%s%s' % (url.startswith('/') and '/' or '', | |
712 | inner, tl) |
|
712 | inner, tl) | |
713 |
|
713 | |||
714 | root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0])) |
|
714 | root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0])) | |
715 | pi = normurl(req.env.get('PATH_INFO', '')) |
|
715 | pi = normurl(req.env.get('PATH_INFO', '')) | |
716 | if pi: |
|
716 | if pi: | |
717 | # strip leading / |
|
717 | # strip leading / | |
718 | pi = pi[1:] |
|
718 | pi = pi[1:] | |
719 | if pi: |
|
719 | if pi: | |
720 | root = root[:root.rfind(pi)] |
|
720 | root = root[:root.rfind(pi)] | |
721 | if req.env.has_key('REPO_NAME'): |
|
721 | if req.env.has_key('REPO_NAME'): | |
722 | rn = req.env['REPO_NAME'] + '/' |
|
722 | rn = req.env['REPO_NAME'] + '/' | |
723 | root += rn |
|
723 | root += rn | |
724 | query = pi[len(rn):] |
|
724 | query = pi[len(rn):] | |
725 | else: |
|
725 | else: | |
726 | query = pi |
|
726 | query = pi | |
727 | else: |
|
727 | else: | |
728 | root += '?' |
|
728 | root += '?' | |
729 | query = firstitem(req.env['QUERY_STRING']) |
|
729 | query = firstitem(req.env['QUERY_STRING']) | |
730 |
|
730 | |||
731 | return (root, query) |
|
731 | return (root, query) | |
732 |
|
732 | |||
733 | req.url, query = spliturl(req) |
|
733 | req.url, query = spliturl(req) | |
734 |
|
734 | |||
735 | if req.form.has_key('cmd'): |
|
735 | if req.form.has_key('cmd'): | |
736 | # old style |
|
736 | # old style | |
737 | return |
|
737 | return | |
738 |
|
738 | |||
739 | args = query.split('/', 2) |
|
739 | args = query.split('/', 2) | |
740 | if not args or not args[0]: |
|
740 | if not args or not args[0]: | |
741 | return |
|
741 | return | |
742 |
|
742 | |||
743 | cmd = args.pop(0) |
|
743 | cmd = args.pop(0) | |
744 | style = cmd.rfind('-') |
|
744 | style = cmd.rfind('-') | |
745 | if style != -1: |
|
745 | if style != -1: | |
746 | req.form['style'] = [cmd[:style]] |
|
746 | req.form['style'] = [cmd[:style]] | |
747 | cmd = cmd[style+1:] |
|
747 | cmd = cmd[style+1:] | |
748 | # avoid accepting e.g. style parameter as command |
|
748 | # avoid accepting e.g. style parameter as command | |
749 | if hasattr(self, 'do_' + cmd): |
|
749 | if hasattr(self, 'do_' + cmd): | |
750 | req.form['cmd'] = [cmd] |
|
750 | req.form['cmd'] = [cmd] | |
751 |
|
751 | |||
752 | if args and args[0]: |
|
752 | if args and args[0]: | |
753 | node = args.pop(0) |
|
753 | node = args.pop(0) | |
754 | req.form['node'] = [node] |
|
754 | req.form['node'] = [node] | |
755 | if args: |
|
755 | if args: | |
756 | req.form['file'] = args |
|
756 | req.form['file'] = args | |
757 |
|
757 | |||
758 | if cmd == 'static': |
|
758 | if cmd == 'static': | |
759 | req.form['file'] = req.form['node'] |
|
759 | req.form['file'] = req.form['node'] | |
760 | elif cmd == 'archive': |
|
760 | elif cmd == 'archive': | |
761 | fn = req.form['node'][0] |
|
761 | fn = req.form['node'][0] | |
762 | for type_, spec in self.archive_specs.iteritems(): |
|
762 | for type_, spec in self.archive_specs.iteritems(): | |
763 | ext = spec[2] |
|
763 | ext = spec[2] | |
764 | if fn.endswith(ext): |
|
764 | if fn.endswith(ext): | |
765 | req.form['node'] = [fn[:-len(ext)]] |
|
765 | req.form['node'] = [fn[:-len(ext)]] | |
766 | req.form['type'] = [type_] |
|
766 | req.form['type'] = [type_] | |
767 |
|
767 | |||
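rewrite_request maps the newer path-style URLs onto the same cmd/node/file form the do_* handlers expect. Roughly, for some hypothetical requests:

    #   /shortlog/1234       -> cmd=['shortlog'], node=['1234']
    #   /raw-file/tip/README -> style=['raw'], cmd=['file'],
    #                           node=['tip'], file=['README']
    #   /archive/tip.tar.gz  -> cmd=['archive'], node=['tip'], type=['gz']
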
768 | def sessionvars(**map): |
|
768 | def sessionvars(**map): | |
769 | fields = [] |
|
769 | fields = [] | |
770 | if req.form.has_key('style'): |
|
770 | if req.form.has_key('style'): | |
771 | style = req.form['style'][0] |
|
771 | style = req.form['style'][0] | |
772 | if style != self.config('web', 'style', ''): |
|
772 | if style != self.config('web', 'style', ''): | |
773 | fields.append(('style', style)) |
|
773 | fields.append(('style', style)) | |
774 |
|
774 | |||
775 | separator = req.url[-1] == '?' and ';' or '?' |
|
775 | separator = req.url[-1] == '?' and ';' or '?' | |
776 | for name, value in fields: |
|
776 | for name, value in fields: | |
777 | yield dict(name=name, value=value, separator=separator) |
|
777 | yield dict(name=name, value=value, separator=separator) | |
778 | separator = ';' |
|
778 | separator = ';' | |
779 |
|
779 | |||
780 | self.refresh() |
|
780 | self.refresh() | |
781 |
|
781 | |||
782 | expand_form(req.form) |
|
782 | expand_form(req.form) | |
783 | rewrite_request(req) |
|
783 | rewrite_request(req) | |
784 |
|
784 | |||
785 | style = self.config("web", "style", "") |
|
785 | style = self.config("web", "style", "") | |
786 | if req.form.has_key('style'): |
|
786 | if req.form.has_key('style'): | |
787 | style = req.form['style'][0] |
|
787 | style = req.form['style'][0] | |
788 | mapfile = style_map(self.templatepath, style) |
|
788 | mapfile = style_map(self.templatepath, style) | |
789 |
|
789 | |||
790 | port = req.env["SERVER_PORT"] |
|
790 | port = req.env["SERVER_PORT"] | |
791 | port = port != "80" and (":" + port) or "" |
|
791 | port = port != "80" and (":" + port) or "" | |
792 | urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port) |
|
792 | urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port) | |
793 | staticurl = self.config("web", "staticurl") or req.url + 'static/' |
|
793 | staticurl = self.config("web", "staticurl") or req.url + 'static/' | |
794 | if not staticurl.endswith('/'): |
|
794 | if not staticurl.endswith('/'): | |
795 | staticurl += '/' |
|
795 | staticurl += '/' | |
796 |
|
796 | |||
797 | if not self.reponame: |
|
797 | if not self.reponame: | |
798 | self.reponame = (self.config("web", "name") |
|
798 | self.reponame = (self.config("web", "name") | |
799 | or req.env.get('REPO_NAME') |
|
799 | or req.env.get('REPO_NAME') | |
800 | or req.url.strip('/') or self.repo.root) |
|
800 | or req.url.strip('/') or self.repo.root) | |
801 |
|
801 | |||
802 | self.t = templater.templater(mapfile, templater.common_filters, |
|
802 | self.t = templater.templater(mapfile, templater.common_filters, | |
803 | defaults={"url": req.url, |
|
803 | defaults={"url": req.url, | |
804 | "staticurl": staticurl, |
|
804 | "staticurl": staticurl, | |
805 | "urlbase": urlbase, |
|
805 | "urlbase": urlbase, | |
806 | "repo": self.reponame, |
|
806 | "repo": self.reponame, | |
807 | "header": header, |
|
807 | "header": header, | |
808 | "footer": footer, |
|
808 | "footer": footer, | |
809 | "motd": motd, |
|
809 | "motd": motd, | |
810 | "rawfileheader": rawfileheader, |
|
810 | "rawfileheader": rawfileheader, | |
811 | "sessionvars": sessionvars |
|
811 | "sessionvars": sessionvars | |
812 | }) |
|
812 | }) | |
813 |
|
813 | |||
814 | try: |
|
814 | try: | |
815 | if not req.form.has_key('cmd'): |
|
815 | if not req.form.has_key('cmd'): | |
816 | req.form['cmd'] = [self.t.cache['default']] |
|
816 | req.form['cmd'] = [self.t.cache['default']] | |
817 |
|
817 | |||
818 | cmd = req.form['cmd'][0] |
|
818 | cmd = req.form['cmd'][0] | |
819 |
|
819 | |||
820 | method = getattr(self, 'do_' + cmd, None) |
|
820 | method = getattr(self, 'do_' + cmd, None) | |
821 | if method: |
|
821 | if method: | |
822 | try: |
|
822 | try: | |
823 | method(req) |
|
823 | method(req) | |
824 | except (hg.RepoError, revlog.RevlogError), inst: |
|
824 | except (hg.RepoError, revlog.RevlogError), inst: | |
825 | req.write(self.t("error", error=str(inst))) |
|
825 | req.write(self.t("error", error=str(inst))) | |
826 | else: |
|
826 | else: | |
827 | req.write(self.t("error", error='No such method: ' + cmd)) |
|
827 | req.write(self.t("error", error='No such method: ' + cmd)) | |
828 | finally: |
|
828 | finally: | |
829 | self.t = None |
|
829 | self.t = None | |
830 |
|
830 | |||
831 | def changectx(self, req): |
|
831 | def changectx(self, req): | |
832 | if req.form.has_key('node'): |
|
832 | if req.form.has_key('node'): | |
833 | changeid = req.form['node'][0] |
|
833 | changeid = req.form['node'][0] | |
834 | elif req.form.has_key('manifest'): |
|
834 | elif req.form.has_key('manifest'): | |
835 | changeid = req.form['manifest'][0] |
|
835 | changeid = req.form['manifest'][0] | |
836 | else: |
|
836 | else: | |
837 | changeid = self.repo.changelog.count() - 1 |
|
837 | changeid = self.repo.changelog.count() - 1 | |
838 |
|
838 | |||
839 | try: |
|
839 | try: | |
840 | ctx = self.repo.changectx(changeid) |
|
840 | ctx = self.repo.changectx(changeid) | |
841 | except hg.RepoError: |
|
841 | except hg.RepoError: | |
842 | man = self.repo.manifest |
|
842 | man = self.repo.manifest | |
843 | mn = man.lookup(changeid) |
|
843 | mn = man.lookup(changeid) | |
844 | ctx = self.repo.changectx(man.linkrev(mn)) |
|
844 | ctx = self.repo.changectx(man.linkrev(mn)) | |
845 |
|
845 | |||
846 | return ctx |
|
846 | return ctx | |
847 |
|
847 | |||
848 | def filectx(self, req): |
|
848 | def filectx(self, req): | |
849 | path = self.cleanpath(req.form['file'][0]) |
|
849 | path = self.cleanpath(req.form['file'][0]) | |
850 | if req.form.has_key('node'): |
|
850 | if req.form.has_key('node'): | |
851 | changeid = req.form['node'][0] |
|
851 | changeid = req.form['node'][0] | |
852 | else: |
|
852 | else: | |
853 | changeid = req.form['filenode'][0] |
|
853 | changeid = req.form['filenode'][0] | |
854 | try: |
|
854 | try: | |
855 | ctx = self.repo.changectx(changeid) |
|
855 | ctx = self.repo.changectx(changeid) | |
856 | fctx = ctx.filectx(path) |
|
856 | fctx = ctx.filectx(path) | |
857 | except hg.RepoError: |
|
857 | except hg.RepoError: | |
858 | fctx = self.repo.filectx(path, fileid=changeid) |
|
858 | fctx = self.repo.filectx(path, fileid=changeid) | |
859 |
|
859 | |||
860 | return fctx |
|
860 | return fctx | |
861 |
|
861 | |||
862 | def do_log(self, req): |
|
862 | def do_log(self, req): | |
863 | if req.form.has_key('file') and req.form['file'][0]: |
|
863 | if req.form.has_key('file') and req.form['file'][0]: | |
864 | self.do_filelog(req) |
|
864 | self.do_filelog(req) | |
865 | else: |
|
865 | else: | |
866 | self.do_changelog(req) |
|
866 | self.do_changelog(req) | |
867 |
|
867 | |||
868 | def do_rev(self, req): |
|
868 | def do_rev(self, req): | |
869 | self.do_changeset(req) |
|
869 | self.do_changeset(req) | |
870 |
|
870 | |||
871 | def do_file(self, req): |
|
871 | def do_file(self, req): | |
872 | path = self.cleanpath(req.form.get('file', [''])[0]) |
|
872 | path = self.cleanpath(req.form.get('file', [''])[0]) | |
873 | if path: |
|
873 | if path: | |
874 | try: |
|
874 | try: | |
875 | req.write(self.filerevision(self.filectx(req))) |
|
875 | req.write(self.filerevision(self.filectx(req))) | |
876 | return |
|
876 | return | |
877 | except revlog.LookupError: |
|
877 | except revlog.LookupError: | |
878 | pass |
|
878 | pass | |
879 |
|
879 | |||
880 | req.write(self.manifest(self.changectx(req), path)) |
|
880 | req.write(self.manifest(self.changectx(req), path)) | |
881 |
|
881 | |||
882 | def do_diff(self, req): |
|
882 | def do_diff(self, req): | |
883 | self.do_filediff(req) |
|
883 | self.do_filediff(req) | |
884 |
|
884 | |||
885 | def do_changelog(self, req, shortlog = False): |
|
885 | def do_changelog(self, req, shortlog = False): | |
886 | if req.form.has_key('node'): |
|
886 | if req.form.has_key('node'): | |
887 | ctx = self.changectx(req) |
|
887 | ctx = self.changectx(req) | |
888 | else: |
|
888 | else: | |
889 | if req.form.has_key('rev'): |
|
889 | if req.form.has_key('rev'): | |
890 | hi = req.form['rev'][0] |
|
890 | hi = req.form['rev'][0] | |
891 | else: |
|
891 | else: | |
892 | hi = self.repo.changelog.count() - 1 |
|
892 | hi = self.repo.changelog.count() - 1 | |
893 | try: |
|
893 | try: | |
894 | ctx = self.repo.changectx(hi) |
|
894 | ctx = self.repo.changectx(hi) | |
895 | except hg.RepoError: |
|
895 | except hg.RepoError: | |
896 | req.write(self.search(hi)) # XXX redirect to 404 page? |
|
896 | req.write(self.search(hi)) # XXX redirect to 404 page? | |
897 | return |
|
897 | return | |
898 |
|
898 | |||
899 | req.write(self.changelog(ctx, shortlog = shortlog)) |
|
899 | req.write(self.changelog(ctx, shortlog = shortlog)) | |
900 |
|
900 | |||
901 | def do_shortlog(self, req): |
|
901 | def do_shortlog(self, req): | |
902 | self.do_changelog(req, shortlog = True) |
|
902 | self.do_changelog(req, shortlog = True) | |
903 |
|
903 | |||
904 | def do_changeset(self, req): |
|
904 | def do_changeset(self, req): | |
905 | req.write(self.changeset(self.changectx(req))) |
|
905 | req.write(self.changeset(self.changectx(req))) | |
906 |
|
906 | |||
907 | def do_manifest(self, req): |
|
907 | def do_manifest(self, req): | |
908 | req.write(self.manifest(self.changectx(req), |
|
908 | req.write(self.manifest(self.changectx(req), | |
909 | self.cleanpath(req.form['path'][0]))) |
|
909 | self.cleanpath(req.form['path'][0]))) | |
910 |
|
910 | |||
911 | def do_tags(self, req): |
|
911 | def do_tags(self, req): | |
912 | req.write(self.tags()) |
|
912 | req.write(self.tags()) | |
913 |
|
913 | |||
914 | def do_summary(self, req): |
|
914 | def do_summary(self, req): | |
915 | req.write(self.summary()) |
|
915 | req.write(self.summary()) | |
916 |
|
916 | |||
917 | def do_filediff(self, req): |
|
917 | def do_filediff(self, req): | |
918 | req.write(self.filediff(self.filectx(req))) |
|
918 | req.write(self.filediff(self.filectx(req))) | |
919 |
|
919 | |||
920 | def do_annotate(self, req): |
|
920 | def do_annotate(self, req): | |
921 | req.write(self.fileannotate(self.filectx(req))) |
|
921 | req.write(self.fileannotate(self.filectx(req))) | |
922 |
|
922 | |||
923 | def do_filelog(self, req): |
|
923 | def do_filelog(self, req): | |
924 | req.write(self.filelog(self.filectx(req))) |
|
924 | req.write(self.filelog(self.filectx(req))) | |
925 |
|
925 | |||
926 | def do_lookup(self, req): |
|
926 | def do_lookup(self, req): | |
927 | try: |
|
927 | try: | |
928 | r = hex(self.repo.lookup(req.form['key'][0])) |
|
928 | r = hex(self.repo.lookup(req.form['key'][0])) | |
929 | success = 1 |
|
929 | success = 1 | |
930 | except Exception,inst: |
|
930 | except Exception,inst: | |
931 | r = str(inst) |
|
931 | r = str(inst) | |
932 | success = 0 |
|
932 | success = 0 | |
933 | resp = "%s %s\n" % (success, r) |
|
933 | resp = "%s %s\n" % (success, r) | |
934 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
934 | req.httphdr("application/mercurial-0.1", length=len(resp)) | |
935 | req.write(resp) |
|
935 | req.write(resp) | |
936 |
|
936 | |||
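The lookup response built above is a single line: a success flag followed by either the hexadecimal node or the error text. A minimal client-side parse, assuming resp holds the response body (the helper name is illustrative, not part of this module):

    def parse_lookup_response(resp):
        # "1 <hex node>\n" on success, "0 <error message>\n" on failure
        flag, rest = resp.rstrip('\n').split(' ', 1)
        if flag == '1':
            return rest            # hex changeset id
        raise ValueError(rest)     # error text from the server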
937 | def do_heads(self, req): |
|
937 | def do_heads(self, req): | |
938 | resp = " ".join(map(hex, self.repo.heads())) + "\n" |
|
938 | resp = " ".join(map(hex, self.repo.heads())) + "\n" | |
939 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
939 | req.httphdr("application/mercurial-0.1", length=len(resp)) | |
940 | req.write(resp) |
|
940 | req.write(resp) | |
941 |
|
941 | |||
942 | def do_branches(self, req): |
|
942 | def do_branches(self, req): | |
943 | nodes = [] |
|
943 | nodes = [] | |
944 | if req.form.has_key('nodes'): |
|
944 | if req.form.has_key('nodes'): | |
945 | nodes = map(bin, req.form['nodes'][0].split(" ")) |
|
945 | nodes = map(bin, req.form['nodes'][0].split(" ")) | |
946 | resp = cStringIO.StringIO() |
|
946 | resp = cStringIO.StringIO() | |
947 | for b in self.repo.branches(nodes): |
|
947 | for b in self.repo.branches(nodes): | |
948 | resp.write(" ".join(map(hex, b)) + "\n") |
|
948 | resp.write(" ".join(map(hex, b)) + "\n") | |
949 | resp = resp.getvalue() |
|
949 | resp = resp.getvalue() | |
950 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
950 | req.httphdr("application/mercurial-0.1", length=len(resp)) | |
951 | req.write(resp) |
|
951 | req.write(resp) | |
952 |
|
952 | |||
953 | def do_between(self, req): |
|
953 | def do_between(self, req): | |
954 | if req.form.has_key('pairs'): |
|
954 | if req.form.has_key('pairs'): | |
955 | pairs = [map(bin, p.split("-")) |
|
955 | pairs = [map(bin, p.split("-")) | |
956 | for p in req.form['pairs'][0].split(" ")] |
|
956 | for p in req.form['pairs'][0].split(" ")] | |
957 | resp = cStringIO.StringIO() |
|
957 | resp = cStringIO.StringIO() | |
958 | for b in self.repo.between(pairs): |
|
958 | for b in self.repo.between(pairs): | |
959 | resp.write(" ".join(map(hex, b)) + "\n") |
|
959 | resp.write(" ".join(map(hex, b)) + "\n") | |
960 | resp = resp.getvalue() |
|
960 | resp = resp.getvalue() | |
961 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
961 | req.httphdr("application/mercurial-0.1", length=len(resp)) | |
962 | req.write(resp) |
|
962 | req.write(resp) | |
963 |
|
963 | |||
964 | def do_changegroup(self, req): |
|
964 | def do_changegroup(self, req): | |
965 | req.httphdr("application/mercurial-0.1") |
|
965 | req.httphdr("application/mercurial-0.1") | |
966 | nodes = [] |
|
966 | nodes = [] | |
967 | if not self.allowpull: |
|
967 | if not self.allowpull: | |
968 | return |
|
968 | return | |
969 |
|
969 | |||
970 | if req.form.has_key('roots'): |
|
970 | if req.form.has_key('roots'): | |
971 | nodes = map(bin, req.form['roots'][0].split(" ")) |
|
971 | nodes = map(bin, req.form['roots'][0].split(" ")) | |
972 |
|
972 | |||
973 | z = zlib.compressobj() |
|
973 | z = zlib.compressobj() | |
974 | f = self.repo.changegroup(nodes, 'serve') |
|
974 | f = self.repo.changegroup(nodes, 'serve') | |
975 | while 1: |
|
975 | while 1: | |
976 | chunk = f.read(4096) |
|
976 | chunk = f.read(4096) | |
977 | if not chunk: |
|
977 | if not chunk: | |
978 | break |
|
978 | break | |
979 | req.write(z.compress(chunk)) |
|
979 | req.write(z.compress(chunk)) | |
980 |
|
980 | |||
981 | req.write(z.flush()) |
|
981 | req.write(z.flush()) | |
982 |
|
982 | |||
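The changegroup handler above streams zlib-compressed data in 4096-byte chunks and finishes with z.flush(). A rough sketch of the receiving side, assuming response is a file-like object over the compressed body:

    import zlib

    def decompress_changegroup(response, chunksize=4096):
        # mirror of the server loop: read compressed chunks, decompress as we go
        zd = zlib.decompressobj()
        while True:
            chunk = response.read(chunksize)
            if not chunk:
                break
            yield zd.decompress(chunk)
        yield zd.flush()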
983 | def do_changegroupsubset(self, req): |
|
983 | def do_changegroupsubset(self, req): | |
984 | req.httphdr("application/mercurial-0.1") |
|
984 | req.httphdr("application/mercurial-0.1") | |
985 | bases = [] |
|
985 | bases = [] | |
986 | heads = [] |
|
986 | heads = [] | |
987 | if not self.allowpull: |
|
987 | if not self.allowpull: | |
988 | return |
|
988 | return | |
989 |
|
989 | |||
990 | if req.form.has_key('bases'): |
|
990 | if req.form.has_key('bases'): | |
991 | bases = [bin(x) for x in req.form['bases'][0].split(' ')] |
|
991 | bases = [bin(x) for x in req.form['bases'][0].split(' ')] | |
992 | if req.form.has_key('heads'): |
|
992 | if req.form.has_key('heads'): | |
993 | heads = [bin(x) for x in req.form['heads'][0].split(' ')] |
|
993 | heads = [bin(x) for x in req.form['heads'][0].split(' ')] | |
994 |
|
994 | |||
995 | z = zlib.compressobj() |
|
995 | z = zlib.compressobj() | |
996 | f = self.repo.changegroupsubset(bases, heads, 'serve') |
|
996 | f = self.repo.changegroupsubset(bases, heads, 'serve') | |
997 | while 1: |
|
997 | while 1: | |
998 | chunk = f.read(4096) |
|
998 | chunk = f.read(4096) | |
999 | if not chunk: |
|
999 | if not chunk: | |
1000 | break |
|
1000 | break | |
1001 | req.write(z.compress(chunk)) |
|
1001 | req.write(z.compress(chunk)) | |
1002 |
|
1002 | |||
1003 | req.write(z.flush()) |
|
1003 | req.write(z.flush()) | |
1004 |
|
1004 | |||
1005 | def do_archive(self, req): |
|
1005 | def do_archive(self, req): | |
1006 | type_ = req.form['type'][0] |
|
1006 | type_ = req.form['type'][0] | |
1007 | allowed = self.configlist("web", "allow_archive") |
|
1007 | allowed = self.configlist("web", "allow_archive") | |
1008 | if (type_ in self.archives and (type_ in allowed or |
|
1008 | if (type_ in self.archives and (type_ in allowed or | |
1009 | self.configbool("web", "allow" + type_, False))): |
|
1009 | self.configbool("web", "allow" + type_, False))): | |
1010 | self.archive(req, req.form['node'][0], type_) |
|
1010 | self.archive(req, req.form['node'][0], type_) | |
1011 | return |
|
1011 | return | |
1012 |
|
1012 | |||
1013 | req.write(self.t("error")) |
|
1013 | req.write(self.t("error")) | |
1014 |
|
1014 | |||
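do_archive only serves a download when the requested type is listed in web.allow_archive or enabled by a per-type web.allow<type> flag. A repository owner could enable all three formats in .hg/hgrc, for example:

    [web]
    allow_archive = gz, zip, bz2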
1015 | def do_static(self, req): |
|
1015 | def do_static(self, req): | |
1016 | fname = req.form['file'][0] |
|
1016 | fname = req.form['file'][0] | |
1017 | # a repo owner may set web.static in .hg/hgrc to get any file |
|
1017 | # a repo owner may set web.static in .hg/hgrc to get any file | |
1018 | # readable by the user running the CGI script |
|
1018 | # readable by the user running the CGI script | |
1019 | static = self.config("web", "static", |
|
1019 | static = self.config("web", "static", | |
1020 | os.path.join(self.templatepath, "static"), |
|
1020 | os.path.join(self.templatepath, "static"), | |
1021 | untrusted=False) |
|
1021 | untrusted=False) | |
1022 | req.write(staticfile(static, fname, req) |
|
1022 | req.write(staticfile(static, fname, req) | |
1023 | or self.t("error", error="%r not found" % fname)) |
|
1023 | or self.t("error", error="%r not found" % fname)) | |
1024 |
|
1024 | |||
1025 | def do_capabilities(self, req): |
|
1025 | def do_capabilities(self, req): | |
1026 | caps = ['lookup', 'changegroupsubset'] |
|
1026 | caps = ['lookup', 'changegroupsubset'] | |
1027 | if self.configbool('server', 'uncompressed'): |
|
1027 | if self.configbool('server', 'uncompressed'): | |
1028 | caps.append('stream=%d' % self.repo.changelog.version) |
|
1028 | caps.append('stream=%d' % self.repo.changelog.version) | |
1029 | # XXX: make configurable and/or share code with do_unbundle: |
|
1029 | # XXX: make configurable and/or share code with do_unbundle: | |
1030 | unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN'] |
|
1030 | unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN'] | |
1031 | if unbundleversions: |
|
1031 | if unbundleversions: | |
1032 | caps.append('unbundle=%s' % ','.join(unbundleversions)) |
|
1032 | caps.append('unbundle=%s' % ','.join(unbundleversions)) | |
1033 | resp = ' '.join(caps) |
|
1033 | resp = ' '.join(caps) | |
1034 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
1034 | req.httphdr("application/mercurial-0.1", length=len(resp)) | |
1035 | req.write(resp) |
|
1035 | req.write(resp) | |
1036 |
|
1036 | |||
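With server.uncompressed enabled, the capability string assembled here comes out as one space-separated line along the lines of the following (the revlog version after stream= is illustrative):

    lookup changegroupsubset stream=1 unbundle=HG10GZ,HG10BZ,HG10UN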
1037 | def check_perm(self, req, op, default): |
|
1037 | def check_perm(self, req, op, default): | |
1038 | '''check permission for operation based on user auth. |
|
1038 | '''check permission for operation based on user auth. | |
1039 | return true if op allowed, else false. |
|
1039 | return true if op allowed, else false. | |
1040 | default is policy to use if no config given.''' |
|
1040 | default is policy to use if no config given.''' | |
1041 |
|
1041 | |||
1042 | user = req.env.get('REMOTE_USER') |
|
1042 | user = req.env.get('REMOTE_USER') | |
1043 |
|
1043 | |||
1044 | deny = self.configlist('web', 'deny_' + op) |
|
1044 | deny = self.configlist('web', 'deny_' + op) | |
1045 | if deny and (not user or deny == ['*'] or user in deny): |
|
1045 | if deny and (not user or deny == ['*'] or user in deny): | |
1046 | return False |
|
1046 | return False | |
1047 |
|
1047 | |||
1048 | allow = self.configlist('web', 'allow_' + op) |
|
1048 | allow = self.configlist('web', 'allow_' + op) | |
1049 | return (allow and (allow == ['*'] or user in allow)) or default |
|
1049 | return (allow and (allow == ['*'] or user in allow)) or default | |
1050 |
|
1050 | |||
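check_perm is what do_unbundle below uses to gate pushes: deny_* is consulted first, then allow_*, then the caller's default (push stays off unless explicitly granted). One plausible .hg/hgrc that lets only two authenticated users push:

    [web]
    # REMOTE_USER must be set by the web server and appear in this list;
    # "allow_push = *" would accept any request instead
    allow_push = alice, bob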
1051 | def do_unbundle(self, req): |
|
1051 | def do_unbundle(self, req): | |
1052 | def bail(response, headers={}): |
|
1052 | def bail(response, headers={}): | |
1053 | length = int(req.env['CONTENT_LENGTH']) |
|
1053 | length = int(req.env['CONTENT_LENGTH']) | |
1054 | for s in util.filechunkiter(req, limit=length): |
|
1054 | for s in util.filechunkiter(req, limit=length): | |
1055 | # drain incoming bundle, else client will not see |
|
1055 | # drain incoming bundle, else client will not see | |
1056 | # response when run outside cgi script |
|
1056 | # response when run outside cgi script | |
1057 | pass |
|
1057 | pass | |
1058 | req.httphdr("application/mercurial-0.1", headers=headers) |
|
1058 | req.httphdr("application/mercurial-0.1", headers=headers) | |
1059 | req.write('0\n') |
|
1059 | req.write('0\n') | |
1060 | req.write(response) |
|
1060 | req.write(response) | |
1061 |
|
1061 | |||
1062 | # require ssl by default, auth info cannot be sniffed and |
|
1062 | # require ssl by default, auth info cannot be sniffed and | |
1063 | # replayed |
|
1063 | # replayed | |
1064 | ssl_req = self.configbool('web', 'push_ssl', True) |
|
1064 | ssl_req = self.configbool('web', 'push_ssl', True) | |
1065 | if ssl_req: |
|
1065 | if ssl_req: | |
1066 | if not req.env.get('HTTPS'): |
|
1066 | if not req.env.get('HTTPS'): | |
1067 | bail(_('ssl required\n')) |
|
1067 | bail(_('ssl required\n')) | |
1068 | return |
|
1068 | return | |
1069 | proto = 'https' |
|
1069 | proto = 'https' | |
1070 | else: |
|
1070 | else: | |
1071 | proto = 'http' |
|
1071 | proto = 'http' | |
1072 |
|
1072 | |||
1073 | # do not allow push unless explicitly allowed |
|
1073 | # do not allow push unless explicitly allowed | |
1074 | if not self.check_perm(req, 'push', False): |
|
1074 | if not self.check_perm(req, 'push', False): | |
1075 | bail(_('push not authorized\n'), |
|
1075 | bail(_('push not authorized\n'), | |
1076 | headers={'status': '401 Unauthorized'}) |
|
1076 | headers={'status': '401 Unauthorized'}) | |
1077 | return |
|
1077 | return | |
1078 |
|
1078 | |||
1079 | their_heads = req.form['heads'][0].split(' ') |
|
1079 | their_heads = req.form['heads'][0].split(' ') | |
1080 |
|
1080 | |||
1081 | def check_heads(): |
|
1081 | def check_heads(): | |
1082 | heads = map(hex, self.repo.heads()) |
|
1082 | heads = map(hex, self.repo.heads()) | |
1083 | return their_heads == [hex('force')] or their_heads == heads |
|
1083 | return their_heads == [hex('force')] or their_heads == heads | |
1084 |
|
1084 | |||
1085 | # fail early if possible |
|
1085 | # fail early if possible | |
1086 | if not check_heads(): |
|
1086 | if not check_heads(): | |
1087 | bail(_('unsynced changes\n')) |
|
1087 | bail(_('unsynced changes\n')) | |
1088 | return |
|
1088 | return | |
1089 |
|
1089 | |||
1090 | req.httphdr("application/mercurial-0.1") |
|
1090 | req.httphdr("application/mercurial-0.1") | |
1091 |
|
1091 | |||
1092 | # do not lock repo until all changegroup data is |
|
1092 | # do not lock repo until all changegroup data is | |
1093 | # streamed. save to temporary file. |
|
1093 | # streamed. save to temporary file. | |
1094 |
|
1094 | |||
1095 | fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') |
|
1095 | fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') | |
1096 | fp = os.fdopen(fd, 'wb+') |
|
1096 | fp = os.fdopen(fd, 'wb+') | |
1097 | try: |
|
1097 | try: | |
1098 | length = int(req.env['CONTENT_LENGTH']) |
|
1098 | length = int(req.env['CONTENT_LENGTH']) | |
1099 | for s in util.filechunkiter(req, limit=length): |
|
1099 | for s in util.filechunkiter(req, limit=length): | |
1100 | fp.write(s) |
|
1100 | fp.write(s) | |
1101 |
|
1101 | |||
1102 | try: |
|
1102 | try: | |
1103 | lock = self.repo.lock() |
|
1103 | lock = self.repo.lock() | |
1104 | try: |
|
1104 | try: | |
1105 | if not check_heads(): |
|
1105 | if not check_heads(): | |
1106 | req.write('0\n') |
|
1106 | req.write('0\n') | |
1107 | req.write(_('unsynced changes\n')) |
|
1107 | req.write(_('unsynced changes\n')) | |
1108 | return |
|
1108 | return | |
1109 |
|
1109 | |||
1110 | fp.seek(0) |
|
1110 | fp.seek(0) | |
1111 | header = fp.read(6) |
|
1111 | header = fp.read(6) | |
1112 | if not header.startswith("HG"): |
|
1112 | if not header.startswith("HG"): | |
1113 | # old client with uncompressed bundle |
|
1113 | # old client with uncompressed bundle | |
1114 | def generator(f): |
|
1114 | def generator(f): | |
1115 | yield header |
|
1115 | yield header | |
1116 | for chunk in f: |
|
1116 | for chunk in f: | |
1117 | yield chunk |
|
1117 | yield chunk | |
1118 | elif not header.startswith("HG10"): |
|
1118 | elif not header.startswith("HG10"): | |
1119 | req.write("0\n") |
|
1119 | req.write("0\n") | |
1120 | req.write(_("unknown bundle version\n")) |
|
1120 | req.write(_("unknown bundle version\n")) | |
1121 | return |
|
1121 | return | |
1122 | elif header == "HG10GZ": |
|
1122 | elif header == "HG10GZ": | |
1123 | def generator(f): |
|
1123 | def generator(f): | |
1124 | zd = zlib.decompressobj() |
|
1124 | zd = zlib.decompressobj() | |
1125 | for chunk in f: |
|
1125 | for chunk in f: | |
1126 | yield zd.decompress(chunk) |
|
1126 | yield zd.decompress(chunk) | |
1127 | elif header == "HG10BZ": |
|
1127 | elif header == "HG10BZ": | |
1128 | def generator(f): |
|
1128 | def generator(f): | |
1129 | zd = bz2.BZ2Decompressor() |
|
1129 | zd = bz2.BZ2Decompressor() | |
1130 | zd.decompress("BZ") |
|
1130 | zd.decompress("BZ") | |
1131 | for chunk in f: |
|
1131 | for chunk in f: | |
1132 | yield zd.decompress(chunk) |
|
1132 | yield zd.decompress(chunk) | |
1133 | elif header == "HG10UN": |
|
1133 | elif header == "HG10UN": | |
1134 | def generator(f): |
|
1134 | def generator(f): | |
1135 | for chunk in f: |
|
1135 | for chunk in f: | |
1136 | yield chunk |
|
1136 | yield chunk | |
1137 | else: |
|
1137 | else: | |
1138 | req.write("0\n") |
|
1138 | req.write("0\n") | |
1139 | req.write(_("unknown bundle compression type\n")) |
|
1139 | req.write(_("unknown bundle compression type\n")) | |
1140 | return |
|
1140 | return | |
1141 | gen = generator(util.filechunkiter(fp, 4096)) |
|
1141 | gen = generator(util.filechunkiter(fp, 4096)) | |
1142 |
|
1142 | |||
1143 | # send addchangegroup output to client |
|
1143 | # send addchangegroup output to client | |
1144 |
|
1144 | |||
1145 | old_stdout = sys.stdout |
|
1145 | old_stdout = sys.stdout | |
1146 | sys.stdout = cStringIO.StringIO() |
|
1146 | sys.stdout = cStringIO.StringIO() | |
1147 |
|
1147 | |||
1148 | try: |
|
1148 | try: | |
1149 | url = 'remote:%s:%s' % (proto, |
|
1149 | url = 'remote:%s:%s' % (proto, | |
1150 | req.env.get('REMOTE_HOST', '')) |
|
1150 | req.env.get('REMOTE_HOST', '')) | |
1151 | try: |
|
1151 | try: | |
1152 | ret = self.repo.addchangegroup( |
|
1152 | ret = self.repo.addchangegroup( | |
1153 | util.chunkbuffer(gen), 'serve', url) |
|
1153 | util.chunkbuffer(gen), 'serve', url) | |
1154 | except util.Abort, inst: |
|
1154 | except util.Abort, inst: | |
1155 | sys.stdout.write("abort: %s\n" % inst) |
|
1155 | sys.stdout.write("abort: %s\n" % inst) | |
1156 | ret = 0 |
|
1156 | ret = 0 | |
1157 | finally: |
|
1157 | finally: | |
1158 | val = sys.stdout.getvalue() |
|
1158 | val = sys.stdout.getvalue() | |
1159 | sys.stdout = old_stdout |
|
1159 | sys.stdout = old_stdout | |
1160 | req.write('%d\n' % ret) |
|
1160 | req.write('%d\n' % ret) | |
1161 | req.write(val) |
|
1161 | req.write(val) | |
1162 | finally: |
|
1162 | finally: | |
1163 | lock.release() |
|
1163 | lock.release() | |
1164 | except (OSError, IOError), inst: |
|
1164 | except (OSError, IOError), inst: | |
1165 | req.write('0\n') |
|
1165 | req.write('0\n') | |
1166 | filename = getattr(inst, 'filename', '') |
|
1166 | filename = getattr(inst, 'filename', '') | |
1167 | # Don't send our filesystem layout to the client |
|
1167 | # Don't send our filesystem layout to the client | |
1168 | if filename.startswith(self.repo.root): |
|
1168 | if filename.startswith(self.repo.root): | |
1169 | filename = filename[len(self.repo.root)+1:] |
|
1169 | filename = filename[len(self.repo.root)+1:] | |
1170 | else: |
|
1170 | else: | |
1171 | filename = '' |
|
1171 | filename = '' | |
1172 | error = getattr(inst, 'strerror', 'Unknown error') |
|
1172 | error = getattr(inst, 'strerror', 'Unknown error') | |
1173 | req.write('%s: %s\n' % (error, filename)) |
|
1173 | req.write('%s: %s\n' % (error, filename)) | |
1174 | finally: |
|
1174 | finally: | |
1175 | fp.close() |
|
1175 | fp.close() | |
1176 | os.unlink(tempname) |
|
1176 | os.unlink(tempname) | |
1177 |
|
1177 | |||
1178 | def do_stream_out(self, req): |
|
1178 | def do_stream_out(self, req): | |
1179 | req.httphdr("application/mercurial-0.1") |
|
1179 | req.httphdr("application/mercurial-0.1") | |
1180 | streamclone.stream_out(self.repo, req) |
|
1180 | streamclone.stream_out(self.repo, req) |
@@ -1,290 +1,297 @@
1 | # templater.py - template expansion for output |
|
1 | # templater.py - template expansion for output | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from i18n import _ |
|
8 | from i18n import _ | |
9 | from node import * |
|
9 | from node import * | |
10 | import cgi, re, sys, os, time, urllib, util, textwrap |
|
10 | import cgi, re, sys, os, time, urllib, util, textwrap | |
11 |
|
11 | |||
12 | def parsestring(s, quoted=True): |
|
12 | def parsestring(s, quoted=True): | |
13 | '''parse a string using simple c-like syntax. |
|
13 | '''parse a string using simple c-like syntax. | |
14 | string must be in quotes if quoted is True.''' |
|
14 | string must be in quotes if quoted is True.''' | |
15 | if quoted: |
|
15 | if quoted: | |
16 | if len(s) < 2 or s[0] != s[-1]: |
|
16 | if len(s) < 2 or s[0] != s[-1]: | |
17 | raise SyntaxError(_('unmatched quotes')) |
|
17 | raise SyntaxError(_('unmatched quotes')) | |
18 | return s[1:-1].decode('string_escape') |
|
18 | return s[1:-1].decode('string_escape') | |
19 |
|
19 | |||
20 | return s.decode('string_escape') |
|
20 | return s.decode('string_escape') | |
21 |
|
21 | |||
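A quick illustration of the quoting rules, with values as they might appear in a map file:

    parsestring('"line one\\nline two"')      # -> 'line one\nline two'
    parsestring(r'tab\there', quoted=False)   # -> 'tab' + TAB + 'here'
    parsestring('"unterminated')              # raises SyntaxError: unmatched quotes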
22 | class templater(object): |
|
22 | class templater(object): | |
23 | '''template expansion engine. |
|
23 | '''template expansion engine. | |
24 |
|
24 | |||
25 | template expansion works like this. a map file contains key=value |
|
25 | template expansion works like this. a map file contains key=value | |
26 | pairs. if value is quoted, it is treated as string. otherwise, it |
|
26 | pairs. if value is quoted, it is treated as string. otherwise, it | |
27 | is treated as name of template file. |
|
27 | is treated as name of template file. | |
28 |
|
28 | |||
29 | templater is asked to expand a key in map. it looks up key, and |
|
29 | templater is asked to expand a key in map. it looks up key, and | |
30 | looks for strings like this: {foo}. it expands {foo} by looking up |
|
30 | looks for strings like this: {foo}. it expands {foo} by looking up | |
31 | foo in map, and substituting it. expansion is recursive: it stops |
|
31 | foo in map, and substituting it. expansion is recursive: it stops | |
32 | when there is no more {foo} to replace. |
|
32 | when there is no more {foo} to replace. | |
33 |
|
33 | |||
34 | expansion also allows formatting and filtering. |
|
34 | expansion also allows formatting and filtering. | |
35 |
|
35 | |||
36 | format uses key to expand each item in list. syntax is |
|
36 | format uses key to expand each item in list. syntax is | |
37 | {key%format}. |
|
37 | {key%format}. | |
38 |
|
38 | |||
39 | filter uses function to transform value. syntax is |
|
39 | filter uses function to transform value. syntax is | |
40 | {key|filter1|filter2|...}.''' |
|
40 | {key|filter1|filter2|...}.''' | |
41 |
|
41 | |||
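A small worked example of that syntax; the map entry, the mapfile path and the values are made up for illustration (stringify and common_filters are defined later in this file):

    # map file line:  changelogentry = "{rev}: {desc|firstline}\n"
    t = templater(mapfile, common_filters)
    text = stringify(t('changelogentry', rev=42, desc='fix bug\n\nlong details'))
    # text == '42: fix bug\n'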
42 | template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))" |
|
42 | template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))" | |
43 | r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]") |
|
43 | r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]") | |
44 |
|
44 | |||
45 | def __init__(self, mapfile, filters={}, defaults={}, cache={}): |
|
45 | def __init__(self, mapfile, filters={}, defaults={}, cache={}): | |
46 | '''set up template engine. |
|
46 | '''set up template engine. | |
47 | mapfile is name of file to read map definitions from. |
|
47 | mapfile is name of file to read map definitions from. | |
48 | filters is dict of functions. each transforms a value into another. |
|
48 | filters is dict of functions. each transforms a value into another. | |
49 | defaults is dict of default map definitions.''' |
|
49 | defaults is dict of default map definitions.''' | |
50 | self.mapfile = mapfile or 'template' |
|
50 | self.mapfile = mapfile or 'template' | |
51 | self.cache = cache.copy() |
|
51 | self.cache = cache.copy() | |
52 | self.map = {} |
|
52 | self.map = {} | |
53 | self.base = (mapfile and os.path.dirname(mapfile)) or '' |
|
53 | self.base = (mapfile and os.path.dirname(mapfile)) or '' | |
54 | self.filters = filters |
|
54 | self.filters = filters | |
55 | self.defaults = defaults |
|
55 | self.defaults = defaults | |
56 |
|
56 | |||
57 | if not mapfile: |
|
57 | if not mapfile: | |
58 | return |
|
58 | return | |
59 | i = 0 |
|
59 | i = 0 | |
60 | for l in file(mapfile): |
|
60 | for l in file(mapfile): | |
61 | l = l.strip() |
|
61 | l = l.strip() | |
62 | i += 1 |
|
62 | i += 1 | |
63 | if not l or l[0] in '#;': continue |
|
63 | if not l or l[0] in '#;': continue | |
64 | m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l) |
|
64 | m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l) | |
65 | if m: |
|
65 | if m: | |
66 | key, val = m.groups() |
|
66 | key, val = m.groups() | |
67 | if val[0] in "'\"": |
|
67 | if val[0] in "'\"": | |
68 | try: |
|
68 | try: | |
69 | self.cache[key] = parsestring(val) |
|
69 | self.cache[key] = parsestring(val) | |
70 | except SyntaxError, inst: |
|
70 | except SyntaxError, inst: | |
71 | raise SyntaxError('%s:%s: %s' % |
|
71 | raise SyntaxError('%s:%s: %s' % | |
72 | (mapfile, i, inst.args[0])) |
|
72 | (mapfile, i, inst.args[0])) | |
73 | else: |
|
73 | else: | |
74 | self.map[key] = os.path.join(self.base, val) |
|
74 | self.map[key] = os.path.join(self.base, val) | |
75 | else: |
|
75 | else: | |
76 | raise SyntaxError(_("%s:%s: parse error") % (mapfile, i)) |
|
76 | raise SyntaxError(_("%s:%s: parse error") % (mapfile, i)) | |
77 |
|
77 | |||
78 | def __contains__(self, key): |
|
78 | def __contains__(self, key): | |
79 | return key in self.cache or key in self.map |
|
79 | return key in self.cache or key in self.map | |
80 |
|
80 | |||
81 | def __call__(self, t, **map): |
|
81 | def __call__(self, t, **map): | |
82 | '''perform expansion. |
|
82 | '''perform expansion. | |
83 | t is name of map element to expand. |
|
83 | t is name of map element to expand. | |
84 | map is added elements to use during expansion.''' |
|
84 | map is added elements to use during expansion.''' | |
85 | if not self.cache.has_key(t): |
|
85 | if not self.cache.has_key(t): | |
86 | try: |
|
86 | try: | |
87 | self.cache[t] = file(self.map[t]).read() |
|
87 | self.cache[t] = file(self.map[t]).read() | |
88 | except IOError, inst: |
|
88 | except IOError, inst: | |
89 | raise IOError(inst.args[0], _('template file %s: %s') % |
|
89 | raise IOError(inst.args[0], _('template file %s: %s') % | |
90 | (self.map[t], inst.args[1])) |
|
90 | (self.map[t], inst.args[1])) | |
91 | tmpl = self.cache[t] |
|
91 | tmpl = self.cache[t] | |
92 |
|
92 | |||
93 | while tmpl: |
|
93 | while tmpl: | |
94 | m = self.template_re.search(tmpl) |
|
94 | m = self.template_re.search(tmpl) | |
95 | if not m: |
|
95 | if not m: | |
96 | yield tmpl |
|
96 | yield tmpl | |
97 | break |
|
97 | break | |
98 |
|
98 | |||
99 | start, end = m.span(0) |
|
99 | start, end = m.span(0) | |
100 | key, format, fl = m.groups() |
|
100 | key, format, fl = m.groups() | |
101 |
|
101 | |||
102 | if start: |
|
102 | if start: | |
103 | yield tmpl[:start] |
|
103 | yield tmpl[:start] | |
104 | tmpl = tmpl[end:] |
|
104 | tmpl = tmpl[end:] | |
105 |
|
105 | |||
106 | if key in map: |
|
106 | if key in map: | |
107 | v = map[key] |
|
107 | v = map[key] | |
108 | else: |
|
108 | else: | |
109 | v = self.defaults.get(key, "") |
|
109 | v = self.defaults.get(key, "") | |
110 | if callable(v): |
|
110 | if callable(v): | |
111 | v = v(**map) |
|
111 | v = v(**map) | |
112 | if format: |
|
112 | if format: | |
113 | if not hasattr(v, '__iter__'): |
|
113 | if not hasattr(v, '__iter__'): | |
114 | raise SyntaxError(_("Error expanding '%s%s'") |
|
114 | raise SyntaxError(_("Error expanding '%s%s'") | |
115 | % (key, format)) |
|
115 | % (key, format)) | |
116 | lm = map.copy() |
|
116 | lm = map.copy() | |
117 | for i in v: |
|
117 | for i in v: | |
118 | lm.update(i) |
|
118 | lm.update(i) | |
119 | yield self(format, **lm) |
|
119 | yield self(format, **lm) | |
120 | else: |
|
120 | else: | |
121 | if fl: |
|
121 | if fl: | |
122 | for f in fl.split("|")[1:]: |
|
122 | for f in fl.split("|")[1:]: | |
123 | v = self.filters[f](v) |
|
123 | v = self.filters[f](v) | |
124 | yield v |
|
124 | yield v | |
125 |
|
125 | |||
126 | agescales = [("second", 1), |
|
126 | agescales = [("second", 1), | |
127 | ("minute", 60), |
|
127 | ("minute", 60), | |
128 | ("hour", 3600), |
|
128 | ("hour", 3600), | |
129 | ("day", 3600 * 24), |
|
129 | ("day", 3600 * 24), | |
130 | ("week", 3600 * 24 * 7), |
|
130 | ("week", 3600 * 24 * 7), | |
131 | ("month", 3600 * 24 * 30), |
|
131 | ("month", 3600 * 24 * 30), | |
132 | ("year", 3600 * 24 * 365)] |
|
132 | ("year", 3600 * 24 * 365)] | |
133 |
|
133 | |||
134 | agescales.reverse() |
|
134 | agescales.reverse() | |
135 |
|
135 | |||
136 | def age(date): |
|
136 | def age(date): | |
137 | '''turn a (timestamp, tzoff) tuple into an age string.''' |
|
137 | '''turn a (timestamp, tzoff) tuple into an age string.''' | |
138 |
|
138 | |||
139 | def plural(t, c): |
|
139 | def plural(t, c): | |
140 | if c == 1: |
|
140 | if c == 1: | |
141 | return t |
|
141 | return t | |
142 | return t + "s" |
|
142 | return t + "s" | |
143 | def fmt(t, c): |
|
143 | def fmt(t, c): | |
144 | return "%d %s" % (c, plural(t, c)) |
|
144 | return "%d %s" % (c, plural(t, c)) | |
145 |
|
145 | |||
146 | now = time.time() |
|
146 | now = time.time() | |
147 | then = date[0] |
|
147 | then = date[0] | |
148 | delta = max(1, int(now - then)) |
|
148 | delta = max(1, int(now - then)) | |
149 |
|
149 | |||
150 | for t, s in agescales: |
|
150 | for t, s in agescales: | |
151 | n = delta / s |
|
151 | n = delta / s | |
152 | if n >= 2 or s == 1: |
|
152 | if n >= 2 or s == 1: | |
153 | return fmt(t, n) |
|
153 | return fmt(t, n) | |
154 |
|
154 | |||
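For instance, with the scales above a timestamp from two hours ago lands in the "hour" bucket, while anything under two minutes falls through to seconds:

    import time
    age((time.time() - 7200, 0))   # -> '2 hours'
    age((time.time() - 90, 0))     # -> '90 seconds'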
155 | def stringify(thing): |
|
155 | def stringify(thing): | |
156 | '''turn nested template iterator into string.''' |
|
156 | '''turn nested template iterator into string.''' | |
157 | if hasattr(thing, '__iter__'): |
|
157 | if hasattr(thing, '__iter__'): | |
158 | return "".join([stringify(t) for t in thing if t is not None]) |
|
158 | return "".join([stringify(t) for t in thing if t is not None]) | |
159 | return str(thing) |
|
159 | return str(thing) | |
160 |
|
160 | |||
161 | para_re = None |
|
161 | para_re = None | |
162 | space_re = None |
|
162 | space_re = None | |
163 |
|
163 | |||
164 | def fill(text, width): |
|
164 | def fill(text, width): | |
165 | '''fill many paragraphs.''' |
|
165 | '''fill many paragraphs.''' | |
166 | global para_re, space_re |
|
166 | global para_re, space_re | |
167 | if para_re is None: |
|
167 | if para_re is None: | |
168 | para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) |
|
168 | para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) | |
169 | space_re = re.compile(r' +') |
|
169 | space_re = re.compile(r' +') | |
170 |
|
170 | |||
171 | def findparas(): |
|
171 | def findparas(): | |
172 | start = 0 |
|
172 | start = 0 | |
173 | while True: |
|
173 | while True: | |
174 | m = para_re.search(text, start) |
|
174 | m = para_re.search(text, start) | |
175 | if not m: |
|
175 | if not m: | |
176 | w = len(text) |
|
176 | w = len(text) | |
177 | while w > start and text[w-1].isspace(): w -= 1 |
|
177 | while w > start and text[w-1].isspace(): w -= 1 | |
178 | yield text[start:w], text[w:] |
|
178 | yield text[start:w], text[w:] | |
179 | break |
|
179 | break | |
180 | yield text[start:m.start(0)], m.group(1) |
|
180 | yield text[start:m.start(0)], m.group(1) | |
181 | start = m.end(1) |
|
181 | start = m.end(1) | |
182 |
|
182 | |||
183 | return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest |
|
183 | return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest | |
184 | for para, rest in findparas()]) |
|
184 | for para, rest in findparas()]) | |
185 |
|
185 | |||
186 | def firstline(text): |
|
186 | def firstline(text): | |
187 | '''return the first line of text''' |
|
187 | '''return the first line of text''' | |
188 | try: |
|
188 | try: | |
189 | return text.splitlines(1)[0].rstrip('\r\n') |
|
189 | return text.splitlines(1)[0].rstrip('\r\n') | |
190 | except IndexError: |
|
190 | except IndexError: | |
191 | return '' |
|
191 | return '' | |
192 |
|
192 | |||
193 | def isodate(date): |
|
193 | def isodate(date): | |
194 | '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.''' |

194 | '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.''' | |
195 | return util.datestr(date, format='%Y-%m-%d %H:%M') |
|
195 | return util.datestr(date, format='%Y-%m-%d %H:%M') | |
196 |
|
196 | |||
197 | def hgdate(date): |
|
197 | def hgdate(date): | |
198 | '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.''' |
|
198 | '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.''' | |
199 | return "%d %d" % date |
|
199 | return "%d %d" % date | |
200 |
|
200 | |||
201 | def nl2br(text): |
|
201 | def nl2br(text): | |
202 | '''replace raw newlines with xhtml line breaks.''' |
|
202 | '''replace raw newlines with xhtml line breaks.''' | |
203 | return text.replace('\n', '<br/>\n') |
|
203 | return text.replace('\n', '<br/>\n') | |
204 |
|
204 | |||
205 | def obfuscate(text): |
|
205 | def obfuscate(text): | |
206 | text = unicode(text, util._encoding, 'replace') |
|
206 | text = unicode(text, util._encoding, 'replace') | |
207 | return ''.join(['&#%d;' % ord(c) for c in text]) |
|
207 | return ''.join(['&#%d;' % ord(c) for c in text]) | |
208 |
|
208 | |||
209 | def domain(author): |
|
209 | def domain(author): | |
210 | '''get domain of author, or empty string if none.''' |
|
210 | '''get domain of author, or empty string if none.''' | |
211 | f = author.find('@') |
|
211 | f = author.find('@') | |
212 | if f == -1: return '' |
|
212 | if f == -1: return '' | |
213 | author = author[f+1:] |
|
213 | author = author[f+1:] | |
214 | f = author.find('>') |
|
214 | f = author.find('>') | |
215 | if f >= 0: author = author[:f] |
|
215 | if f >= 0: author = author[:f] | |
216 | return author |
|
216 | return author | |
217 |
|
217 | |||
218 | def email(author): |
|
218 | def email(author): | |
219 | '''get email of author.''' |
|
219 | '''get email of author.''' | |
220 | r = author.find('>') |
|
220 | r = author.find('>') | |
221 | if r == -1: r = None |
|
221 | if r == -1: r = None | |
222 | return author[author.find('<')+1:r] |
|
222 | return author[author.find('<')+1:r] | |
223 |
|
223 | |||
224 | def person(author): |
|
224 | def person(author): | |
225 | '''get name of author, or else username.''' |
|
225 | '''get name of author, or else username.''' | |
226 | f = author.find('<') |
|
226 | f = author.find('<') | |
227 | if f == -1: return util.shortuser(author) |
|
227 | if f == -1: return util.shortuser(author) | |
228 | return author[:f].rstrip() |
|
228 | return author[:f].rstrip() | |
229 |
|
229 | |||
230 | def shortdate(date): |
|
230 | def shortdate(date): | |
231 | '''turn (timestamp, tzoff) tuple into iso 8601 date.''' |

231 | '''turn (timestamp, tzoff) tuple into iso 8601 date.''' | |
232 | return util.datestr(date, format='%Y-%m-%d', timezone=False) |
|
232 | return util.datestr(date, format='%Y-%m-%d', timezone=False) | |
233 |
|
233 | |||
234 | def indent(text, prefix): |
|
234 | def indent(text, prefix): | |
235 | '''indent each non-empty line of text after first with prefix.''' |
|
235 | '''indent each non-empty line of text after first with prefix.''' | |
236 | lines = text.splitlines() |
|
236 | lines = text.splitlines() | |
237 | num_lines = len(lines) |
|
237 | num_lines = len(lines) | |
238 | def indenter(): |
|
238 | def indenter(): | |
239 | for i in xrange(num_lines): |
|
239 | for i in xrange(num_lines): | |
240 | l = lines[i] |
|
240 | l = lines[i] | |
241 | if i and l.strip(): |
|
241 | if i and l.strip(): | |
242 | yield prefix |
|
242 | yield prefix | |
243 | yield l |
|
243 | yield l | |
244 | if i < num_lines - 1 or text.endswith('\n'): |
|
244 | if i < num_lines - 1 or text.endswith('\n'): | |
245 | yield '\n' |
|
245 | yield '\n' | |
246 | return "".join(indenter()) |
|
246 | return "".join(indenter()) | |
247 |
|
247 | |||
|
248 | def permissions(flags): | |||
|
249 | if "l" in flags: | |||
|
250 | return "lrwxrwxrwx" | |||
|
251 | if "x" in flags: | |||
|
252 | return "-rwxr-xr-x" | |||
|
253 | return "-rw-r--r--" | |||
|
254 | ||||
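The new permissions filter (added in this change, replacing the boolean lambda visible below) maps a manifest flag string to an ls-style mode string:

    permissions("l")   # -> 'lrwxrwxrwx'   symlink
    permissions("x")   # -> '-rwxr-xr-x'   executable
    permissions("")    # -> '-rw-r--r--'   regular file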
248 | common_filters = { |
|
255 | common_filters = { | |
249 | "addbreaks": nl2br, |
|
256 | "addbreaks": nl2br, | |
250 | "basename": os.path.basename, |
|
257 | "basename": os.path.basename, | |
251 | "age": age, |
|
258 | "age": age, | |
252 | "date": lambda x: util.datestr(x), |
|
259 | "date": lambda x: util.datestr(x), | |
253 | "domain": domain, |
|
260 | "domain": domain, | |
254 | "email": email, |
|
261 | "email": email, | |
255 | "escape": lambda x: cgi.escape(x, True), |
|
262 | "escape": lambda x: cgi.escape(x, True), | |
256 | "fill68": lambda x: fill(x, width=68), |
|
263 | "fill68": lambda x: fill(x, width=68), | |
257 | "fill76": lambda x: fill(x, width=76), |
|
264 | "fill76": lambda x: fill(x, width=76), | |
258 | "firstline": firstline, |
|
265 | "firstline": firstline, | |
259 | "tabindent": lambda x: indent(x, '\t'), |
|
266 | "tabindent": lambda x: indent(x, '\t'), | |
260 | "hgdate": hgdate, |
|
267 | "hgdate": hgdate, | |
261 | "isodate": isodate, |
|
268 | "isodate": isodate, | |
262 | "obfuscate": obfuscate, |
|
269 | "obfuscate": obfuscate, | |
263 | "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--", |
|
270 | "permissions": permissions, | |
264 | "person": person, |
|
271 | "person": person, | |
265 | "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"), |
|
272 | "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"), | |
266 | "short": lambda x: x[:12], |
|
273 | "short": lambda x: x[:12], | |
267 | "shortdate": shortdate, |
|
274 | "shortdate": shortdate, | |
268 | "stringify": stringify, |
|
275 | "stringify": stringify, | |
269 | "strip": lambda x: x.strip(), |
|
276 | "strip": lambda x: x.strip(), | |
270 | "urlescape": lambda x: urllib.quote(x), |
|
277 | "urlescape": lambda x: urllib.quote(x), | |
271 | "user": lambda x: util.shortuser(x), |
|
278 | "user": lambda x: util.shortuser(x), | |
272 | "stringescape": lambda x: x.encode('string_escape'), |
|
279 | "stringescape": lambda x: x.encode('string_escape'), | |
273 | } |
|
280 | } | |
274 |
|
281 | |||
275 | def templatepath(name=None): |
|
282 | def templatepath(name=None): | |
276 | '''return location of template file or directory (if no name). |
|
283 | '''return location of template file or directory (if no name). | |
277 | returns None if not found.''' |
|
284 | returns None if not found.''' | |
278 |
|
285 | |||
279 | # executable version (py2exe) doesn't support __file__ |
|
286 | # executable version (py2exe) doesn't support __file__ | |
280 | if hasattr(sys, 'frozen'): |
|
287 | if hasattr(sys, 'frozen'): | |
281 | module = sys.executable |
|
288 | module = sys.executable | |
282 | else: |
|
289 | else: | |
283 | module = __file__ |
|
290 | module = __file__ | |
284 | for f in 'templates', '../templates': |
|
291 | for f in 'templates', '../templates': | |
285 | fl = f.split('/') |
|
292 | fl = f.split('/') | |
286 | if name: fl.append(name) |
|
293 | if name: fl.append(name) | |
287 | p = os.path.join(os.path.dirname(module), *fl) |
|
294 | p = os.path.join(os.path.dirname(module), *fl) | |
288 | if (name and os.path.exists(p)) or os.path.isdir(p): |
|
295 | if (name and os.path.exists(p)) or os.path.isdir(p): | |
289 | return os.path.normpath(p) |
|
296 | return os.path.normpath(p) | |
290 |
|
297 |