@@ -0,0 +1,237 @@
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import cStringIO, zlib, bz2, tempfile, errno, os, sys
from mercurial import util, streamclone
from mercurial.i18n import gettext as _
from mercurial.node import *

def lookup(web, req):
    try:
        r = hex(web.repo.lookup(req.form['key'][0]))
        success = 1
    except Exception,inst:
        r = str(inst)
        success = 0
    resp = "%s %s\n" % (success, r)
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)

def heads(web, req):
    resp = " ".join(map(hex, web.repo.heads())) + "\n"
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)

def branches(web, req):
    nodes = []
    if req.form.has_key('nodes'):
        nodes = map(bin, req.form['nodes'][0].split(" "))
    resp = cStringIO.StringIO()
    for b in web.repo.branches(nodes):
        resp.write(" ".join(map(hex, b)) + "\n")
    resp = resp.getvalue()
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)

def between(web, req):
    if req.form.has_key('pairs'):
        pairs = [map(bin, p.split("-"))
                 for p in req.form['pairs'][0].split(" ")]
    resp = cStringIO.StringIO()
    for b in web.repo.between(pairs):
        resp.write(" ".join(map(hex, b)) + "\n")
    resp = resp.getvalue()
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)

def changegroup(web, req):
    req.httphdr("application/mercurial-0.1")
    nodes = []
    if not web.allowpull:
        return

    if req.form.has_key('roots'):
        nodes = map(bin, req.form['roots'][0].split(" "))

    z = zlib.compressobj()
    f = web.repo.changegroup(nodes, 'serve')
    while 1:
        chunk = f.read(4096)
        if not chunk:
            break
        req.write(z.compress(chunk))

    req.write(z.flush())

def changegroupsubset(web, req):
    req.httphdr("application/mercurial-0.1")
    bases = []
    heads = []
    if not web.allowpull:
        return

    if req.form.has_key('bases'):
        bases = [bin(x) for x in req.form['bases'][0].split(' ')]
    if req.form.has_key('heads'):
        heads = [bin(x) for x in req.form['heads'][0].split(' ')]

    z = zlib.compressobj()
    f = web.repo.changegroupsubset(bases, heads, 'serve')
    while 1:
        chunk = f.read(4096)
        if not chunk:
            break
        req.write(z.compress(chunk))

    req.write(z.flush())

def capabilities(web, req):
    caps = ['lookup', 'changegroupsubset']
    if web.configbool('server', 'uncompressed'):
        caps.append('stream=%d' % web.repo.changelog.version)
    # XXX: make configurable and/or share code with do_unbundle:
    unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
    if unbundleversions:
        caps.append('unbundle=%s' % ','.join(unbundleversions))
    resp = ' '.join(caps)
    req.httphdr("application/mercurial-0.1", length=len(resp))
    req.write(resp)

def unbundle(web, req):
    def bail(response, headers={}):
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            # drain incoming bundle, else client will not see
            # response when run outside cgi script
            pass
        req.httphdr("application/mercurial-0.1", headers=headers)
        req.write('0\n')
        req.write(response)

    # require ssl by default, auth info cannot be sniffed and
    # replayed
    ssl_req = web.configbool('web', 'push_ssl', True)
    if ssl_req:
        if req.env.get('wsgi.url_scheme') != 'https':
            bail(_('ssl required\n'))
            return
        proto = 'https'
    else:
        proto = 'http'

    # do not allow push unless explicitly allowed
    if not web.check_perm(req, 'push', False):
        bail(_('push not authorized\n'),
             headers={'status': '401 Unauthorized'})
        return

    their_heads = req.form['heads'][0].split(' ')

    def check_heads():
        heads = map(hex, web.repo.heads())
        return their_heads == [hex('force')] or their_heads == heads

    # fail early if possible
    if not check_heads():
        bail(_('unsynced changes\n'))
        return

    req.httphdr("application/mercurial-0.1")

    # do not lock repo until all changegroup data is
    # streamed. save to temporary file.

    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    try:
        length = int(req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(req, limit=length):
            fp.write(s)

        try:
            lock = web.repo.lock()
            try:
                if not check_heads():
                    req.write('0\n')
                    req.write(_('unsynced changes\n'))
                    return

                fp.seek(0)
                header = fp.read(6)
                if not header.startswith("HG"):
                    # old client with uncompressed bundle
                    def generator(f):
                        yield header
                        for chunk in f:
                            yield chunk
                elif not header.startswith("HG10"):
                    req.write("0\n")
                    req.write(_("unknown bundle version\n"))
                    return
                elif header == "HG10GZ":
                    def generator(f):
                        zd = zlib.decompressobj()
                        for chunk in f:
                            yield zd.decompress(chunk)
                elif header == "HG10BZ":
                    def generator(f):
                        zd = bz2.BZ2Decompressor()
                        zd.decompress("BZ")
                        for chunk in f:
                            yield zd.decompress(chunk)
                elif header == "HG10UN":
                    def generator(f):
                        for chunk in f:
                            yield chunk
                else:
                    req.write("0\n")
                    req.write(_("unknown bundle compression type\n"))
                    return
                gen = generator(util.filechunkiter(fp, 4096))

                # send addchangegroup output to client

                old_stdout = sys.stdout
                sys.stdout = cStringIO.StringIO()

                try:
                    url = 'remote:%s:%s' % (proto,
                                            req.env.get('REMOTE_HOST', ''))
                    try:
                        ret = web.repo.addchangegroup(
                                    util.chunkbuffer(gen), 'serve', url)
                    except util.Abort, inst:
                        sys.stdout.write("abort: %s\n" % inst)
                        ret = 0
                finally:
                    val = sys.stdout.getvalue()
                    sys.stdout = old_stdout
                req.write('%d\n' % ret)
                req.write(val)
            finally:
                del lock
        except (OSError, IOError), inst:
            req.write('0\n')
            filename = getattr(inst, 'filename', '')
            # Don't send our filesystem layout to the client
            if filename.startswith(web.repo.root):
                filename = filename[len(web.repo.root)+1:]
            else:
                filename = ''
            error = getattr(inst, 'strerror', 'Unknown error')
            if inst.errno == errno.ENOENT:
                code = 404
            else:
                code = 500
            req.respond(code, '%s: %s\n' % (error, filename))
    finally:
        fp.close()
        os.unlink(tempname)

def stream_out(web, req):
    req.httphdr("application/mercurial-0.1")
    streamclone.stream_out(web.repo, req, untrusted=True)
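The responses written above are plain application/mercurial-0.1 payloads. As a rough illustration only (not part of the patch; the helper name is hypothetical), this is how a client could parse the body that lookup() writes, which per the code is "1 <hex node>\n" on success and "0 <error message>\n" on failure:

# Hypothetical client-side helper, assuming only the format produced by
# protocol.lookup() above.
def parse_lookup_response(body):
    success, rest = body.rstrip('\n').split(' ', 1)
    if success == '1':
        return rest  # 40-character hex changeset id
    raise ValueError('lookup failed: %s' % rest)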
@@ -0,0 +1,92 @@
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os
from mercurial import revlog
from common import staticfile

def log(web, req, tmpl):
    if req.form.has_key('file') and req.form['file'][0]:
        filelog(web, req, tmpl)
    else:
        changelog(web, req, tmpl)

def file(web, req, tmpl):
    path = web.cleanpath(req.form.get('file', [''])[0])
    if path:
        try:
            req.write(web.filerevision(tmpl, web.filectx(req)))
            return
        except revlog.LookupError:
            pass

    req.write(web.manifest(tmpl, web.changectx(req), path))

def changelog(web, req, tmpl, shortlog = False):
    if req.form.has_key('node'):
        ctx = web.changectx(req)
    else:
        if req.form.has_key('rev'):
            hi = req.form['rev'][0]
        else:
            hi = web.repo.changelog.count() - 1
        try:
            ctx = web.repo.changectx(hi)
        except hg.RepoError:
            req.write(web.search(tmpl, hi)) # XXX redirect to 404 page?
            return

    req.write(web.changelog(tmpl, ctx, shortlog = shortlog))

def shortlog(web, req, tmpl):
    changelog(web, req, tmpl, shortlog = True)

def changeset(web, req, tmpl):
    req.write(web.changeset(tmpl, web.changectx(req)))

rev = changeset

def manifest(web, req, tmpl):
    req.write(web.manifest(tmpl, web.changectx(req),
                           web.cleanpath(req.form['path'][0])))

def tags(web, req, tmpl):
    req.write(web.tags(tmpl))

def summary(web, req, tmpl):
    req.write(web.summary(tmpl))

def filediff(web, req, tmpl):
    req.write(web.filediff(tmpl, web.filectx(req)))

diff = filediff

def annotate(web, req, tmpl):
    req.write(web.fileannotate(tmpl, web.filectx(req)))

def filelog(web, req, tmpl):
    req.write(web.filelog(tmpl, web.filectx(req)))

def archive(web, req, tmpl):
    type_ = req.form['type'][0]
    allowed = web.configlist("web", "allow_archive")
    if (type_ in web.archives and (type_ in allowed or
        web.configbool("web", "allow" + type_, False))):
        web.archive(tmpl, req, req.form['node'][0], type_)
        return

    req.respond(400, tmpl('error',
                          error='Unsupported archive type: %s' % type_))

def static(web, req, tmpl):
    fname = req.form['file'][0]
    # a repo owner may set web.static in .hg/hgrc to get any file
    # readable by the user running the CGI script
    static = web.config("web", "static",
                        os.path.join(web.templatepath, "static"),
                        untrusted=False)
    req.write(staticfile(static, fname, req))
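Every function in this module takes the same (web, req, tmpl) arguments, which is what lets hgweb dispatch to it by name. A simplified sketch of that dispatch, condensed from the run_wsgi() hunk further down (error handling and form rewriting trimmed):

# Sketch only; the real logic lives in hgweb.run_wsgi() below.
import webcommands, protocol

def dispatch(web, req, tmpl, cmd):
    # wire-protocol commands need no templater; web commands do
    if hasattr(protocol, cmd):
        getattr(protocol, cmd)(web, req)
    else:
        getattr(webcommands, cmd)(web, req, tmpl)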
@@ -0,0 +1,53 @@
#!/bin/sh
# An attempt at more fully testing the hgweb web interface.
# The following things are tested elsewhere and are therefore omitted:
# - archive, tested in test-archive
# - unbundle, tested in test-push-http
# - changegroupsubset, tested in test-pull

echo % Set up the repo
hg init test
cd test
mkdir da
echo foo > da/foo
echo foo > foo
hg ci -d'0 0' -Ambase
hg tag 1.0
hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
cat hg.pid >> $DAEMON_PIDS

echo % Logs and changes
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/' | sed "s/[0-9]* years/many years/"
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'

echo % File-related
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'

echo % Overviews
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/tags/?style=atom' | sed "s/http:\/\/[^/]*\//http:\/\/127.0.0.1\//"
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb' | sed "s/[0-9]* years ago/long ago/"

echo % capabilities
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/capabilities'
echo % heads
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/heads'
echo % lookup
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/lookup/1'
echo % branches
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/branches'
echo % changegroup
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/changegroup'
echo % stream_out
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/stream_out'

echo % Static files
"$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'

echo % ERRORS ENCOUNTERED
cat errors.log
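The test drives every URL through $TESTDIR/get-with-headers.py. As an illustration only -- assuming that helper simply issues an HTTP GET against the running "hg serve" and prints the status and body -- an equivalent standalone fetch in Python 2 would look like:

# Illustrative stand-in for get-with-headers.py; not part of the patch.
import httplib

def get(hostport, path):
    host, port = hostport.split(':')
    conn = httplib.HTTPConnection(host, int(port))
    conn.request("GET", path)
    resp = conn.getresponse()
    print resp.status, resp.reason
    print resp.read()

# e.g. get('127.0.0.1:8000', '/capabilities') -- per protocol.capabilities()
# above, the body is a space-separated list of server capabilities.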
NO CONTENT: new file 100644, binary diff hidden
NO CONTENT: new file 100644, binary diff hidden
@@ -610,7 +610,7 @@ class svn_source(converter_source):
            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message)
            author = author and self.recode(author) or ''
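A quick worked example (illustrative, not in the patch) of why the date is sliced to 19 characters: the sample value from the comment parses cleanly with the same format string once the microseconds and trailing 'Z' are dropped.

import time
sample = '2007-01-04T17:35:00.902377Z'
trimmed = sample[:19]                       # '2007-01-04T17:35:00'
parsed = time.strptime(trimmed, "%Y-%m-%dT%H:%M:%S")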
@@ -82,14 +82,17 @@ def pygments_format(filename, rawtext, f
    return highlight(rawtext, lexer, formatter)


def filerevision_pygments(self, tmpl, fctx):
    """Reimplement hgweb.filerevision to use syntax highlighting"""
    f = fctx.path()

    rawtext = fctx.data()
    text = rawtext

    fl = fctx.filelog()
    n = fctx.filenode()

    mt = mimetypes.guess_type(f)[0]

    if util.binary(text):
        mt = mt or 'application/octet-stream'
@@ -107,36 +110,35 @@ def filerevision_pygments(self, fctx):

    style = self.config("web", "pygments_style", "colorful")

    text_formatted = lines(pygments_format(f, text,
                                           forcetext=forcetext,
                                           stripecount=self.stripecount,
                                           style=style))

    # override per-line template
    tmpl.cache['fileline'] = '#line#'

    # append a <link ...> to the syntax highlighting css
    old_header = ''.join(tmpl('header'))
    if SYNTAX_CSS not in old_header:
        new_header = old_header + SYNTAX_CSS
        tmpl.cache['header'] = new_header

    yield tmpl("filerevision",
               file=f,
               path=hgweb_mod._up(f), # fixme: make public
               text=text_formatted,
               raw=rawtext,
               mimetype=mt,
               rev=fctx.rev(),
               node=hex(fctx.node()),
               author=fctx.user(),
               date=fctx.date(),
               desc=fctx.description(),
               parent=self.siblings(fctx.parents()),
               child=self.siblings(fctx.children()),
               rename=self.renamelink(fl, n),
               permissions=fctx.manifest().flags(f))


# monkeypatch in the new version
@@ -6,14 +6,28 @@
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, mimetypes, re, mimetools, cStringIO
from mercurial.node import *
from mercurial import mdiff, ui, hg, util, archival, patch
from mercurial import revlog, templater
from common import ErrorResponse, get_mtime, style_map, paritygen
from request import wsgirequest
import webcommands, protocol

shortcuts = {
    'cl': [('cmd', ['changelog']), ('rev', None)],
    'sl': [('cmd', ['shortlog']), ('rev', None)],
    'cs': [('cmd', ['changeset']), ('node', None)],
    'f': [('cmd', ['file']), ('filenode', None)],
    'fl': [('cmd', ['filelog']), ('filenode', None)],
    'fd': [('cmd', ['filediff']), ('node', None)],
    'fa': [('cmd', ['annotate']), ('filenode', None)],
    'mf': [('cmd', ['manifest']), ('manifest', None)],
    'ca': [('cmd', ['archive']), ('node', None)],
    'tags': [('cmd', ['tags'])],
    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
    'static': [('cmd', ['static']), ('file', None)]
}

def _up(p):
    if p[0] != "/":
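A worked example (for illustration only, not part of the patch) of what the shortcuts table above does once run_wsgi() expands the form:

# before expansion: req.form == {'cl': ['tip']}
# 'cl' maps to [('cmd', ['changelog']), ('rev', None)]; a None slot receives
# the original value, so after expansion:
#   req.form == {'cmd': ['changelog'], 'rev': ['tip']}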
@@ -107,17 +121,200 @@ class hgweb(object):
        self.allowpull = self.configbool("web", "allowpull", True)
        self.encoding = self.config("web", "encoding", util._encoding)

    def run(self):
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
        import mercurial.hgweb.wsgicgi as wsgicgi
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        req = wsgirequest(env, respond)
        self.run_wsgi(req)
        return req

    def run_wsgi(self, req):

        self.refresh()

        # expand form shortcuts

        for k in shortcuts.iterkeys():
            if k in req.form:
                for name, value in shortcuts[k]:
                    if value is None:
                        value = req.form[k]
                    req.form[name] = value
                del req.form[k]

        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME

        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
        if req.env.has_key('REPO_NAME'):
            req.url += req.env['REPO_NAME'] + '/'

        if req.env.get('PATH_INFO'):
            parts = req.env.get('PATH_INFO').strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].split('&', 1)[0]
            query = query.split(';', 1)[0]

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:

            cmd = args.pop(0)
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style+1:]

            # avoid accepting e.g. style parameter as command
            if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
                req.form['cmd'] = [cmd]

            if args and args[0]:
                node = args.pop(0)
                req.form['node'] = [node]
            if args:
                req.form['file'] = args

            if cmd == 'static':
                req.form['file'] = req.form['node']
            elif cmd == 'archive':
                fn = req.form['node'][0]
                for type_, spec in self.archive_specs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        # actually process the request

        try:

            cmd = req.form.get('cmd', [''])[0]
            if hasattr(protocol, cmd):
                method = getattr(protocol, cmd)
                method(self, req)
            else:
                tmpl = self.templater(req)
                if cmd == '':
                    req.form['cmd'] = [tmpl.cache['default']]
                    cmd = req.form['cmd'][0]
                method = getattr(webcommands, cmd)
                method(self, req, tmpl)
                del tmpl

        except revlog.LookupError, err:
            req.respond(404, tmpl(
                'error', error='revision not found: %s' % err.name))
        except (hg.RepoError, revlog.RevlogError), inst:
            req.respond('500 Internal Server Error',
                        tmpl('error', error=str(inst)))
        except ErrorResponse, inst:
            req.respond(inst.code, tmpl('error', error=inst.message))
        except AttributeError:
            req.respond(400, tmpl('error', error='No such method: ' + cmd))

    def templater(self, req):

        # determine scheme, port and server name
        # this is needed to create absolute urls

        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = "443"
        else:
            proto = 'http'
            default_port = "80"

        port = req.env["SERVER_PORT"]
        port = port != default_port and (":" + port) or ""
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        staticurl = self.config("web", "staticurl") or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def header(**map):
            header_file = cStringIO.StringIO(
                ''.join(tmpl("header", encoding=self.encoding, **map)))
            msg = mimetools.Message(header_file, 0)
            req.header(msg.items())
            yield header_file.read()

        def rawfileheader(**map):
            req.header([('Content-type', map['mimetype']),
                        ('Content-disposition', 'filename=%s' % map['file']),
                        ('Content-length', str(len(map['raw'])))])
            yield ''

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            yield self.config("web", "motd", "")

        def sessionvars(**map):
            fields = []
            if req.form.has_key('style'):
                style = req.form['style'][0]
                if style != self.config('web', 'style', ''):
                    fields.append(('style', style))

            separator = req.url[-1] == '?' and ';' or '?'
            for name, value in fields:
                yield dict(name=name, value=value, separator=separator)
                separator = ';'

        # figure out which style to use

        style = self.config("web", "style", "")
        if req.form.has_key('style'):
            style = req.form['style'][0]
        mapfile = style_map(self.templatepath, style)

        if not self.reponame:
            self.reponame = (self.config("web", "name")
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        # create the templater

        tmpl = templater.templater(mapfile, templater.common_filters,
                                   defaults={"url": req.url,
                                             "staticurl": staticurl,
                                             "urlbase": urlbase,
                                             "repo": self.reponame,
                                             "header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "rawfileheader": rawfileheader,
                                             "sessionvars": sessionvars
                                             })
        return tmpl

    def archivelist(self, nodeid):
        allowed = self.configlist("web", "allow_archive")
        for i, spec in self.archive_specs.iteritems():
            if i in allowed or self.configbool("web", "allow" + i):
                yield {"type" : i, "extension" : spec[2], "node" : nodeid}

    def listfilediffs(self, tmpl, files, changeset):
        for f in files[:self.maxfiles]:
            yield tmpl("filedifflink", node=hex(changeset), file=f)
        if len(files) > self.maxfiles:
            yield tmpl("fileellipses")

    def siblings(self, siblings=[], hiderev=None, **args):
        siblings = [s for s in siblings if s.node() != nullid]
@@ -149,11 +346,11 b' class hgweb(object):' | |||||
149 | branches.append({"name": branch}) |
|
346 | branches.append({"name": branch}) | |
150 | return branches |
|
347 | return branches | |
151 |
|
348 | |||
152 | def showtag(self, t1, node=nullid, **args): |
|
349 | def showtag(self, tmpl, t1, node=nullid, **args): | |
153 | for t in self.repo.nodetags(node): |
|
350 | for t in self.repo.nodetags(node): | |
154 |
yield |
|
351 | yield tmpl(t1, tag=t, **args) | |
155 |
|
352 | |||
156 | def diff(self, node1, node2, files): |
|
353 | def diff(self, tmpl, node1, node2, files): | |
157 | def filterfiles(filters, files): |
|
354 | def filterfiles(filters, files): | |
158 | l = [x for x in files if x in filters] |
|
355 | l = [x for x in files if x in filters] | |
159 |
|
356 | |||
@@ -165,22 +362,22 b' class hgweb(object):' | |||||
165 |
|
362 | |||
166 | parity = paritygen(self.stripecount) |
|
363 | parity = paritygen(self.stripecount) | |
167 | def diffblock(diff, f, fn): |
|
364 | def diffblock(diff, f, fn): | |
168 |
yield |
|
365 | yield tmpl("diffblock", | |
169 |
|
|
366 | lines=prettyprintlines(diff), | |
170 |
|
|
367 | parity=parity.next(), | |
171 |
|
|
368 | file=f, | |
172 |
|
|
369 | filenode=hex(fn or nullid)) | |
173 |
|
370 | |||
174 | def prettyprintlines(diff): |
|
371 | def prettyprintlines(diff): | |
175 | for l in diff.splitlines(1): |
|
372 | for l in diff.splitlines(1): | |
176 | if l.startswith('+'): |
|
373 | if l.startswith('+'): | |
177 |
yield |
|
374 | yield tmpl("difflineplus", line=l) | |
178 | elif l.startswith('-'): |
|
375 | elif l.startswith('-'): | |
179 |
yield |
|
376 | yield tmpl("difflineminus", line=l) | |
180 | elif l.startswith('@'): |
|
377 | elif l.startswith('@'): | |
181 |
yield |
|
378 | yield tmpl("difflineat", line=l) | |
182 | else: |
|
379 | else: | |
183 |
yield |
|
380 | yield tmpl("diffline", line=l) | |
184 |
|
381 | |||
185 | r = self.repo |
|
382 | r = self.repo | |
186 | c1 = r.changectx(node1) |
|
383 | c1 = r.changectx(node1) | |
@@ -210,7 +407,7 b' class hgweb(object):' | |||||
210 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f, |
|
407 | yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f, | |
211 | opts=diffopts), f, tn) |
|
408 | opts=diffopts), f, tn) | |
212 |
|
409 | |||
213 | def changelog(self, ctx, shortlog=False): |
|
410 | def changelog(self, tmpl, ctx, shortlog=False): | |
214 | def changelist(limit=0,**map): |
|
411 | def changelist(limit=0,**map): | |
215 | cl = self.repo.changelog |
|
412 | cl = self.repo.changelog | |
216 | l = [] # build a list in forward order for efficiency |
|
413 | l = [] # build a list in forward order for efficiency | |
@@ -225,7 +422,7 b' class hgweb(object):' | |||||
225 | "changelogtag": self.showtag("changelogtag",n), |
|
422 | "changelogtag": self.showtag("changelogtag",n), | |
226 | "desc": ctx.description(), |
|
423 | "desc": ctx.description(), | |
227 | "date": ctx.date(), |
|
424 | "date": ctx.date(), | |
228 | "files": self.listfilediffs(ctx.files(), n), |
|
425 | "files": self.listfilediffs(tmpl, ctx.files(), n), | |
229 | "rev": i, |
|
426 | "rev": i, | |
230 | "node": hex(n), |
|
427 | "node": hex(n), | |
231 | "tags": self.nodetagsdict(n), |
|
428 | "tags": self.nodetagsdict(n), | |
@@ -248,15 +445,15 b' class hgweb(object):' | |||||
248 |
|
445 | |||
249 | changenav = revnavgen(pos, maxchanges, count, self.repo.changectx) |
|
446 | changenav = revnavgen(pos, maxchanges, count, self.repo.changectx) | |
250 |
|
447 | |||
251 |
yield |
|
448 | yield tmpl(shortlog and 'shortlog' or 'changelog', | |
252 |
|
|
449 | changenav=changenav, | |
253 |
|
|
450 | node=hex(cl.tip()), | |
254 |
|
|
451 | rev=pos, changesets=count, | |
255 |
|
|
452 | entries=lambda **x: changelist(limit=0,**x), | |
256 |
|
|
453 | latestentry=lambda **x: changelist(limit=1,**x), | |
257 |
|
|
454 | archives=self.archivelist("tip")) | |
258 |
|
455 | |||
259 | def search(self, query): |
|
456 | def search(self, tmpl, query): | |
260 |
|
457 | |||
261 | def changelist(**map): |
|
458 | def changelist(**map): | |
262 | cl = self.repo.changelog |
|
459 | cl = self.repo.changelog | |
@@ -287,19 +484,19 b' class hgweb(object):' | |||||
287 | count += 1 |
|
484 | count += 1 | |
288 | n = ctx.node() |
|
485 | n = ctx.node() | |
289 |
|
486 | |||
290 |
yield |
|
487 | yield tmpl('searchentry', | |
291 |
|
|
488 | parity=parity.next(), | |
292 |
|
|
489 | author=ctx.user(), | |
293 |
|
|
490 | parent=self.siblings(ctx.parents()), | |
294 |
|
|
491 | child=self.siblings(ctx.children()), | |
295 |
|
|
492 | changelogtag=self.showtag("changelogtag",n), | |
296 |
|
|
493 | desc=ctx.description(), | |
297 |
|
|
494 | date=ctx.date(), | |
298 |
|
|
495 | files=self.listfilediffs(tmpl, ctx.files(), n), | |
299 |
|
|
496 | rev=ctx.rev(), | |
300 |
|
|
497 | node=hex(n), | |
301 |
|
|
498 | tags=self.nodetagsdict(n), | |
302 |
|
|
499 | branches=self.nodebranchdict(ctx)) | |
303 |
|
500 | |||
304 | if count >= self.maxchanges: |
|
501 | if count >= self.maxchanges: | |
305 | break |
|
502 | break | |
@@ -307,13 +504,13 b' class hgweb(object):' | |||||
307 | cl = self.repo.changelog |
|
504 | cl = self.repo.changelog | |
308 | parity = paritygen(self.stripecount) |
|
505 | parity = paritygen(self.stripecount) | |
309 |
|
506 | |||
310 |
yield |
|
507 | yield tmpl('search', | |
311 |
|
|
508 | query=query, | |
312 |
|
|
509 | node=hex(cl.tip()), | |
313 |
|
|
510 | entries=changelist, | |
314 |
|
|
511 | archives=self.archivelist("tip")) | |
315 |
|
512 | |||
316 | def changeset(self, ctx): |
|
513 | def changeset(self, tmpl, ctx): | |
317 | n = ctx.node() |
|
514 | n = ctx.node() | |
318 | parents = ctx.parents() |
|
515 | parents = ctx.parents() | |
319 | p1 = parents[0].node() |
|
516 | p1 = parents[0].node() | |
@@ -321,29 +518,29 b' class hgweb(object):' | |||||
321 | files = [] |
|
518 | files = [] | |
322 | parity = paritygen(self.stripecount) |
|
519 | parity = paritygen(self.stripecount) | |
323 | for f in ctx.files(): |
|
520 | for f in ctx.files(): | |
324 |
files.append( |
|
521 | files.append(tmpl("filenodelink", | |
325 |
|
|
522 | node=hex(n), file=f, | |
326 |
|
|
523 | parity=parity.next())) | |
327 |
|
524 | |||
328 | def diff(**map): |
|
525 | def diff(**map): | |
329 | yield self.diff(p1, n, None) |
|
526 | yield self.diff(tmpl, p1, n, None) | |
330 |
|
527 | |||
331 |
yield |
|
528 | yield tmpl('changeset', | |
332 |
|
|
529 | diff=diff, | |
333 |
|
|
530 | rev=ctx.rev(), | |
334 |
|
|
531 | node=hex(n), | |
335 |
|
|
532 | parent=self.siblings(parents), | |
336 |
|
|
533 | child=self.siblings(ctx.children()), | |
337 |
|
|
534 | changesettag=self.showtag("changesettag",n), | |
338 |
|
|
535 | author=ctx.user(), | |
339 |
|
|
536 | desc=ctx.description(), | |
340 |
|
|
537 | date=ctx.date(), | |
341 |
|
|
538 | files=files, | |
342 |
|
|
539 | archives=self.archivelist(hex(n)), | |
343 |
|
|
540 | tags=self.nodetagsdict(n), | |
344 |
|
|
541 | branches=self.nodebranchdict(ctx)) | |
345 |
|
542 | |||
346 | def filelog(self, fctx): |
|
543 | def filelog(self, tmpl, fctx): | |
347 | f = fctx.path() |
|
544 | f = fctx.path() | |
348 | fl = fctx.filelog() |
|
545 | fl = fctx.filelog() | |
349 | count = fl.count() |
|
546 | count = fl.count() | |
@@ -380,11 +577,11 b' class hgweb(object):' | |||||
380 |
|
577 | |||
381 | nodefunc = lambda x: fctx.filectx(fileid=x) |
|
578 | nodefunc = lambda x: fctx.filectx(fileid=x) | |
382 | nav = revnavgen(pos, pagelen, count, nodefunc) |
|
579 | nav = revnavgen(pos, pagelen, count, nodefunc) | |
383 |
yield |
|
580 | yield tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav, | |
384 |
|
|
581 | entries=lambda **x: entries(limit=0, **x), | |
385 |
|
|
582 | latestentry=lambda **x: entries(limit=1, **x)) | |
386 |
|
583 | |||
387 | def filerevision(self, fctx): |
|
584 | def filerevision(self, tmpl, fctx): | |
388 | f = fctx.path() |
|
585 | f = fctx.path() | |
389 | text = fctx.data() |
|
586 | text = fctx.data() | |
390 | fl = fctx.filelog() |
|
587 | fl = fctx.filelog() | |
@@ -404,23 +601,23 b' class hgweb(object):' | |||||
404 | "linenumber": "% 6d" % (l + 1), |
|
601 | "linenumber": "% 6d" % (l + 1), | |
405 | "parity": parity.next()} |
|
602 | "parity": parity.next()} | |
406 |
|
603 | |||
407 |
yield |
|
604 | yield tmpl("filerevision", | |
408 |
|
|
605 | file=f, | |
409 |
|
|
606 | path=_up(f), | |
410 |
|
|
607 | text=lines(), | |
411 |
|
|
608 | raw=rawtext, | |
412 |
|
|
609 | mimetype=mt, | |
413 |
|
|
610 | rev=fctx.rev(), | |
414 |
|
|
611 | node=hex(fctx.node()), | |
415 |
|
|
612 | author=fctx.user(), | |
416 |
|
|
613 | date=fctx.date(), | |
417 |
|
|
614 | desc=fctx.description(), | |
418 |
|
|
615 | parent=self.siblings(fctx.parents()), | |
419 |
|
|
616 | child=self.siblings(fctx.children()), | |
420 |
|
|
617 | rename=self.renamelink(fl, n), | |
421 |
|
|
618 | permissions=fctx.manifest().flags(f)) | |
422 |
|
619 | |||
423 | def fileannotate(self, fctx): |
|
620 | def fileannotate(self, tmpl, fctx): | |
424 | f = fctx.path() |
|
621 | f = fctx.path() | |
425 | n = fctx.filenode() |
|
622 | n = fctx.filenode() | |
426 | fl = fctx.filelog() |
|
623 | fl = fctx.filelog() | |
@@ -442,21 +639,21 b' class hgweb(object):' | |||||
442 | "file": f.path(), |
|
639 | "file": f.path(), | |
443 | "line": l} |
|
640 | "line": l} | |
444 |
|
641 | |||
445 |
yield |
|
642 | yield tmpl("fileannotate", | |
446 |
|
|
643 | file=f, | |
447 |
|
|
644 | annotate=annotate, | |
448 |
|
|
645 | path=_up(f), | |
449 |
|
|
646 | rev=fctx.rev(), | |
450 |
|
|
647 | node=hex(fctx.node()), | |
451 |
|
|
648 | author=fctx.user(), | |
452 |
|
|
649 | date=fctx.date(), | |
453 |
|
|
650 | desc=fctx.description(), | |
454 |
|
|
651 | rename=self.renamelink(fl, n), | |
455 |
|
|
652 | parent=self.siblings(fctx.parents()), | |
456 |
|
|
653 | child=self.siblings(fctx.children()), | |
457 |
|
|
654 | permissions=fctx.manifest().flags(f)) | |
458 |
|
655 | |||
459 | def manifest(self, ctx, path): |
|
656 | def manifest(self, tmpl, ctx, path): | |
460 | mf = ctx.manifest() |
|
657 | mf = ctx.manifest() | |
461 | node = ctx.node() |
|
658 | node = ctx.node() | |
462 |
|
659 | |||
@@ -510,19 +707,19 b' class hgweb(object):' | |||||
510 | "path": "%s%s" % (abspath, f), |
|
707 | "path": "%s%s" % (abspath, f), | |
511 | "basename": f[:-1]} |
|
708 | "basename": f[:-1]} | |
512 |
|
709 | |||
513 |
yield |
|
710 | yield tmpl("manifest", | |
514 |
|
|
711 | rev=ctx.rev(), | |
515 |
|
|
712 | node=hex(node), | |
516 |
|
|
713 | path=abspath, | |
517 |
|
|
714 | up=_up(abspath), | |
518 |
|
|
715 | upparity=parity.next(), | |
519 |
|
|
716 | fentries=filelist, | |
520 |
|
|
717 | dentries=dirlist, | |
521 |
|
|
718 | archives=self.archivelist(hex(node)), | |
522 |
|
|
719 | tags=self.nodetagsdict(node), | |
523 |
|
|
720 | branches=self.nodebranchdict(ctx)) | |
524 |
|
721 | |||
525 | def tags(self): |
|
722 | def tags(self, tmpl): | |
526 | i = self.repo.tagslist() |
|
723 | i = self.repo.tagslist() | |
527 | i.reverse() |
|
724 | i.reverse() | |
528 | parity = paritygen(self.stripecount) |
|
725 | parity = paritygen(self.stripecount) | |
@@ -540,13 +737,13 b' class hgweb(object):' | |||||
540 | "date": self.repo.changectx(n).date(), |
|
737 | "date": self.repo.changectx(n).date(), | |
541 | "node": hex(n)} |
|
738 | "node": hex(n)} | |
542 |
|
739 | |||
543 |
yield |
|
740 | yield tmpl("tags", | |
544 |
|
|
741 | node=hex(self.repo.changelog.tip()), | |
545 |
|
|
742 | entries=lambda **x: entries(False,0, **x), | |
546 |
|
|
743 | entriesnotip=lambda **x: entries(True,0, **x), | |
547 |
|
|
744 | latestentry=lambda **x: entries(True,1, **x)) | |
548 |
|
745 | |||
549 | def summary(self): |
|
746 | def summary(self, tmpl): | |
550 | i = self.repo.tagslist() |
|
747 | i = self.repo.tagslist() | |
551 | i.reverse() |
|
748 | i.reverse() | |
552 |
|
749 | |||
@@ -561,11 +758,11 b' class hgweb(object):' | |||||
561 | if count > 10: # limit to 10 tags |
|
758 | if count > 10: # limit to 10 tags | |
562 | break; |
|
759 | break; | |
563 |
|
760 | |||
564 |
yield |
|
761 | yield tmpl("tagentry", | |
565 |
|
|
762 | parity=parity.next(), | |
566 |
|
|
763 | tag=k, | |
567 |
|
|
764 | node=hex(n), | |
568 |
|
|
765 | date=self.repo.changectx(n).date()) | |
569 |
|
766 | |||
570 |
|
767 | |||
571 | def branches(**map): |
|
768 | def branches(**map): | |
@@ -591,8 +788,8 b' class hgweb(object):' | |||||
591 | n = ctx.node() |
|
788 | n = ctx.node() | |
592 | hn = hex(n) |
|
789 | hn = hex(n) | |
593 |
|
790 | |||
594 |
l.insert(0, |
|
791 | l.insert(0, tmpl( | |
595 |
|
|
792 | 'shortlogentry', | |
596 | parity=parity.next(), |
|
793 | parity=parity.next(), | |
597 | author=ctx.user(), |
|
794 | author=ctx.user(), | |
598 | desc=ctx.description(), |
|
795 | desc=ctx.description(), | |
@@ -609,34 +806,34 b' class hgweb(object):' | |||||
609 | start = max(0, count - self.maxchanges) |
|
806 | start = max(0, count - self.maxchanges) | |
610 | end = min(count, start + self.maxchanges) |
|
807 | end = min(count, start + self.maxchanges) | |
611 |
|
808 | |||
612 |
yield |
|
809 | yield tmpl("summary", | |
613 | desc=self.config("web", "description", "unknown"), |
|
810 | desc=self.config("web", "description", "unknown"), | |
614 | owner=(self.config("ui", "username") or # preferred |
|
811 | owner=(self.config("ui", "username") or # preferred | |
615 | self.config("web", "contact") or # deprecated |
|
812 | self.config("web", "contact") or # deprecated | |
616 | self.config("web", "author", "unknown")), # also |
|
813 | self.config("web", "author", "unknown")), # also | |
617 | lastchange=cl.read(cl.tip())[2], |
|
814 | lastchange=cl.read(cl.tip())[2], | |
618 | tags=tagentries, |
|
815 | tags=tagentries, | |
619 | branches=branches, |
|
816 | branches=branches, | |
620 | shortlog=changelist, |
|
817 | shortlog=changelist, | |
621 | node=hex(cl.tip()), |
|
818 | node=hex(cl.tip()), | |
622 | archives=self.archivelist("tip")) |
|
819 | archives=self.archivelist("tip")) | |
623 |
|
820 | |||
624 | def filediff(self, fctx): |
|
821 | def filediff(self, tmpl, fctx): | |
625 | n = fctx.node() |
|
822 | n = fctx.node() | |
626 | path = fctx.path() |
|
823 | path = fctx.path() | |
627 | parents = fctx.parents() |
|
824 | parents = fctx.parents() | |
628 | p1 = parents and parents[0].node() or nullid |
|
825 | p1 = parents and parents[0].node() or nullid | |
629 |
|
826 | |||
630 | def diff(**map): |
|
827 | def diff(**map): | |
631 | yield self.diff(p1, n, [path]) |
|
828 | yield self.diff(tmpl, p1, n, [path]) | |
632 |
|
829 | |||
633 |
yield |
|
830 | yield tmpl("filediff", | |
634 |
|
|
831 | file=path, | |
635 |
|
|
832 | node=hex(n), | |
636 |
|
|
833 | rev=fctx.rev(), | |
637 |
|
|
834 | parent=self.siblings(parents), | |
638 |
|
|
835 | child=self.siblings(fctx.children()), | |
639 |
|
|
836 | diff=diff) | |
640 |
|
837 | |||
641 | archive_specs = { |
|
838 | archive_specs = { | |
642 | 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None), |
|
839 | 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None), | |
@@ -644,7 +841,7 b' class hgweb(object):' | |||||
644 | 'zip': ('application/zip', 'zip', '.zip', None), |
|
841 | 'zip': ('application/zip', 'zip', '.zip', None), | |
645 | } |
|
842 | } | |
646 |
|
843 | |||
647 | def archive(self, req, key, type_): |
|
844 | def archive(self, tmpl, req, key, type_): | |
648 | reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame)) |
|
845 | reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame)) | |
649 | cnode = self.repo.lookup(key) |
|
846 | cnode = self.repo.lookup(key) | |
650 | arch_version = key |
|
847 | arch_version = key | |
@@ -668,191 +865,6 b' class hgweb(object):' | |||||
668 | path = path.lstrip('/') |
|
865 | path = path.lstrip('/') | |
669 | return util.canonpath(self.repo.root, '', path) |
|
866 | return util.canonpath(self.repo.root, '', path) | |
670 |
|
867 | |||
671 | def run(self): |
|
|||
672 | if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): |
|
|||
673 | raise RuntimeError("This function is only intended to be called while running as a CGI script.") |
|
|||
674 | import mercurial.hgweb.wsgicgi as wsgicgi |
|
|||
675 | wsgicgi.launch(self) |
|
|||
676 |
|
||||
677 | def __call__(self, env, respond): |
|
|||
678 | req = wsgirequest(env, respond) |
|
|||
679 | self.run_wsgi(req) |
|
|||
680 | return req |
|
|||
681 |
|
||||
682 | def run_wsgi(self, req): |
|
|||
683 | def header(**map): |
|
|||
684 | header_file = cStringIO.StringIO( |
|
|||
685 | ''.join(self.t("header", encoding=self.encoding, **map))) |
|
|||
686 | msg = mimetools.Message(header_file, 0) |
|
|||
687 | req.header(msg.items()) |
|
|||
688 | yield header_file.read() |
|
|||
689 |
|
||||
690 | def rawfileheader(**map): |
|
|||
691 | req.header([('Content-type', map['mimetype']), |
|
|||
692 | ('Content-disposition', 'filename=%s' % map['file']), |
|
|||
693 | ('Content-length', str(len(map['raw'])))]) |
|
|||
694 | yield '' |
|
|||
695 |
|
||||
696 | def footer(**map): |
|
|||
697 | yield self.t("footer", **map) |
|
|||
698 |
|
||||
699 | def motd(**map): |
|
|||
700 | yield self.config("web", "motd", "") |
|
|||
701 |
|
||||
702 | def expand_form(form): |
|
|||
703 | shortcuts = { |
|
|||
704 | 'cl': [('cmd', ['changelog']), ('rev', None)], |
|
|||
705 | 'sl': [('cmd', ['shortlog']), ('rev', None)], |
|
|||
706 | 'cs': [('cmd', ['changeset']), ('node', None)], |
|
|||
707 | 'f': [('cmd', ['file']), ('filenode', None)], |
|
|||
708 | 'fl': [('cmd', ['filelog']), ('filenode', None)], |
|
|||
709 | 'fd': [('cmd', ['filediff']), ('node', None)], |
|
|||
710 | 'fa': [('cmd', ['annotate']), ('filenode', None)], |
|
|||
711 | 'mf': [('cmd', ['manifest']), ('manifest', None)], |
|
|||
712 | 'ca': [('cmd', ['archive']), ('node', None)], |
|
|||
713 | 'tags': [('cmd', ['tags'])], |
|
|||
714 | 'tip': [('cmd', ['changeset']), ('node', ['tip'])], |
|
|||
715 | 'static': [('cmd', ['static']), ('file', None)] |
|
|||
716 | } |
|
|||
717 |
|
||||
718 | for k in shortcuts.iterkeys(): |
|
|||
719 | if form.has_key(k): |
|
|||
720 | for name, value in shortcuts[k]: |
|
|||
721 | if value is None: |
|
|||
722 | value = form[k] |
|
|||
723 | form[name] = value |
|
|||
724 | del form[k] |
|
|||
725 |
|
||||
726 | def rewrite_request(req): |
|
|||
727 | '''translate new web interface to traditional format''' |
|
|||
728 |
|
||||
729 | req.url = req.env['SCRIPT_NAME'] |
|
|||
730 | if not req.url.endswith('/'): |
|
|||
731 | req.url += '/' |
|
|||
732 | if req.env.has_key('REPO_NAME'): |
|
|||
733 | req.url += req.env['REPO_NAME'] + '/' |
|
|||
734 |
|
||||
735 | if req.env.get('PATH_INFO'): |
|
|||
736 | parts = req.env.get('PATH_INFO').strip('/').split('/') |
|
|||
737 | repo_parts = req.env.get('REPO_NAME', '').split('/') |
|
|||
738 | if parts[:len(repo_parts)] == repo_parts: |
|
|||
739 | parts = parts[len(repo_parts):] |
|
|||
740 | query = '/'.join(parts) |
|
|||
741 | else: |
|
|||
742 | query = req.env['QUERY_STRING'].split('&', 1)[0] |
|
|||
743 | query = query.split(';', 1)[0] |
|
|||
744 |
|
||||
745 | if req.form.has_key('cmd'): |
|
|||
746 | # old style |
|
|||
747 | return |
|
|||
748 |
|
||||
749 | args = query.split('/', 2) |
|
|||
750 | if not args or not args[0]: |
|
|||
751 | return |
|
|||
752 |
|
||||
753 | cmd = args.pop(0) |
|
|||
754 | style = cmd.rfind('-') |
|
|||
755 | if style != -1: |
|
|||
756 | req.form['style'] = [cmd[:style]] |
|
|||
757 | cmd = cmd[style+1:] |
|
|||
758 | # avoid accepting e.g. style parameter as command |
|
|||
759 | if hasattr(self, 'do_' + cmd): |
|
|||
760 | req.form['cmd'] = [cmd] |
|
|||
761 |
|
||||
762 | if args and args[0]: |
|
|||
763 | node = args.pop(0) |
|
|||
764 | req.form['node'] = [node] |
|
|||
765 | if args: |
|
|||
766 | req.form['file'] = args |
|
|||
767 |
|
||||
768 | if cmd == 'static': |
|
|||
769 | req.form['file'] = req.form['node'] |
|
|||
770 | elif cmd == 'archive': |
|
|||
771 | fn = req.form['node'][0] |
|
|||
772 | for type_, spec in self.archive_specs.iteritems(): |
|
|||
773 | ext = spec[2] |
|
|||
774 | if fn.endswith(ext): |
|
|||
775 | req.form['node'] = [fn[:-len(ext)]] |
|
|||
776 | req.form['type'] = [type_] |
|
|||
777 |
|
||||
778 | def sessionvars(**map): |
|
|||
779 | fields = [] |
|
|||
780 | if req.form.has_key('style'): |
|
|||
781 | style = req.form['style'][0] |
|
|||
782 | if style != self.config('web', 'style', ''): |
|
|||
783 | fields.append(('style', style)) |
|
|||
784 |
|
||||
785 | separator = req.url[-1] == '?' and ';' or '?' |
|
|||
786 | for name, value in fields: |
|
|||
787 | yield dict(name=name, value=value, separator=separator) |
|
|||
788 | separator = ';' |
|
|||
789 |
|
||||
790 | self.refresh() |
|
|||
791 |
|
||||
792 | expand_form(req.form) |
|
|||
793 | rewrite_request(req) |
|
|||
794 |
|
||||
795 | style = self.config("web", "style", "") |
|
|||
796 | if req.form.has_key('style'): |
|
|||
797 | style = req.form['style'][0] |
|
|||
798 | mapfile = style_map(self.templatepath, style) |
|
|||
799 |
|
||||
800 | proto = req.env.get('wsgi.url_scheme') |
|
|||
801 | if proto == 'https': |
|
|||
802 | proto = 'https' |
|
|||
803 | default_port = "443" |
|
|||
804 | else: |
|
|||
805 | proto = 'http' |
|
|||
806 | default_port = "80" |
|
|||
807 |
|
||||
808 | port = req.env["SERVER_PORT"] |
|
|||
809 | port = port != default_port and (":" + port) or "" |
|
|||
810 | urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) |
|
|||
811 | staticurl = self.config("web", "staticurl") or req.url + 'static/' |
|
|||
812 | if not staticurl.endswith('/'): |
|
|||
813 | staticurl += '/' |
|
|||
814 |
|
||||
815 | if not self.reponame: |
|
|||
816 | self.reponame = (self.config("web", "name") |
|
|||
817 | or req.env.get('REPO_NAME') |
|
|||
818 | or req.url.strip('/') |
|
|||
819 | or os.path.basename(self.repo.root)) |
|
|||
820 |
|
||||
821 | self.t = templater.templater(mapfile, templater.common_filters, |
|
|||
822 | defaults={"url": req.url, |
|
|||
823 | "staticurl": staticurl, |
|
|||
824 | "urlbase": urlbase, |
|
|||
825 | "repo": self.reponame, |
|
|||
826 | "header": header, |
|
|||
827 | "footer": footer, |
|
|||
828 | "motd": motd, |
|
|||
829 | "rawfileheader": rawfileheader, |
|
|||
830 | "sessionvars": sessionvars |
|
|||
831 | }) |
|
|||
832 |
|
||||
833 | try: |
|
|||
834 | if not req.form.has_key('cmd'): |
|
|||
835 | req.form['cmd'] = [self.t.cache['default']] |
|
|||
836 |
|
||||
837 | cmd = req.form['cmd'][0] |
|
|||
838 |
|
||||
839 | try: |
|
|||
840 | method = getattr(self, 'do_' + cmd) |
|
|||
841 | method(req) |
|
|||
842 | except revlog.LookupError, err: |
|
|||
843 | req.respond(404, self.t( |
|
|||
844 | 'error', error='revision not found: %s' % err.name)) |
|
|||
845 | except (hg.RepoError, revlog.RevlogError), inst: |
|
|||
846 | req.respond('500 Internal Server Error', |
|
|||
847 | self.t('error', error=str(inst))) |
|
|||
848 | except ErrorResponse, inst: |
|
|||
849 | req.respond(inst.code, self.t('error', error=inst.message)) |
|
|||
850 | except AttributeError: |
|
|||
851 | req.respond(400, |
|
|||
852 | self.t('error', error='No such method: ' + cmd)) |
|
|||
853 | finally: |
|
|||
854 | self.t = None |
|
|||
855 |
|
||||
856 | def changectx(self, req): |
|
868 | def changectx(self, req): | |
857 | if req.form.has_key('node'): |
|
869 | if req.form.has_key('node'): | |
858 | changeid = req.form['node'][0] |
|
870 | changeid = req.form['node'][0] | |
@@ -884,181 +896,6 b' class hgweb(object):' | |||||
884 |
|
896 | |||
885 | return fctx |
|
897 | return fctx | |
886 |
|
898 | |||
887 | def do_log(self, req): |
|
|||
888 | if req.form.has_key('file') and req.form['file'][0]: |
|
|||
889 | self.do_filelog(req) |
|
|||
890 | else: |
|
|||
891 | self.do_changelog(req) |
|
|||
892 |
|
||||
893 | def do_rev(self, req): |
|
|||
894 | self.do_changeset(req) |
|
|||
895 |
|
||||
896 | def do_file(self, req): |
|
|||
897 | path = self.cleanpath(req.form.get('file', [''])[0]) |
|
|||
898 | if path: |
|
|||
899 | try: |
|
|||
900 | req.write(self.filerevision(self.filectx(req))) |
|
|||
901 | return |
|
|||
902 | except revlog.LookupError: |
|
|||
903 | pass |
|
|||
904 |
|
||||
905 | req.write(self.manifest(self.changectx(req), path)) |
|
|||
906 |
|
||||
907 | def do_diff(self, req): |
|
|||
908 | self.do_filediff(req) |
|
|||
909 |
|
||||
910 | def do_changelog(self, req, shortlog = False): |
|
|||
911 | if req.form.has_key('node'): |
|
|||
912 | ctx = self.changectx(req) |
|
|||
913 | else: |
|
|||
914 | if req.form.has_key('rev'): |
|
|||
915 | hi = req.form['rev'][0] |
|
|||
916 | else: |
|
|||
917 | hi = self.repo.changelog.count() - 1 |
|
|||
918 | try: |
|
|||
919 | ctx = self.repo.changectx(hi) |
|
|||
920 | except hg.RepoError: |
|
|||
921 | req.write(self.search(hi)) # XXX redirect to 404 page? |
|
|||
922 | return |
|
|||
923 |
|
||||
924 | req.write(self.changelog(ctx, shortlog = shortlog)) |
|
|||
925 |
|
||||
926 | def do_shortlog(self, req): |
|
|||
927 | self.do_changelog(req, shortlog = True) |
|
|||
928 |
|
||||
929 | def do_changeset(self, req): |
|
|||
930 | req.write(self.changeset(self.changectx(req))) |
|
|||
931 |
|
||||
932 | def do_manifest(self, req): |
|
|||
933 | req.write(self.manifest(self.changectx(req), |
|
|||
934 | self.cleanpath(req.form['path'][0]))) |
|
|||
935 |
|
||||
936 | def do_tags(self, req): |
|
|||
937 | req.write(self.tags()) |
|
|||
938 |
|
||||
939 | def do_summary(self, req): |
|
|||
940 | req.write(self.summary()) |
|
|||
941 |
|
||||
942 | def do_filediff(self, req): |
|
|||
943 | req.write(self.filediff(self.filectx(req))) |
|
|||
944 |
|
||||
945 | def do_annotate(self, req): |
|
|||
946 | req.write(self.fileannotate(self.filectx(req))) |
|
|||
947 |
|
||||
948 | def do_filelog(self, req): |
|
|||
949 | req.write(self.filelog(self.filectx(req))) |
|
|||
950 |
|
||||
951 | def do_lookup(self, req): |
|
|||
952 | try: |
|
|||
953 | r = hex(self.repo.lookup(req.form['key'][0])) |
|
|||
954 | success = 1 |
|
|||
955 | except Exception,inst: |
|
|||
956 | r = str(inst) |
|
|||
957 | success = 0 |
|
|||
958 | resp = "%s %s\n" % (success, r) |
|
|||
959 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
|||
960 | req.write(resp) |
|
|||
961 |
|
||||
962 | def do_heads(self, req): |
|
|||
963 | resp = " ".join(map(hex, self.repo.heads())) + "\n" |
|
|||
964 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
|||
965 | req.write(resp) |
|
|||
966 |
|
||||
967 | def do_branches(self, req): |
|
|||
968 | nodes = [] |
|
|||
969 | if req.form.has_key('nodes'): |
|
|||
970 | nodes = map(bin, req.form['nodes'][0].split(" ")) |
|
|||
971 | resp = cStringIO.StringIO() |
|
|||
972 | for b in self.repo.branches(nodes): |
|
|||
973 | resp.write(" ".join(map(hex, b)) + "\n") |
|
|||
974 | resp = resp.getvalue() |
|
|||
975 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
|||
976 | req.write(resp) |
|
|||
977 |
|
||||
978 | def do_between(self, req): |
|
|||
979 | if req.form.has_key('pairs'): |
|
|||
980 | pairs = [map(bin, p.split("-")) |
|
|||
981 | for p in req.form['pairs'][0].split(" ")] |
|
|||
982 | resp = cStringIO.StringIO() |
|
|||
983 | for b in self.repo.between(pairs): |
|
|||
984 | resp.write(" ".join(map(hex, b)) + "\n") |
|
|||
985 | resp = resp.getvalue() |
|
|||
986 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
|||
987 | req.write(resp) |
|
|||
988 |
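Note: the four wire-protocol handlers above (do_lookup, do_heads, do_branches, do_between) share one response shape: a small plain-text payload sent with the application/mercurial-0.1 content type and an explicit length. A minimal client-side sketch of the "<flag> <payload>\n" framing used by do_lookup; parse_lookup_response is illustrative only and not part of the patch:

    def parse_lookup_response(body):
        # do_lookup writes "1 <hexnode>\n" on success and "0 <message>\n"
        # on failure; callers must check the flag before using the payload.
        flag, rest = body.rstrip("\n").split(" ", 1)
        return int(flag), rest

    # parse_lookup_response("1 91f2b1e0...\n")          -> (1, "91f2b1e0...")
    # parse_lookup_response("0 unknown revision 'x'\n") -> (0, "unknown revision 'x'")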
|
||||
989 | def do_changegroup(self, req): |
|
|||
990 | req.httphdr("application/mercurial-0.1") |
|
|||
991 | nodes = [] |
|
|||
992 | if not self.allowpull: |
|
|||
993 | return |
|
|||
994 |
|
||||
995 | if req.form.has_key('roots'): |
|
|||
996 | nodes = map(bin, req.form['roots'][0].split(" ")) |
|
|||
997 |
|
||||
998 | z = zlib.compressobj() |
|
|||
999 | f = self.repo.changegroup(nodes, 'serve') |
|
|||
1000 | while 1: |
|
|||
1001 | chunk = f.read(4096) |
|
|||
1002 | if not chunk: |
|
|||
1003 | break |
|
|||
1004 | req.write(z.compress(chunk)) |
|
|||
1005 |
|
||||
1006 | req.write(z.flush()) |
|
|||
1007 |
|
||||
1008 | def do_changegroupsubset(self, req): |
|
|||
1009 | req.httphdr("application/mercurial-0.1") |
|
|||
1010 | bases = [] |
|
|||
1011 | heads = [] |
|
|||
1012 | if not self.allowpull: |
|
|||
1013 | return |
|
|||
1014 |
|
||||
1015 | if req.form.has_key('bases'): |
|
|||
1016 | bases = [bin(x) for x in req.form['bases'][0].split(' ')] |
|
|||
1017 | if req.form.has_key('heads'): |
|
|||
1018 | heads = [bin(x) for x in req.form['heads'][0].split(' ')] |
|
|||
1019 |
|
||||
1020 | z = zlib.compressobj() |
|
|||
1021 | f = self.repo.changegroupsubset(bases, heads, 'serve') |
|
|||
1022 | while 1: |
|
|||
1023 | chunk = f.read(4096) |
|
|||
1024 | if not chunk: |
|
|||
1025 | break |
|
|||
1026 | req.write(z.compress(chunk)) |
|
|||
1027 |
|
||||
1028 | req.write(z.flush()) |
|
|||
1029 |
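Note: both changegroup handlers stream the bundle through a single zlib compressor in fixed 4096-byte reads and flush once at the end, so the client receives one complete deflate stream. A standalone sketch of that loop; stream_compressed is a name introduced here, not in the patch:

    import zlib

    def stream_compressed(src, write, chunk_size=4096):
        # src is any file-like object (the changegroup stream);
        # write is the request's output callable.
        z = zlib.compressobj()
        while True:
            chunk = src.read(chunk_size)
            if not chunk:
                break
            write(z.compress(chunk))
        write(z.flush())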
|
||||
1030 | def do_archive(self, req): |
|
|||
1031 | type_ = req.form['type'][0] |
|
|||
1032 | allowed = self.configlist("web", "allow_archive") |
|
|||
1033 | if (type_ in self.archives and (type_ in allowed or |
|
|||
1034 | self.configbool("web", "allow" + type_, False))): |
|
|||
1035 | self.archive(req, req.form['node'][0], type_) |
|
|||
1036 | return |
|
|||
1037 |
|
||||
1038 | req.respond(400, self.t('error', |
|
|||
1039 | error='Unsupported archive type: %s' % type_)) |
|
|||
1040 |
|
||||
1041 | def do_static(self, req): |
|
|||
1042 | fname = req.form['file'][0] |
|
|||
1043 | # a repo owner may set web.static in .hg/hgrc to get any file |
|
|||
1044 | # readable by the user running the CGI script |
|
|||
1045 | static = self.config("web", "static", |
|
|||
1046 | os.path.join(self.templatepath, "static"), |
|
|||
1047 | untrusted=False) |
|
|||
1048 | req.write(staticfile(static, fname, req)) |
|
|||
1049 |
|
||||
1050 | def do_capabilities(self, req): |
|
|||
1051 | caps = ['lookup', 'changegroupsubset'] |
|
|||
1052 | if self.configbool('server', 'uncompressed'): |
|
|||
1053 | caps.append('stream=%d' % self.repo.changelog.version) |
|
|||
1054 | # XXX: make configurable and/or share code with do_unbundle: |
|
|||
1055 | unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN'] |
|
|||
1056 | if unbundleversions: |
|
|||
1057 | caps.append('unbundle=%s' % ','.join(unbundleversions)) |
|
|||
1058 | resp = ' '.join(caps) |
|
|||
1059 | req.httphdr("application/mercurial-0.1", length=len(resp)) |
|
|||
1060 | req.write(resp) |
|
|||
1061 |
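Note: do_capabilities answers with a single space-separated token list, e.g. 'lookup changegroupsubset stream=1 unbundle=HG10GZ,HG10BZ,HG10UN'. A hedged sketch of how that string is assembled; build_caps is illustrative only:

    def build_caps(changelog_version, allow_uncompressed,
                   bundle_versions=('HG10GZ', 'HG10BZ', 'HG10UN')):
        caps = ['lookup', 'changegroupsubset']
        if allow_uncompressed:
            # advertise streaming clone along with the revlog format version
            caps.append('stream=%d' % changelog_version)
        caps.append('unbundle=%s' % ','.join(bundle_versions))
        return ' '.join(caps)

    # build_caps(1, True)
    #   -> 'lookup changegroupsubset stream=1 unbundle=HG10GZ,HG10BZ,HG10UN'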
|
||||
1062 | def check_perm(self, req, op, default): |
|
899 | def check_perm(self, req, op, default): | |
1063 | '''check permission for operation based on user auth. |
|
900 | '''check permission for operation based on user auth. | |
1064 | return true if op allowed, else false. |
|
901 | return true if op allowed, else false. | |
@@ -1072,138 +909,3 b' class hgweb(object):' | |||||
1072 |
|
909 | |||
1073 | allow = self.configlist('web', 'allow_' + op) |
|
910 | allow = self.configlist('web', 'allow_' + op) | |
1074 | return (allow and (allow == ['*'] or user in allow)) or default |
|
911 | return (allow and (allow == ['*'] or user in allow)) or default | |
1075 |
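Note: check_perm reduces to a single allow-list rule over the web.allow_<op> setting: the default applies when the list is empty, '*' opens the operation to everyone, and otherwise the authenticated user must be listed. A tiny sketch of that rule; allowed() is a name used only for this example:

    def allowed(allow_list, user, default=False):
        return (bool(allow_list) and
                (allow_list == ['*'] or user in allow_list)) or default

    # allowed([], 'alice')              -> False   (no allow_push entry)
    # allowed(['*'], None)              -> True
    # allowed(['alice', 'bob'], 'eve')  -> False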
|
||||
1076 | def do_unbundle(self, req): |
|
|||
1077 | def bail(response, headers={}): |
|
|||
1078 | length = int(req.env['CONTENT_LENGTH']) |
|
|||
1079 | for s in util.filechunkiter(req, limit=length): |
|
|||
1080 | # drain incoming bundle, else client will not see |
|
|||
1081 | # response when run outside cgi script |
|
|||
1082 | pass |
|
|||
1083 | req.httphdr("application/mercurial-0.1", headers=headers) |
|
|||
1084 | req.write('0\n') |
|
|||
1085 | req.write(response) |
|
|||
1086 |
|
||||
1087 | # require ssl by default, auth info cannot be sniffed and |
|
|||
1088 | # replayed |
|
|||
1089 | ssl_req = self.configbool('web', 'push_ssl', True) |
|
|||
1090 | if ssl_req: |
|
|||
1091 | if req.env.get('wsgi.url_scheme') != 'https': |
|
|||
1092 | bail(_('ssl required\n')) |
|
|||
1093 | return |
|
|||
1094 | proto = 'https' |
|
|||
1095 | else: |
|
|||
1096 | proto = 'http' |
|
|||
1097 |
|
||||
1098 | # do not allow push unless explicitly allowed |
|
|||
1099 | if not self.check_perm(req, 'push', False): |
|
|||
1100 | bail(_('push not authorized\n'), |
|
|||
1101 | headers={'status': '401 Unauthorized'}) |
|
|||
1102 | return |
|
|||
1103 |
|
||||
1104 | their_heads = req.form['heads'][0].split(' ') |
|
|||
1105 |
|
||||
1106 | def check_heads(): |
|
|||
1107 | heads = map(hex, self.repo.heads()) |
|
|||
1108 | return their_heads == [hex('force')] or their_heads == heads |
|
|||
1109 |
|
||||
1110 | # fail early if possible |
|
|||
1111 | if not check_heads(): |
|
|||
1112 | bail(_('unsynced changes\n')) |
|
|||
1113 | return |
|
|||
1114 |
|
||||
1115 | req.httphdr("application/mercurial-0.1") |
|
|||
1116 |
|
||||
1117 | # do not lock repo until all changegroup data is |
|
|||
1118 | # streamed. save to temporary file. |
|
|||
1119 |
|
||||
1120 | fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') |
|
|||
1121 | fp = os.fdopen(fd, 'wb+') |
|
|||
1122 | try: |
|
|||
1123 | length = int(req.env['CONTENT_LENGTH']) |
|
|||
1124 | for s in util.filechunkiter(req, limit=length): |
|
|||
1125 | fp.write(s) |
|
|||
1126 |
|
||||
1127 | try: |
|
|||
1128 | lock = self.repo.lock() |
|
|||
1129 | try: |
|
|||
1130 | if not check_heads(): |
|
|||
1131 | req.write('0\n') |
|
|||
1132 | req.write(_('unsynced changes\n')) |
|
|||
1133 | return |
|
|||
1134 |
|
||||
1135 | fp.seek(0) |
|
|||
1136 | header = fp.read(6) |
|
|||
1137 | if not header.startswith("HG"): |
|
|||
1138 | # old client with uncompressed bundle |
|
|||
1139 | def generator(f): |
|
|||
1140 | yield header |
|
|||
1141 | for chunk in f: |
|
|||
1142 | yield chunk |
|
|||
1143 | elif not header.startswith("HG10"): |
|
|||
1144 | req.write("0\n") |
|
|||
1145 | req.write(_("unknown bundle version\n")) |
|
|||
1146 | return |
|
|||
1147 | elif header == "HG10GZ": |
|
|||
1148 | def generator(f): |
|
|||
1149 | zd = zlib.decompressobj() |
|
|||
1150 | for chunk in f: |
|
|||
1151 | yield zd.decompress(chunk) |
|
|||
1152 | elif header == "HG10BZ": |
|
|||
1153 | def generator(f): |
|
|||
1154 | zd = bz2.BZ2Decompressor() |
|
|||
1155 | zd.decompress("BZ") |
|
|||
1156 | for chunk in f: |
|
|||
1157 | yield zd.decompress(chunk) |
|
|||
1158 | elif header == "HG10UN": |
|
|||
1159 | def generator(f): |
|
|||
1160 | for chunk in f: |
|
|||
1161 | yield chunk |
|
|||
1162 | else: |
|
|||
1163 | req.write("0\n") |
|
|||
1164 | req.write(_("unknown bundle compression type\n")) |
|
|||
1165 | return |
|
|||
1166 | gen = generator(util.filechunkiter(fp, 4096)) |
|
|||
1167 |
|
||||
1168 | # send addchangegroup output to client |
|
|||
1169 |
|
||||
1170 | old_stdout = sys.stdout |
|
|||
1171 | sys.stdout = cStringIO.StringIO() |
|
|||
1172 |
|
||||
1173 | try: |
|
|||
1174 | url = 'remote:%s:%s' % (proto, |
|
|||
1175 | req.env.get('REMOTE_HOST', '')) |
|
|||
1176 | try: |
|
|||
1177 | ret = self.repo.addchangegroup( |
|
|||
1178 | util.chunkbuffer(gen), 'serve', url) |
|
|||
1179 | except util.Abort, inst: |
|
|||
1180 | sys.stdout.write("abort: %s\n" % inst) |
|
|||
1181 | ret = 0 |
|
|||
1182 | finally: |
|
|||
1183 | val = sys.stdout.getvalue() |
|
|||
1184 | sys.stdout = old_stdout |
|
|||
1185 | req.write('%d\n' % ret) |
|
|||
1186 | req.write(val) |
|
|||
1187 | finally: |
|
|||
1188 | del lock |
|
|||
1189 | except (OSError, IOError), inst: |
|
|||
1190 | req.write('0\n') |
|
|||
1191 | filename = getattr(inst, 'filename', '') |
|
|||
1192 | # Don't send our filesystem layout to the client |
|
|||
1193 | if filename.startswith(self.repo.root): |
|
|||
1194 | filename = filename[len(self.repo.root)+1:] |
|
|||
1195 | else: |
|
|||
1196 | filename = '' |
|
|||
1197 | error = getattr(inst, 'strerror', 'Unknown error') |
|
|||
1198 | if inst.errno == errno.ENOENT: |
|
|||
1199 | code = 404 |
|
|||
1200 | else: |
|
|||
1201 | code = 500 |
|
|||
1202 | req.respond(code, '%s: %s\n' % (error, filename)) |
|
|||
1203 | finally: |
|
|||
1204 | fp.close() |
|
|||
1205 | os.unlink(tempname) |
|
|||
1206 |
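Note: the heart of do_unbundle is choosing a decompressor from the first six bytes of the uploaded bundle: 'HG10GZ' means zlib, 'HG10BZ' means bz2 (with the stripped 'BZ' magic re-fed to the decompressor), 'HG10UN' means no compression, and anything not starting with 'HG' is treated as an old-style raw stream. A condensed sketch of that dispatch in the Python 2 style of the original, assuming only that chunks iterates over the uploaded data with the 6-byte header already removed (the old-client fallback is left out):

    import bz2, zlib

    def decompressed_chunks(header, chunks):
        if header == 'HG10GZ':
            zd = zlib.decompressobj()
            return (zd.decompress(c) for c in chunks)
        if header == 'HG10BZ':
            zd = bz2.BZ2Decompressor()
            zd.decompress('BZ')   # re-insert the magic the header check consumed
            return (zd.decompress(c) for c in chunks)
        if header == 'HG10UN':
            return iter(chunks)
        raise ValueError('unknown bundle compression type')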
|
||||
1207 | def do_stream_out(self, req): |
|
|||
1208 | req.httphdr("application/mercurial-0.1") |
|
|||
1209 | streamclone.stream_out(self.repo, req, untrusted=True) |
|
@@ -20,7 +20,8 b' class hgwebdir(object):' | |||||
20 | return [(util.pconvert(name).strip('/'), path) |
|
20 | return [(util.pconvert(name).strip('/'), path) | |
21 | for name, path in items] |
|
21 | for name, path in items] | |
22 |
|
22 | |||
23 | self.parentui = parentui |
|
23 | self.parentui = parentui or ui.ui(report_untrusted=False, | |
|
24 | interactive = False) | |||
24 | self.motd = None |
|
25 | self.motd = None | |
25 | self.style = None |
|
26 | self.style = None | |
26 | self.stripecount = None |
|
27 | self.stripecount = None | |
@@ -69,50 +70,66 b' class hgwebdir(object):' | |||||
69 | return req |
|
70 | return req | |
70 |
|
71 | |||
71 | def run_wsgi(self, req): |
|
72 | def run_wsgi(self, req): | |
72 | def header(**map): |
|
|||
73 | header_file = cStringIO.StringIO( |
|
|||
74 | ''.join(tmpl("header", encoding=util._encoding, **map))) |
|
|||
75 | msg = mimetools.Message(header_file, 0) |
|
|||
76 | req.header(msg.items()) |
|
|||
77 | yield header_file.read() |
|
|||
78 |
|
73 | |||
79 | def footer(**map): |
|
74 | try: | |
80 | yield tmpl("footer", **map) |
|
75 | try: | |
81 |
|
76 | |||
82 | def motd(**map): |
|
77 | virtual = req.env.get("PATH_INFO", "").strip('/') | |
83 |
|
|
78 | ||
84 |
|
|
79 | # a static file | |
85 | else: |
|
80 | if virtual.startswith('static/') or 'static' in req.form: | |
86 | yield config('web', 'motd', '') |
|
81 | static = os.path.join(templater.templatepath(), 'static') | |
|
82 | if virtual.startswith('static/'): | |||
|
83 | fname = virtual[7:] | |||
|
84 | else: | |||
|
85 | fname = req.form['static'][0] | |||
|
86 | req.write(staticfile(static, fname, req)) | |||
|
87 | return | |||
87 |
|
88 | |||
88 | parentui = self.parentui or ui.ui(report_untrusted=False, |
|
89 | # top-level index | |
89 | interactive=False) |
|
90 | elif not virtual: | |
90 |
|
91 | tmpl = self.templater(req) | ||
91 | def config(section, name, default=None, untrusted=True): |
|
92 | self.makeindex(req, tmpl) | |
92 | return parentui.config(section, name, default, untrusted) |
|
93 | return | |
93 |
|
94 | |||
94 | url = req.env.get('SCRIPT_NAME', '') |
|
95 | # nested indexes and hgwebs | |
95 | if not url.endswith('/'): |
|
96 | repos = dict(self.repos) | |
96 | url += '/' |
|
97 | while virtual: | |
97 |
|
98 | real = repos.get(virtual) | ||
98 | staticurl = config('web', 'staticurl') or url + 'static/' |
|
99 | if real: | |
99 | if not staticurl.endswith('/'): |
|
100 | req.env['REPO_NAME'] = virtual | |
100 | staticurl += '/' |
|
101 | try: | |
|
102 | repo = hg.repository(self.parentui, real) | |||
|
103 | hgweb(repo).run_wsgi(req) | |||
|
104 | return | |||
|
105 | except IOError, inst: | |||
|
106 | raise ErrorResponse(500, inst.strerror) | |||
|
107 | except hg.RepoError, inst: | |||
|
108 | raise ErrorResponse(500, str(inst)) | |||
101 |
|
109 | |||
102 | style = self.style |
|
110 | # browse subdirectories | |
103 | if style is None: |
|
111 | subdir = virtual + '/' | |
104 | style = config('web', 'style', '') |
|
112 | if [r for r in repos if r.startswith(subdir)]: | |
105 | if req.form.has_key('style'): |
|
113 | tmpl = self.templater(req) | |
106 | style = req.form['style'][0] |
|
114 | self.makeindex(req, tmpl, subdir) | |
107 | if self.stripecount is None: |
|
115 | return | |
108 | self.stripecount = int(config('web', 'stripes', 1)) |
|
116 | ||
109 | mapfile = style_map(templater.templatepath(), style) |
|
117 | up = virtual.rfind('/') | |
110 | tmpl = templater.templater(mapfile, templater.common_filters, |
|
118 | if up < 0: | |
111 | defaults={"header": header, |
|
119 | break | |
112 | "footer": footer, |
|
120 | virtual = virtual[:up] | |
113 | "motd": motd, |
|
121 | ||
114 | "url": url, |
|
122 | # prefixes not found | |
115 | "staticurl": staticurl}) |
|
123 | tmpl = self.templater(req) | |
|
124 | req.respond(404, tmpl("notfound", repo=virtual)) | |||
|
125 | ||||
|
126 | except ErrorResponse, err: | |||
|
127 | tmpl = self.templater(req) | |||
|
128 | req.respond(err.code, tmpl('error', error=err.message or '')) | |||
|
129 | finally: | |||
|
130 | tmpl = None | |||
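Note: the rewritten run_wsgi walks PATH_INFO from the most specific prefix to the least: if the whole virtual path names a configured repository it is handed to hgweb, otherwise trailing components are stripped one at a time to look for a subdirectory index, and only when no prefix matches is a 404 rendered. A minimal sketch of just that prefix walk; resolve() and its return values are illustrative, not part of the patch:

    def resolve(virtual, repos):
        # repos maps virtual paths to real repository paths
        while virtual:
            if virtual in repos:
                return ('repo', repos[virtual])
            subdir = virtual + '/'
            if any(r.startswith(subdir) for r in repos):
                return ('index', subdir)
            up = virtual.rfind('/')
            if up < 0:
                break
            virtual = virtual[:up]
        return ('notfound', None)

    # resolve('team/proj/hg', {'team/proj/hg': '/srv/hg/proj'}) -> ('repo', '/srv/hg/proj')
    # resolve('staff/x',      {'team/proj/hg': '/srv/hg/proj'}) -> ('notfound', None)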
|
131 | ||||
|
132 | def makeindex(self, req, tmpl, subdir=""): | |||
116 |
|
133 | |||
117 | def archivelist(ui, nodeid, url): |
|
134 | def archivelist(ui, nodeid, url): | |
118 | allowed = ui.configlist("web", "allow_archive", untrusted=True) |
|
135 | allowed = ui.configlist("web", "allow_archive", untrusted=True) | |
@@ -142,7 +159,7 b' class hgwebdir(object):' | |||||
142 | continue |
|
159 | continue | |
143 | name = name[len(subdir):] |
|
160 | name = name[len(subdir):] | |
144 |
|
161 | |||
145 | u = ui.ui(parentui=parentui) |
|
162 | u = ui.ui(parentui=self.parentui) | |
146 | try: |
|
163 | try: | |
147 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
164 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) | |
148 | except Exception, e: |
|
165 | except Exception, e: | |
@@ -196,67 +213,65 b' class hgwebdir(object):' | |||||
196 | row['parity'] = parity.next() |
|
213 | row['parity'] = parity.next() | |
197 | yield row |
|
214 | yield row | |
198 |
|
215 | |||
199 | def makeindex(req, subdir=""): |
|
216 | sortable = ["name", "description", "contact", "lastchange"] | |
200 | sortable = ["name", "description", "contact", "lastchange"] |
|
217 | sortcolumn, descending = self.repos_sorted | |
201 | sortcolumn, descending = self.repos_sorted |
|
218 | if req.form.has_key('sort'): | |
202 |
|
|
219 | sortcolumn = req.form['sort'][0] | |
203 | sortcolumn = req.form['sort'][0] |
|
220 | descending = sortcolumn.startswith('-') | |
204 | descending = sortcolumn.startswith('-') |
|
221 | if descending: | |
205 | if descending: |
|
222 | sortcolumn = sortcolumn[1:] | |
206 |
|
|
223 | if sortcolumn not in sortable: | |
207 |
|
|
224 | sortcolumn = "" | |
208 | sortcolumn = "" |
|
|||
209 |
|
225 | |||
210 |
|
|
226 | sort = [("sort_%s" % column, | |
211 |
|
|
227 | "%s%s" % ((not descending and column == sortcolumn) | |
212 |
|
|
228 | and "-" or "", column)) | |
213 |
|
|
229 | for column in sortable] | |
214 |
|
|
230 | req.write(tmpl("index", entries=entries, subdir=subdir, | |
215 |
|
|
231 | sortcolumn=sortcolumn, descending=descending, | |
216 |
|
|
232 | **dict(sort))) | |
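Note: the index sort handling turns a single ?sort= request parameter into a column name plus direction: a leading '-' means descending, and unknown columns fall back to the default ordering. A small sketch of that parsing; parse_sort is not a function in the patch:

    def parse_sort(value, sortable=("name", "description", "contact", "lastchange")):
        descending = value.startswith('-')
        column = value[1:] if descending else value
        if column not in sortable:
            column = ""
        return column, descending

    # parse_sort('lastchange')  -> ('lastchange', False)
    # parse_sort('-lastchange') -> ('lastchange', True)
    # parse_sort('bogus')       -> ('', False)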
|
233 | ||||
|
234 | def templater(self, req): | |||
|
235 | ||||
|
236 | def header(**map): | |||
|
237 | header_file = cStringIO.StringIO( | |||
|
238 | ''.join(tmpl("header", encoding=util._encoding, **map))) | |||
|
239 | msg = mimetools.Message(header_file, 0) | |||
|
240 | req.header(msg.items()) | |||
|
241 | yield header_file.read() | |||
|
242 | ||||
|
243 | def footer(**map): | |||
|
244 | yield tmpl("footer", **map) | |||
217 |
|
245 | |||
218 | try: |
|
246 | def motd(**map): | |
219 | try: |
|
247 | if self.motd is not None: | |
220 | virtual = req.env.get("PATH_INFO", "").strip('/') |
|
248 | yield self.motd | |
221 | if virtual.startswith('static/'): |
|
249 | else: | |
222 | static = os.path.join(templater.templatepath(), 'static') |
|
250 | yield config('web', 'motd', '') | |
223 | fname = virtual[7:] |
|
251 | ||
224 | req.write(staticfile(static, fname, req)) |
|
252 | def config(section, name, default=None, untrusted=True): | |
225 | elif virtual: |
|
253 | return self.parentui.config(section, name, default, untrusted) | |
226 | repos = dict(self.repos) |
|
254 | ||
227 | while virtual: |
|
255 | url = req.env.get('SCRIPT_NAME', '') | |
228 | real = repos.get(virtual) |
|
256 | if not url.endswith('/'): | |
229 | if real: |
|
257 | url += '/' | |
230 | req.env['REPO_NAME'] = virtual |
|
|||
231 | try: |
|
|||
232 | repo = hg.repository(parentui, real) |
|
|||
233 | hgweb(repo).run_wsgi(req) |
|
|||
234 | return |
|
|||
235 | except IOError, inst: |
|
|||
236 | raise ErrorResponse(500, inst.strerror) |
|
|||
237 | except hg.RepoError, inst: |
|
|||
238 | raise ErrorResponse(500, str(inst)) |
|
|||
239 |
|
258 | |||
240 | # browse subdirectories |
|
259 | staticurl = config('web', 'staticurl') or url + 'static/' | |
241 | subdir = virtual + '/' |
|
260 | if not staticurl.endswith('/'): | |
242 | if [r for r in repos if r.startswith(subdir)]: |
|
261 | staticurl += '/' | |
243 | makeindex(req, subdir) |
|
|||
244 | return |
|
|||
245 |
|
||||
246 | up = virtual.rfind('/') |
|
|||
247 | if up < 0: |
|
|||
248 | break |
|
|||
249 | virtual = virtual[:up] |
|
|||
250 |
|
262 | |||
251 | req.respond(404, tmpl("notfound", repo=virtual)) |
|
263 | style = self.style | |
252 | else: |
|
264 | if style is None: | |
253 | if req.form.has_key('static'): |
|
265 | style = config('web', 'style', '') | |
254 | static = os.path.join(templater.templatepath(), "static") |
|
266 | if req.form.has_key('style'): | |
255 |
|
|
267 | style = req.form['style'][0] | |
256 | req.write(staticfile(static, fname, req)) |
|
268 | if self.stripecount is None: | |
257 | else: |
|
269 | self.stripecount = int(config('web', 'stripes', 1)) | |
258 | makeindex(req) |
|
270 | mapfile = style_map(templater.templatepath(), style) | |
259 | except ErrorResponse, err: |
|
271 | tmpl = templater.templater(mapfile, templater.common_filters, | |
260 | req.respond(err.code, tmpl('error', error=err.message or '')) |
|
272 | defaults={"header": header, | |
261 | finally: |
|
273 | "footer": footer, | |
262 | tmpl = None |
|
274 | "motd": motd, | |
|
275 | "url": url, | |||
|
276 | "staticurl": staticurl}) | |||
|
277 | return tmpl |
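Note: templater() resolves the display style in a fixed order: start from self.style, fall back to the web.style configuration entry, and let an explicit ?style= request parameter override both before the map file is looked up. A short sketch of that ordering, assuming form behaves like the req.form dict of lists used above; resolve_style is illustrative only:

    def resolve_style(preset, form, config):
        style = preset
        if style is None:
            style = config('web', 'style', '')
        if 'style' in form:
            style = form['style'][0]
        return style

    # resolve_style(None, {}, lambda s, n, d: 'gitweb')              -> 'gitweb'
    # resolve_style(None, {'style': ['paper']}, lambda s, n, d: '')  -> 'paper'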
@@ -403,7 +403,12 b' class ui(object):' | |||||
403 | readline.read_history_file |
|
403 | readline.read_history_file | |
404 | except ImportError: |
|
404 | except ImportError: | |
405 | pass |
|
405 | pass | |
406 |
|
|
406 | line = raw_input(prompt) | |
|
407 | # When stdin is in binary mode on Windows, it can cause | |||
|
408 | # raw_input() to emit an extra trailing carriage return | |||
|
409 | if os.linesep == '\r\n' and line and line[-1] == '\r': | |||
|
410 | line = line[:-1] | |||
|
411 | return line | |||
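Note: the added lines guard against a Windows quirk: when stdin is in binary mode, raw_input() can hand back a line that still ends in '\r'. The normalisation is equivalent to the small helper below; strip_cr is just an illustration:

    import os

    def strip_cr(line):
        # only relevant where the platform line separator is '\r\n'
        if os.linesep == '\r\n' and line and line[-1] == '\r':
            line = line[:-1]
        return line

    # On Windows: strip_cr('commit message\r') -> 'commit message'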
407 |
|
412 | |||
408 | def prompt(self, msg, pat=None, default="y", matchflags=0): |
|
413 | def prompt(self, msg, pat=None, default="y", matchflags=0): | |
409 | if not self.interactive: return default |
|
414 | if not self.interactive: return default |
@@ -180,6 +180,17 b' def testpid(pid):' | |||||
180 |
|
180 | |||
181 | def system_rcpath_win32(): |
|
181 | def system_rcpath_win32(): | |
182 | '''return default os-specific hgrc search path''' |
|
182 | '''return default os-specific hgrc search path''' | |
|
183 | proc = win32api.GetCurrentProcess() | |||
|
184 | try: | |||
|
185 | # This will fail on windows < NT | |||
|
186 | filename = win32process.GetModuleFileNameEx(proc, 0) | |||
|
187 | except: | |||
|
188 | filename = win32api.GetModuleFileName(0) | |||
|
189 | # Use mercurial.ini found in directory with hg.exe | |||
|
190 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') | |||
|
191 | if os.path.isfile(progrc): | |||
|
192 | return [progrc] | |||
|
193 | # else look for a system rcpath in the registry | |||
183 | try: |
|
194 | try: | |
184 | value = win32api.RegQueryValue( |
|
195 | value = win32api.RegQueryValue( | |
185 | win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') |
|
196 | win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') | |
@@ -193,19 +204,12 b' def system_rcpath_win32():' | |||||
193 | rcpath.append(os.path.join(p, f)) |
|
204 | rcpath.append(os.path.join(p, f)) | |
194 | return rcpath |
|
205 | return rcpath | |
195 | except pywintypes.error: |
|
206 | except pywintypes.error: | |
196 | pass |
|
207 | return [] | |
197 | proc = win32api.GetCurrentProcess() |
|
|||
198 | try: |
|
|||
199 | # This will fail on windows < NT |
|
|||
200 | filename = win32process.GetModuleFileNameEx(proc, 0) |
|
|||
201 | except: |
|
|||
202 | filename = win32api.GetModuleFileName(0) |
|
|||
203 | return [os.path.join(os.path.dirname(filename), 'mercurial.ini')] |
|
|||
204 |
|
208 | |||
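Note: the reordering above changes the lookup priority for the system-wide config on Windows: a mercurial.ini sitting next to the executable now wins, and the registry is only consulted when that file is absent. A rough sketch of the resulting order; find_registry_rcpath() is a stand-in for the win32 registry walk and sys.executable stands in for the GetModuleFileName lookup, neither of which is literally how the patch does it:

    import os, sys

    def system_rcpath_sketch(find_registry_rcpath):
        # 1. mercurial.ini in the directory of the running executable
        progrc = os.path.join(os.path.dirname(sys.executable), 'mercurial.ini')
        if os.path.isfile(progrc):
            return [progrc]
        # 2. otherwise whatever SOFTWARE\Mercurial in the registry points at
        return find_registry_rcpath() or []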
205 | def user_rcpath_win32(): |
|
209 | def user_rcpath_win32(): | |
206 | '''return os-specific hgrc search path to the user dir''' |
|
210 | '''return os-specific hgrc search path to the user dir''' | |
207 | userdir = os.path.expanduser('~') |
|
211 | userdir = os.path.expanduser('~') | |
208 | if userdir == '~': |
|
212 | if sys.getwindowsversion()[3] != 2 and userdir == '~': | |
209 | # We are on win < nt: fetch the APPDATA directory location and use |
|
213 | # We are on win < nt: fetch the APPDATA directory location and use | |
210 | # the parent directory as the user home dir. |
|
214 | # the parent directory as the user home dir. | |
211 | appdir = shell.SHGetPathFromIDList( |
|
215 | appdir = shell.SHGetPathFromIDList( |
@@ -22,15 +22,20 b'' | |||||
22 | # interface and limitations. See [GDR 2001-12-04b] for requirements and |
|
22 | # interface and limitations. See [GDR 2001-12-04b] for requirements and | |
23 | # design. |
|
23 | # design. | |
24 |
|
24 | |||
25 | """Usage: |
|
25 | r"""Usage: | |
26 |
|
26 | |||
27 | coverage.py -x MODULE.py [ARG1 ARG2 ...] |
|
27 | coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...] | |
28 | Execute module, passing the given command-line arguments, collecting |
|
28 | Execute module, passing the given command-line arguments, collecting | |
29 | coverage data. |
|
29 | coverage data. With the -p option, write to a temporary file containing | |
|
30 | the machine name and process ID. | |||
30 |
|
31 | |||
31 | coverage.py -e |
|
32 | coverage.py -e | |
32 | Erase collected coverage data. |
|
33 | Erase collected coverage data. | |
33 |
|
34 | |||
|
35 | coverage.py -c | |||
|
36 | Collect data from multiple coverage files (as created by -p option above) | |||
|
37 | and store it into a single file representing the union of the coverage. | |||
|
38 | ||||
34 | coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ... |
|
39 | coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ... | |
35 | Report on the statement coverage for the given files. With the -m |
|
40 | Report on the statement coverage for the given files. With the -m | |
36 | option, show line numbers of the statements that weren't executed. |
|
41 | option, show line numbers of the statements that weren't executed. | |
@@ -49,16 +54,26 b' coverage.py -a [-d dir] [-o dir1,dir2,..' | |||||
49 | Coverage data is saved in the file .coverage by default. Set the |
|
54 | Coverage data is saved in the file .coverage by default. Set the | |
50 | COVERAGE_FILE environment variable to save it somewhere else.""" |
|
55 | COVERAGE_FILE environment variable to save it somewhere else.""" | |
51 |
|
56 | |||
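Note: the new -p/-c pair described above splits measurement across processes and then merges: each parallel run writes its own data file (named with the machine name and process id), and collect folds them into one set. The merge is a per-file union of executed line numbers, roughly as sketched below; union_coverage is not a function in the module, just an illustration of what -c computes:

    def union_coverage(runs):
        # each run is a dict like {filename: {lineno: 1, ...}}, the same
        # shape coverage.py keeps in cexecuted
        merged = {}
        for run in runs:
            for fname, lines in run.items():
                merged.setdefault(fname, {}).update(lines)
        return merged

    # union_coverage([{'prog.py': {1: 1, 2: 1}}, {'prog.py': {2: 1, 5: 1}}])
    #   -> {'prog.py': {1: 1, 2: 1, 5: 1}}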
52 | __version__ = "2. |
|
57 | __version__ = "2.77.20070729" # see detailed history at the end of this file. | |
53 |
|
58 | |||
54 | import compiler |
|
59 | import compiler | |
55 | import compiler.visitor |
|
60 | import compiler.visitor | |
|
61 | import glob | |||
56 | import os |
|
62 | import os | |
57 | import re |
|
63 | import re | |
58 | import string |
|
64 | import string | |
|
65 | import symbol | |||
59 | import sys |
|
66 | import sys | |
60 | import threading |
|
67 | import threading | |
|
68 | import token | |||
61 | import types |
|
69 | import types | |
|
70 | from socket import gethostname | |||
|
71 | ||||
|
72 | # Python version compatibility | |||
|
73 | try: | |||
|
74 | strclass = basestring # new to 2.3 | |||
|
75 | except: | |||
|
76 | strclass = str | |||
62 |
|
77 | |||
63 | # 2. IMPLEMENTATION |
|
78 | # 2. IMPLEMENTATION | |
64 | # |
|
79 | # | |
@@ -81,25 +96,29 b' import types' | |||||
81 | # names to increase speed. |
|
96 | # names to increase speed. | |
82 |
|
97 | |||
83 | class StatementFindingAstVisitor(compiler.visitor.ASTVisitor): |
|
98 | class StatementFindingAstVisitor(compiler.visitor.ASTVisitor): | |
|
99 | """ A visitor for a parsed Abstract Syntax Tree which finds executable | |||
|
100 | statements. | |||
|
101 | """ | |||
84 | def __init__(self, statements, excluded, suite_spots): |
|
102 | def __init__(self, statements, excluded, suite_spots): | |
85 | compiler.visitor.ASTVisitor.__init__(self) |
|
103 | compiler.visitor.ASTVisitor.__init__(self) | |
86 | self.statements = statements |
|
104 | self.statements = statements | |
87 | self.excluded = excluded |
|
105 | self.excluded = excluded | |
88 | self.suite_spots = suite_spots |
|
106 | self.suite_spots = suite_spots | |
89 | self.excluding_suite = 0 |
|
107 | self.excluding_suite = 0 | |
90 |
|
108 | |||
91 | def doRecursive(self, node): |
|
109 | def doRecursive(self, node): | |
92 | self.recordNodeLine(node) |
|
|||
93 | for n in node.getChildNodes(): |
|
110 | for n in node.getChildNodes(): | |
94 | self.dispatch(n) |
|
111 | self.dispatch(n) | |
95 |
|
112 | |||
96 | visitStmt = visitModule = doRecursive |
|
113 | visitStmt = visitModule = doRecursive | |
97 |
|
114 | |||
98 | def doCode(self, node): |
|
115 | def doCode(self, node): | |
99 | if hasattr(node, 'decorators') and node.decorators: |
|
116 | if hasattr(node, 'decorators') and node.decorators: | |
100 | self.dispatch(node.decorators) |
|
117 | self.dispatch(node.decorators) | |
101 |
self. |
|
118 | self.recordAndDispatch(node.code) | |
102 |
|
119 | else: | ||
|
120 | self.doSuite(node, node.code) | |||
|
121 | ||||
103 | visitFunction = visitClass = doCode |
|
122 | visitFunction = visitClass = doCode | |
104 |
|
123 | |||
105 | def getFirstLine(self, node): |
|
124 | def getFirstLine(self, node): | |
@@ -119,17 +138,40 b' class StatementFindingAstVisitor(compile' | |||||
119 | for n in node.getChildNodes(): |
|
138 | for n in node.getChildNodes(): | |
120 | lineno = max(lineno, self.getLastLine(n)) |
|
139 | lineno = max(lineno, self.getLastLine(n)) | |
121 | return lineno |
|
140 | return lineno | |
122 |
|
141 | |||
123 | def doStatement(self, node): |
|
142 | def doStatement(self, node): | |
124 | self.recordLine(self.getFirstLine(node)) |
|
143 | self.recordLine(self.getFirstLine(node)) | |
125 |
|
144 | |||
126 | visitAssert = visitAssign = visitAssTuple = |
|
145 | visitAssert = visitAssign = visitAssTuple = visitPrint = \ | |
127 | visitPrintnl = visitRaise = visitSubscript = visitDecorators = \ |
|
146 | visitPrintnl = visitRaise = visitSubscript = visitDecorators = \ | |
128 | doStatement |
|
147 | doStatement | |
|
148 | ||||
|
149 | def visitPass(self, node): | |||
|
150 | # Pass statements have weird interactions with docstrings. If this | |||
|
151 | # pass statement is part of one of those pairs, claim that the statement | |||
|
152 | # is on the later of the two lines. | |||
|
153 | l = node.lineno | |||
|
154 | if l: | |||
|
155 | lines = self.suite_spots.get(l, [l,l]) | |||
|
156 | self.statements[lines[1]] = 1 | |||
|
157 | ||||
|
158 | def visitDiscard(self, node): | |||
|
159 | # Discard nodes are statements that execute an expression, but then | |||
|
160 | # discard the results. This includes function calls, so we can't | |||
|
161 | # ignore them all. But if the expression is a constant, the statement | |||
|
162 | # won't be "executed", so don't count it now. | |||
|
163 | if node.expr.__class__.__name__ != 'Const': | |||
|
164 | self.doStatement(node) | |||
129 |
|
165 | |||
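Note: visitPass and visitDiscard patch up two awkward cases in statement counting: a docstring followed by pass is credited to the later of the two lines, and bare constant expressions are not counted because the interpreter never "executes" them. For comparison, a rough modern equivalent of the basic statement-line collection using the stdlib ast module (coverage.py itself uses the old compiler package, so this is only an analogue, not the module's code):

    import ast

    def executable_lines(source):
        lines = set()
        for node in ast.walk(ast.parse(source)):
            # count every statement node by the line it starts on
            if isinstance(node, ast.stmt):
                lines.add(node.lineno)
        return sorted(lines)

    # executable_lines("x = 1\n\nif x:\n    y = 2\n") -> [1, 3, 4]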
130 | def recordNodeLine(self, node): |
|
166 | def recordNodeLine(self, node): | |
131 | return self.recordLine(node.lineno) |
|
167 | # Stmt nodes often have None, but shouldn't claim the first line of | |
132 |
|
168 | # their children (because the first child might be an ignorable line | ||
|
169 | # like "global a"). | |||
|
170 | if node.__class__.__name__ != 'Stmt': | |||
|
171 | return self.recordLine(self.getFirstLine(node)) | |||
|
172 | else: | |||
|
173 | return 0 | |||
|
174 | ||||
133 | def recordLine(self, lineno): |
|
175 | def recordLine(self, lineno): | |
134 | # Returns a bool, whether the line is included or excluded. |
|
176 | # Returns a bool, whether the line is included or excluded. | |
135 | if lineno: |
|
177 | if lineno: | |
@@ -137,7 +179,7 b' class StatementFindingAstVisitor(compile' | |||||
137 | # keyword. |
|
179 | # keyword. | |
138 | if lineno in self.suite_spots: |
|
180 | if lineno in self.suite_spots: | |
139 | lineno = self.suite_spots[lineno][0] |
|
181 | lineno = self.suite_spots[lineno][0] | |
140 | # If we're inside an exluded suite, record that this line was |
|
182 | # If we're inside an excluded suite, record that this line was | |
141 | # excluded. |
|
183 | # excluded. | |
142 | if self.excluding_suite: |
|
184 | if self.excluding_suite: | |
143 | self.excluded[lineno] = 1 |
|
185 | self.excluded[lineno] = 1 | |
@@ -153,9 +195,9 b' class StatementFindingAstVisitor(compile' | |||||
153 | self.statements[lineno] = 1 |
|
195 | self.statements[lineno] = 1 | |
154 | return 1 |
|
196 | return 1 | |
155 | return 0 |
|
197 | return 0 | |
156 |
|
198 | |||
157 | default = recordNodeLine |
|
199 | default = recordNodeLine | |
158 |
|
200 | |||
159 | def recordAndDispatch(self, node): |
|
201 | def recordAndDispatch(self, node): | |
160 | self.recordNodeLine(node) |
|
202 | self.recordNodeLine(node) | |
161 | self.dispatch(node) |
|
203 | self.dispatch(node) | |
@@ -166,7 +208,7 b' class StatementFindingAstVisitor(compile' | |||||
166 | self.excluding_suite = 1 |
|
208 | self.excluding_suite = 1 | |
167 | self.recordAndDispatch(body) |
|
209 | self.recordAndDispatch(body) | |
168 | self.excluding_suite = exsuite |
|
210 | self.excluding_suite = exsuite | |
169 |
|
211 | |||
170 | def doPlainWordSuite(self, prevsuite, suite): |
|
212 | def doPlainWordSuite(self, prevsuite, suite): | |
171 | # Finding the exclude lines for else's is tricky, because they aren't |
|
213 | # Finding the exclude lines for else's is tricky, because they aren't | |
172 | # present in the compiler parse tree. Look at the previous suite, |
|
214 | # present in the compiler parse tree. Look at the previous suite, | |
@@ -180,15 +222,17 b' class StatementFindingAstVisitor(compile' | |||||
180 | break |
|
222 | break | |
181 | else: |
|
223 | else: | |
182 | self.doSuite(None, suite) |
|
224 | self.doSuite(None, suite) | |
183 |
|
225 | |||
184 | def doElse(self, prevsuite, node): |
|
226 | def doElse(self, prevsuite, node): | |
185 | if node.else_: |
|
227 | if node.else_: | |
186 | self.doPlainWordSuite(prevsuite, node.else_) |
|
228 | self.doPlainWordSuite(prevsuite, node.else_) | |
187 |
|
229 | |||
188 | def visitFor(self, node): |
|
230 | def visitFor(self, node): | |
189 | self.doSuite(node, node.body) |
|
231 | self.doSuite(node, node.body) | |
190 | self.doElse(node.body, node) |
|
232 | self.doElse(node.body, node) | |
191 |
|
233 | |||
|
234 | visitWhile = visitFor | |||
|
235 | ||||
192 | def visitIf(self, node): |
|
236 | def visitIf(self, node): | |
193 | # The first test has to be handled separately from the rest. |
|
237 | # The first test has to be handled separately from the rest. | |
194 | # The first test is credited to the line with the "if", but the others |
|
238 | # The first test is credited to the line with the "if", but the others | |
@@ -198,10 +242,6 b' class StatementFindingAstVisitor(compile' | |||||
198 | self.doSuite(t, n) |
|
242 | self.doSuite(t, n) | |
199 | self.doElse(node.tests[-1][1], node) |
|
243 | self.doElse(node.tests[-1][1], node) | |
200 |
|
244 | |||
201 | def visitWhile(self, node): |
|
|||
202 | self.doSuite(node, node.body) |
|
|||
203 | self.doElse(node.body, node) |
|
|||
204 |
|
||||
205 | def visitTryExcept(self, node): |
|
245 | def visitTryExcept(self, node): | |
206 | self.doSuite(node, node.body) |
|
246 | self.doSuite(node, node.body) | |
207 | for i in range(len(node.handlers)): |
|
247 | for i in range(len(node.handlers)): | |
@@ -216,11 +256,14 b' class StatementFindingAstVisitor(compile' | |||||
216 | else: |
|
256 | else: | |
217 | self.doSuite(a, h) |
|
257 | self.doSuite(a, h) | |
218 | self.doElse(node.handlers[-1][2], node) |
|
258 | self.doElse(node.handlers[-1][2], node) | |
219 |
|
259 | |||
220 | def visitTryFinally(self, node): |
|
260 | def visitTryFinally(self, node): | |
221 | self.doSuite(node, node.body) |
|
261 | self.doSuite(node, node.body) | |
222 | self.doPlainWordSuite(node.body, node.final) |
|
262 | self.doPlainWordSuite(node.body, node.final) | |
223 |
|
263 | |||
|
264 | def visitWith(self, node): | |||
|
265 | self.doSuite(node, node.body) | |||
|
266 | ||||
224 | def visitGlobal(self, node): |
|
267 | def visitGlobal(self, node): | |
225 | # "global" statements don't execute like others (they don't call the |
|
268 | # "global" statements don't execute like others (they don't call the | |
226 | # trace function), so don't record their line numbers. |
|
269 | # trace function), so don't record their line numbers. | |
@@ -228,9 +271,9 b' class StatementFindingAstVisitor(compile' | |||||
228 |
|
271 | |||
229 | the_coverage = None |
|
272 | the_coverage = None | |
230 |
|
273 | |||
|
274 | class CoverageException(Exception): pass | |||
|
275 | ||||
231 | class coverage: |
|
276 | class coverage: | |
232 | error = "coverage error" |
|
|||
233 |
|
||||
234 | # Name of the cache file (unless environment variable is set). |
|
277 | # Name of the cache file (unless environment variable is set). | |
235 | cache_default = ".coverage" |
|
278 | cache_default = ".coverage" | |
236 |
|
279 | |||
@@ -240,7 +283,7 b' class coverage:' | |||||
240 | # A dictionary with an entry for (Python source file name, line number |
|
283 | # A dictionary with an entry for (Python source file name, line number | |
241 | # in that file) if that line has been executed. |
|
284 | # in that file) if that line has been executed. | |
242 | c = {} |
|
285 | c = {} | |
243 |
|
286 | |||
244 | # A map from canonical Python source file name to a dictionary in |
|
287 | # A map from canonical Python source file name to a dictionary in | |
245 | # which there's an entry for each line number that has been |
|
288 | # which there's an entry for each line number that has been | |
246 | # executed. |
|
289 | # executed. | |
@@ -257,53 +300,58 b' class coverage:' | |||||
257 | def __init__(self): |
|
300 | def __init__(self): | |
258 | global the_coverage |
|
301 | global the_coverage | |
259 | if the_coverage: |
|
302 | if the_coverage: | |
260 | raise |
|
303 | raise CoverageException, "Only one coverage object allowed." | |
261 | self.usecache = 1 |
|
304 | self.usecache = 1 | |
262 | self.cache = None |
|
305 | self.cache = None | |
|
306 | self.parallel_mode = False | |||
263 | self.exclude_re = '' |
|
307 | self.exclude_re = '' | |
264 | self.nesting = 0 |
|
308 | self.nesting = 0 | |
265 | self.cstack = [] |
|
309 | self.cstack = [] | |
266 | self.xstack = [] |
|
310 | self.xstack = [] | |
267 |
self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os. |
|
311 | self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep) | |
|
312 | self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]') | |||
268 |
|
313 | |||
269 | # t(f, x, y). This method is passed to sys.settrace as a trace function. |
|
314 | # t(f, x, y). This method is passed to sys.settrace as a trace function. | |
270 | # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and |
|
315 | # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and | |
271 | # the arguments and return value of the trace function. |
|
316 | # the arguments and return value of the trace function. | |
272 | # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code |
|
317 | # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code | |
273 | # objects. |
|
318 | # objects. | |
274 |
|
319 | |||
275 |
def t(self, f, w, |
|
320 | def t(self, f, w, unused): #pragma: no cover | |
276 | #print w, f.f_code.co_filename, f.f_lineno |
|
|||
277 | if w == 'line': |
|
321 | if w == 'line': | |
|
322 | #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno) | |||
278 | self.c[(f.f_code.co_filename, f.f_lineno)] = 1 |
|
323 | self.c[(f.f_code.co_filename, f.f_lineno)] = 1 | |
279 | for c in self.cstack: |
|
324 | for c in self.cstack: | |
280 | c[(f.f_code.co_filename, f.f_lineno)] = 1 |
|
325 | c[(f.f_code.co_filename, f.f_lineno)] = 1 | |
281 | return self.t |
|
326 | return self.t | |
282 |
|
327 | |||
283 | def help(self, error=None): |
|
328 | def help(self, error=None): #pragma: no cover | |
284 | if error: |
|
329 | if error: | |
285 | print error |
|
330 | print error | |
286 |
|
331 | |||
287 | print __doc__ |
|
332 | print __doc__ | |
288 | sys.exit(1) |
|
333 | sys.exit(1) | |
289 |
|
334 | |||
290 | def command_line(self): |
|
335 | def command_line(self, argv, help_fn=None): | |
291 | import getopt |
|
336 | import getopt | |
|
337 | help_fn = help_fn or self.help | |||
292 | settings = {} |
|
338 | settings = {} | |
293 | optmap = { |
|
339 | optmap = { | |
294 | '-a': 'annotate', |
|
340 | '-a': 'annotate', | |
|
341 | '-c': 'collect', | |||
295 | '-d:': 'directory=', |
|
342 | '-d:': 'directory=', | |
296 | '-e': 'erase', |
|
343 | '-e': 'erase', | |
297 | '-h': 'help', |
|
344 | '-h': 'help', | |
298 | '-i': 'ignore-errors', |
|
345 | '-i': 'ignore-errors', | |
299 | '-m': 'show-missing', |
|
346 | '-m': 'show-missing', | |
|
347 | '-p': 'parallel-mode', | |||
300 | '-r': 'report', |
|
348 | '-r': 'report', | |
301 | '-x': 'execute', |
|
349 | '-x': 'execute', | |
302 | '-o': 'omit=', |
|
350 | '-o:': 'omit=', | |
303 | } |
|
351 | } | |
304 | short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '') |
|
352 | short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '') | |
305 | long_opts = optmap.values() |
|
353 | long_opts = optmap.values() | |
306 | options, args = getopt.getopt( |
|
354 | options, args = getopt.getopt(argv, short_opts, long_opts) | |
307 | for o, a in options: |
|
355 | for o, a in options: | |
308 | if optmap.has_key(o): |
|
356 | if optmap.has_key(o): | |
309 | settings[optmap[o]] = 1 |
|
357 | settings[optmap[o]] = 1 | |
@@ -312,69 +360,84 b' class coverage:' | |||||
312 | elif o[2:] in long_opts: |
|
360 | elif o[2:] in long_opts: | |
313 | settings[o[2:]] = 1 |
|
361 | settings[o[2:]] = 1 | |
314 | elif o[2:] + '=' in long_opts: |
|
362 | elif o[2:] + '=' in long_opts: | |
315 | settings[o[2:]] = a |
|
363 | settings[o[2:]+'='] = a | |
316 | else: |
|
364 | else: #pragma: no cover | |
317 | self.help("Unknown option: '%s'." % o) |
|
365 | pass # Can't get here, because getopt won't return anything unknown. | |
|
366 | ||||
318 | if settings.get('help'): |
|
367 | if settings.get('help'): | |
319 |
|
|
368 | help_fn() | |
|
369 | ||||
320 | for i in ['erase', 'execute']: |
|
370 | for i in ['erase', 'execute']: | |
321 | for j in ['annotate', 'report']: |
|
371 | for j in ['annotate', 'report', 'collect']: | |
322 | if settings.get(i) and settings.get(j): |
|
372 | if settings.get(i) and settings.get(j): | |
323 |
|
|
373 | help_fn("You can't specify the '%s' and '%s' " | |
324 | "options at the same time." % (i, j)) |
|
374 | "options at the same time." % (i, j)) | |
|
375 | ||||
325 | args_needed = (settings.get('execute') |
|
376 | args_needed = (settings.get('execute') | |
326 | or settings.get('annotate') |
|
377 | or settings.get('annotate') | |
327 | or settings.get('report')) |
|
378 | or settings.get('report')) | |
328 | action = settings.get('erase') |
|
379 | action = (settings.get('erase') | |
|
380 | or settings.get('collect') | |||
|
381 | or args_needed) | |||
329 | if not action: |
|
382 | if not action: | |
330 |
|
|
383 | help_fn("You must specify at least one of -e, -x, -c, -r, or -a.") | |
331 | if not args_needed and args: |
|
384 | if not args_needed and args: | |
332 |
|
|
385 | help_fn("Unexpected arguments: %s" % " ".join(args)) | |
333 |
|
386 | |||
|
387 | self.parallel_mode = settings.get('parallel-mode') | |||
334 | self.get_ready() |
|
388 | self.get_ready() | |
335 | self.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]') |
|
|||
336 |
|
389 | |||
337 | if settings.get('erase'): |
|
390 | if settings.get('erase'): | |
338 | self.erase() |
|
391 | self.erase() | |
339 | if settings.get('execute'): |
|
392 | if settings.get('execute'): | |
340 | if not args: |
|
393 | if not args: | |
341 |
|
|
394 | help_fn("Nothing to do.") | |
342 | sys.argv = args |
|
395 | sys.argv = args | |
343 | self.start() |
|
396 | self.start() | |
344 | import __main__ |
|
397 | import __main__ | |
345 | sys.path[0] = os.path.dirname(sys.argv[0]) |
|
398 | sys.path[0] = os.path.dirname(sys.argv[0]) | |
346 | execfile(sys.argv[0], __main__.__dict__) |
|
399 | execfile(sys.argv[0], __main__.__dict__) | |
|
400 | if settings.get('collect'): | |||
|
401 | self.collect() | |||
347 | if not args: |
|
402 | if not args: | |
348 | args = self.cexecuted.keys() |
|
403 | args = self.cexecuted.keys() | |
|
404 | ||||
349 | ignore_errors = settings.get('ignore-errors') |
|
405 | ignore_errors = settings.get('ignore-errors') | |
350 | show_missing = settings.get('show-missing') |
|
406 | show_missing = settings.get('show-missing') | |
351 | directory = settings.get('directory') |
|
407 | directory = settings.get('directory=') | |
352 | omit = filter(None, settings.get('omit', '').split(',')) |
|
408 | ||
353 | omit += ['/<'] # Always skip /<string> etc. |
|
409 | omit = settings.get('omit=') | |
|
410 | if omit is not None: | |||
|
411 | omit = omit.split(',') | |||
|
412 | else: | |||
|
413 | omit = [] | |||
354 |
|
414 | |||
355 | if settings.get('report'): |
|
415 | if settings.get('report'): | |
356 | self.report(args, show_missing, ignore_errors, omit_prefixes=omit) |
|
416 | self.report(args, show_missing, ignore_errors, omit_prefixes=omit) | |
357 | if settings.get('annotate'): |
|
417 | if settings.get('annotate'): | |
358 | self.annotate(args, directory, ignore_errors, omit_prefixes=omit) |
|
418 | self.annotate(args, directory, ignore_errors, omit_prefixes=omit) | |
359 |
|
419 | |||
360 | def use_cache(self, usecache): |
|
420 | def use_cache(self, usecache, cache_file=None): | |
361 | self.usecache = usecache |
|
421 | self.usecache = usecache | |
362 |
|
422 | if cache_file and not self.cache: | ||
363 | def get_ready(self): |
|
423 | self.cache_default = cache_file | |
|
424 | ||||
|
425 | def get_ready(self, parallel_mode=False): | |||
364 | if self.usecache and not self.cache: |
|
426 | if self.usecache and not self.cache: | |
365 |
self.cache = os. |
|
427 | self.cache = os.environ.get(self.cache_env, self.cache_default) | |
366 | self.cache_default)) |
|
428 | if self.parallel_mode: | |
|
429 | self.cache += "." + gethostname() + "." + str(os.getpid()) | |||
367 | self.restore() |
|
430 | self.restore() | |
368 | self.analysis_cache = {} |
|
431 | self.analysis_cache = {} | |
369 |
|
432 | |||
370 | def start(self): |
|
433 | def start(self, parallel_mode=False): | |
371 | self.get_ready() |
|
434 | self.get_ready() | |
372 | if self.nesting == 0: #pragma: no cover |
|
435 | if self.nesting == 0: #pragma: no cover | |
373 | sys.settrace(self.t) |
|
436 | sys.settrace(self.t) | |
374 | if hasattr(threading, 'settrace'): |
|
437 | if hasattr(threading, 'settrace'): | |
375 | threading.settrace(self.t) |
|
438 | threading.settrace(self.t) | |
376 | self.nesting += 1 |
|
439 | self.nesting += 1 | |
377 |
|
440 | |||
378 | def stop(self): |
|
441 | def stop(self): | |
379 | self.nesting -= 1 |
|
442 | self.nesting -= 1 | |
380 | if self.nesting == 0: #pragma: no cover |
|
443 | if self.nesting == 0: #pragma: no cover | |
@@ -383,12 +446,12 b' class coverage:' | |||||
383 | threading.settrace(None) |
|
446 | threading.settrace(None) | |
384 |
|
447 | |||
385 | def erase(self): |
|
448 | def erase(self): | |
|
449 | self.get_ready() | |||
386 | self.c = {} |
|
450 | self.c = {} | |
387 | self.analysis_cache = {} |
|
451 | self.analysis_cache = {} | |
388 | self.cexecuted = {} |
|
452 | self.cexecuted = {} | |
389 | if self.cache and os.path.exists(self.cache): |
|
453 | if self.cache and os.path.exists(self.cache): | |
390 | os.remove(self.cache) |
|
454 | os.remove(self.cache) | |
391 | self.exclude_re = "" |
|
|||
392 |
|
455 | |||
393 | def exclude(self, re): |
|
456 | def exclude(self, re): | |
394 | if self.exclude_re: |
|
457 | if self.exclude_re: | |
@@ -398,7 +461,7 b' class coverage:' | |||||
398 | def begin_recursive(self): |
|
461 | def begin_recursive(self): | |
399 | self.cstack.append(self.c) |
|
462 | self.cstack.append(self.c) | |
400 | self.xstack.append(self.exclude_re) |
|
463 | self.xstack.append(self.exclude_re) | |
401 |
|
464 | |||
402 | def end_recursive(self): |
|
465 | def end_recursive(self): | |
403 | self.c = self.cstack.pop() |
|
466 | self.c = self.cstack.pop() | |
404 | self.exclude_re = self.xstack.pop() |
|
467 | self.exclude_re = self.xstack.pop() | |
@@ -406,8 +469,6 b' class coverage:' | |||||
406 | # save(). Save coverage data to the coverage cache. |
|
469 | # save(). Save coverage data to the coverage cache. | |
407 |
|
470 | |||
408 | def save(self): |
|
471 | def save(self): | |
409 | # move to directory that must exist. |
|
|||
410 | os.chdir(os.sep) |
|
|||
411 | if self.usecache and self.cache: |
|
472 | if self.usecache and self.cache: | |
412 | self.canonicalize_filenames() |
|
473 | self.canonicalize_filenames() | |
413 | cache = open(self.cache, 'wb') |
|
474 | cache = open(self.cache, 'wb') | |
@@ -421,17 +482,45 b' class coverage:' | |||||
421 | self.c = {} |
|
482 | self.c = {} | |
422 | self.cexecuted = {} |
|
483 | self.cexecuted = {} | |
423 | assert self.usecache |
|
484 | assert self.usecache | |
424 |
if |
|
485 | if os.path.exists(self.cache): | |
425 | return |
|
486 | self.cexecuted = self.restore_file(self.cache) | |
|
487 | ||||
|
488 | def restore_file(self, file_name): | |||
426 | try: |
|
489 | try: | |
427 |
cache = open( |
|
490 | cache = open(file_name, 'rb') | |
428 | import marshal |
|
491 | import marshal | |
429 | cexecuted = marshal.load(cache) |
|
492 | cexecuted = marshal.load(cache) | |
430 | cache.close() |
|
493 | cache.close() | |
431 | if isinstance(cexecuted, types.DictType): |
|
494 | if isinstance(cexecuted, types.DictType): | |
432 |
|
|
495 | return cexecuted | |
|
496 | else: | |||
|
497 | return {} | |||
433 | except: |
|
498 | except: | |
434 |
|
|
499 | return {} | |
|
500 | ||||
|
501 | # collect(). Collect data in multiple files produced by parallel mode | |||
|
502 | ||||
|
503 | def collect(self): | |||
|
504 | cache_dir, local = os.path.split(self.cache) | |||
|
505 | for f in os.listdir(cache_dir or '.'): | |||
|
506 | if not f.startswith(local): | |||
|
507 | continue | |||
|
508 | ||||
|
509 | full_path = os.path.join(cache_dir, f) | |||
|
510 | cexecuted = self.restore_file(full_path) | |||
|
511 | self.merge_data(cexecuted) | |||
|
512 | ||||
|
513 | def merge_data(self, new_data): | |||
|
514 | for file_name, file_data in new_data.items(): | |||
|
515 | if self.cexecuted.has_key(file_name): | |||
|
516 | self.merge_file_data(self.cexecuted[file_name], file_data) | |||
|
517 | else: | |||
|
518 | self.cexecuted[file_name] = file_data | |||
|
519 | ||||
|
520 | def merge_file_data(self, cache_data, new_data): | |||
|
521 | for line_number in new_data.keys(): | |||
|
522 | if not cache_data.has_key(line_number): | |||
|
523 | cache_data[line_number] = new_data[line_number] | |||
435 |
|
524 | |||
436 | # canonical_filename(filename). Return a canonical filename for the |
|
525 | # canonical_filename(filename). Return a canonical filename for the | |
437 | # file (that is, an absolute path with no redundant components and |
|
526 | # file (that is, an absolute path with no redundant components and | |
@@ -452,11 +541,14 b' class coverage:' | |||||
452 | self.canonical_filename_cache[filename] = cf |
|
541 | self.canonical_filename_cache[filename] = cf | |
453 | return self.canonical_filename_cache[filename] |
|
542 | return self.canonical_filename_cache[filename] | |
454 |
|
543 | |||
455 | # canonicalize_filenames(). Copy results from "c" to "cexecuted", |
|
544 | # canonicalize_filenames(). Copy results from "c" to "cexecuted", | |
456 | # canonicalizing filenames on the way. Clear the "c" map. |
|
545 | # canonicalizing filenames on the way. Clear the "c" map. | |
457 |
|
546 | |||
458 | def canonicalize_filenames(self): |
|
547 | def canonicalize_filenames(self): | |
459 | for filename, lineno in self.c.keys(): |
|
548 | for filename, lineno in self.c.keys(): | |
|
549 | if filename == '<string>': | |||
|
550 | # Can't do anything useful with exec'd strings, so skip them. | |||
|
551 | continue | |||
460 | f = self.canonical_filename(filename) |
|
552 | f = self.canonical_filename(filename) | |
461 | if not self.cexecuted.has_key(f): |
|
553 | if not self.cexecuted.has_key(f): | |
462 | self.cexecuted[f] = {} |
|
554 | self.cexecuted[f] = {} | |
@@ -468,18 +560,20 b' class coverage:' | |||||
468 | def morf_filename(self, morf): |
|
560 | def morf_filename(self, morf): | |
469 | if isinstance(morf, types.ModuleType): |
|
561 | if isinstance(morf, types.ModuleType): | |
470 | if not hasattr(morf, '__file__'): |
|
562 | if not hasattr(morf, '__file__'): | |
471 |
raise |
|
563 | raise CoverageException, "Module has no __file__ attribute." | |
472 |
f |
|
564 | f = morf.__file__ | |
473 | else: |
|
565 | else: | |
474 |
f |
|
566 | f = morf | |
475 |
return self.canonical_filename(f |
|
567 | return self.canonical_filename(f) | |
476 |
|
568 | |||
477 | # analyze_morf(morf). Analyze the module or filename passed as |
|
569 | # analyze_morf(morf). Analyze the module or filename passed as | |
478 | # the argument. If the source code can't be found, raise an error. |
|
570 | # the argument. If the source code can't be found, raise an error. | |
479 | # Otherwise, return a tuple of (1) the canonical filename of the |
|
571 | # Otherwise, return a tuple of (1) the canonical filename of the | |
480 | # source code for the module, (2) a list of lines of statements |
|
572 | # source code for the module, (2) a list of lines of statements | |
481 |
# in the source code, |
|
573 | # in the source code, (3) a list of lines of excluded statements, | |
482 |
|
574 | # and (4), a map of line numbers to multi-line line number ranges, for | ||
|
575 | # statements that cross lines. | |||
|
576 | ||||
483 | def analyze_morf(self, morf): |
|
577 | def analyze_morf(self, morf): | |
484 | if self.analysis_cache.has_key(morf): |
|
578 | if self.analysis_cache.has_key(morf): | |
485 | return self.analysis_cache[morf] |
|
579 | return self.analysis_cache[morf] | |
@@ -487,30 +581,69 b' class coverage:' | |||||
487 | ext = os.path.splitext(filename)[1] |
|
581 | ext = os.path.splitext(filename)[1] | |
488 | if ext == '.pyc': |
|
582 | if ext == '.pyc': | |
489 | if not os.path.exists(filename[0:-1]): |
|
583 | if not os.path.exists(filename[0:-1]): | |
490 |
raise |
|
584 | raise CoverageException, ("No source for compiled code '%s'." | |
491 | % filename) |
|
585 | % filename) | |
492 | filename = filename[0:-1] |
|
586 | filename = filename[0:-1] | |
493 | elif ext != '.py': |
|
587 | elif ext != '.py': | |
494 |
raise |
|
588 | raise CoverageException, "File '%s' not Python source." % filename | |
495 | source = open(filename, 'r') |
|
589 | source = open(filename, 'r') | |
496 | lines, excluded_lines = self.find_executable_statements( |
|
590 | lines, excluded_lines, line_map = self.find_executable_statements( | |
497 | source.read(), exclude=self.exclude_re |
|
591 | source.read(), exclude=self.exclude_re | |
498 | ) |
|
592 | ) | |
499 | source.close() |
|
593 | source.close() | |
500 | result = filename, lines, excluded_lines |
|
594 | result = filename, lines, excluded_lines, line_map | |
501 | self.analysis_cache[morf] = result |
|
595 | self.analysis_cache[morf] = result | |
502 | return result |
|
596 | return result | |
503 |
|
597 | |||
|
598 | def first_line_of_tree(self, tree): | |||
|
599 | while True: | |||
|
600 | if len(tree) == 3 and type(tree[2]) == type(1): | |||
|
601 | return tree[2] | |||
|
602 | tree = tree[1] | |||
|
603 | ||||
|
604 | def last_line_of_tree(self, tree): | |||
|
605 | while True: | |||
|
606 | if len(tree) == 3 and type(tree[2]) == type(1): | |||
|
607 | return tree[2] | |||
|
608 | tree = tree[-1] | |||
|
609 | ||||
|
610 | def find_docstring_pass_pair(self, tree, spots): | |||
|
611 | for i in range(1, len(tree)): | |||
|
612 | if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]): | |||
|
613 | first_line = self.first_line_of_tree(tree[i]) | |||
|
614 | last_line = self.last_line_of_tree(tree[i+1]) | |||
|
615 | self.record_multiline(spots, first_line, last_line) | |||
|
616 | ||||
|
617 | def is_string_constant(self, tree): | |||
|
618 | try: | |||
|
619 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt | |||
|
620 | except: | |||
|
621 | return False | |||
|
622 | ||||
|
623 | def is_pass_stmt(self, tree): | |||
|
624 | try: | |||
|
625 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt | |||
|
626 | except: | |||
|
627 | return False | |||
|
628 | ||||
|
629 | def record_multiline(self, spots, i, j): | |||
|
630 | for l in range(i, j+1): | |||
|
631 | spots[l] = (i, j) | |||
|
632 | ||||
504 | def get_suite_spots(self, tree, spots): |
|
633 | def get_suite_spots(self, tree, spots): | |
505 | import symbol, token |
|
634 | """ Analyze a parse tree to find suite introducers which span a number | |
|
635 | of lines. | |||
|
636 | """ | |||
506 | for i in range(1, len(tree)): |
|
637 | for i in range(1, len(tree)): | |
507 |
if |
|
638 | if type(tree[i]) == type(()): | |
508 | if tree[i][0] == symbol.suite: |
|
639 | if tree[i][0] == symbol.suite: | |
509 | # Found a suite, look back for the colon and keyword. |
|
640 | # Found a suite, look back for the colon and keyword. | |
510 | lineno_colon = lineno_word = None |
|
641 | lineno_colon = lineno_word = None | |
511 | for j in range(i-1, 0, -1): |
|
642 | for j in range(i-1, 0, -1): | |
512 | if tree[j][0] == token.COLON: |
|
643 | if tree[j][0] == token.COLON: | |
513 | lineno_colon = tree[j][2] |
|
644 | # Colons are never executed themselves: we want the | |
|
645 | # line number of the last token before the colon. | |||
|
646 | lineno_colon = self.last_line_of_tree(tree[j-1]) | |||
514 | elif tree[j][0] == token.NAME: |
|
647 | elif tree[j][0] == token.NAME: | |
515 | if tree[j][1] == 'elif': |
|
648 | if tree[j][1] == 'elif': | |
516 | # Find the line number of the first non-terminal |
|
649 | # Find the line number of the first non-terminal | |
@@ -532,8 +665,18 b' class coverage:' | |||||
532 | if lineno_colon and lineno_word: |
|
665 | if lineno_colon and lineno_word: | |
533 | # Found colon and keyword, mark all the lines |
|
666 | # Found colon and keyword, mark all the lines | |
534 | # between the two with the two line numbers. |
|
667 | # between the two with the two line numbers. | |
535 |
for |
|
668 | self.record_multiline(spots, lineno_word, lineno_colon) | |
536 | spots[l] = (lineno_word, lineno_colon) |
|
669 | ||
|
670 | # "pass" statements are tricky: different versions of Python | |||
|
671 | # treat them differently, especially in the common case of a | |||
|
672 | # function with a doc string and a single pass statement. | |||
|
673 | self.find_docstring_pass_pair(tree[i], spots) | |||
|
674 | ||||
|
675 | elif tree[i][0] == symbol.simple_stmt: | |||
|
676 | first_line = self.first_line_of_tree(tree[i]) | |||
|
677 | last_line = self.last_line_of_tree(tree[i]) | |||
|
678 | if first_line != last_line: | |||
|
679 | self.record_multiline(spots, first_line, last_line) | |||
537 | self.get_suite_spots(tree[i], spots) |
|
680 | self.get_suite_spots(tree[i], spots) | |
538 |
|
681 | |||
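The docstring/pass special case exists because, as the comment above notes, interpreters differ in which line they report for a body that is only a doc string followed by pass; pairing the two lines means a hit on either one marks the whole pair. A hedged illustration of the kind of function affected (the example is made up):

    def placeholder():
        """Intentionally does nothing yet."""
        pass

    # find_docstring_pass_pair records the docstring line and the pass line
    # as one span, e.g. spots == {2: (2, 3), 3: (2, 3)} for the snippet above,
    # so the statement counts as covered whichever line the tracer reports.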
539 | def find_executable_statements(self, text, exclude=None): |
|
682 | def find_executable_statements(self, text, exclude=None): | |
@@ -547,10 +690,13 b' class coverage:' | |||||
547 | if reExclude.search(lines[i]): |
|
690 | if reExclude.search(lines[i]): | |
548 | excluded[i+1] = 1 |
|
691 | excluded[i+1] = 1 | |
549 |
|
692 | |||
|
693 | # Parse the code and analyze the parse tree to find out which statements | |||
|
694 | # are multiline, and where suites begin and end. | |||
550 | import parser |
|
695 | import parser | |
551 | tree = parser.suite(text+'\n\n').totuple(1) |
|
696 | tree = parser.suite(text+'\n\n').totuple(1) | |
552 | self.get_suite_spots(tree, suite_spots) |
|
697 | self.get_suite_spots(tree, suite_spots) | |
553 |
|
698 | #print "Suite spots:", suite_spots | ||
|
699 | ||||
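The parse-tree pass relies on the stdlib parser module (available on the Python versions this file targets): suite() builds a concrete syntax tree from the source, and totuple(1) turns it into nested tuples whose leaf tokens carry a line number as their third element, which is exactly what first_line_of_tree and last_line_of_tree walk. A rough sketch, with the tuple contents abbreviated:

    import parser

    source = "def f(x):\n    return x + 1\n"
    tree = parser.suite(source + "\n\n").totuple(1)
    # tree is a nested tuple along the lines of
    #   (257, (..., (1, 'def', 1), ...), ...)
    # where a leaf looks like (token_number, token_string, line_number).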
554 | # Use the compiler module to parse the text and find the executable |
|
700 | # Use the compiler module to parse the text and find the executable | |
555 | # statements. We add newlines to be impervious to final partial lines. |
|
701 | # statements. We add newlines to be impervious to final partial lines. | |
556 | statements = {} |
|
702 | statements = {} | |
@@ -562,7 +708,7 b' class coverage:' | |||||
562 | lines.sort() |
|
708 | lines.sort() | |
563 | excluded_lines = excluded.keys() |
|
709 | excluded_lines = excluded.keys() | |
564 | excluded_lines.sort() |
|
710 | excluded_lines.sort() | |
565 | return lines, excluded_lines |
|
711 | return lines, excluded_lines, suite_spots | |
566 |
|
712 | |||
567 | # format_lines(statements, lines). Format a list of line numbers |
|
713 | # format_lines(statements, lines). Format a list of line numbers | |
568 | # for printing by coalescing groups of lines as long as the lines |
|
714 | # for printing by coalescing groups of lines as long as the lines | |
@@ -595,7 +741,8 b' class coverage:' | |||||
595 | return "%d" % start |
|
741 | return "%d" % start | |
596 | else: |
|
742 | else: | |
597 | return "%d-%d" % (start, end) |
|
743 | return "%d-%d" % (start, end) | |
598 | ret |
|
744 | ret = string.join(map(stringify, pairs), ", ") | |
|
745 | return ret | |||
599 |
|
746 | |||
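format_lines condenses a sorted list of missing line numbers into the compact ranges shown in reports: consecutive statement lines are coalesced into (start, end) pairs, stringify renders each pair, and string.join glues them with ", ". For example (values are illustrative):

    # missing lines [1, 2, 3, 5, 7, 8] coalesce into pairs
    pairs = [(1, 3), (5, 5), (7, 8)]
    # stringify((1, 3)) -> "1-3", stringify((5, 5)) -> "5"
    # string.join(..., ", ") -> "1-3, 5, 7-8"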
600 | # Backward compatibility with version 1. |
|
747 | # Backward compatibility with version 1. | |
601 | def analysis(self, morf): |
|
748 | def analysis(self, morf): | |
@@ -603,13 +750,17 b' class coverage:' | |||||
603 | return f, s, m, mf |
|
750 | return f, s, m, mf | |
604 |
|
751 | |||
605 | def analysis2(self, morf): |
|
752 | def analysis2(self, morf): | |
606 | filename, statements, excluded = self.analyze_morf(morf) |
|
753 | filename, statements, excluded, line_map = self.analyze_morf(morf) | |
607 | self.canonicalize_filenames() |
|
754 | self.canonicalize_filenames() | |
608 | if not self.cexecuted.has_key(filename): |
|
755 | if not self.cexecuted.has_key(filename): | |
609 | self.cexecuted[filename] = {} |
|
756 | self.cexecuted[filename] = {} | |
610 | missing = [] |
|
757 | missing = [] | |
611 | for line in statements: |
|
758 | for line in statements: | |
612 | if not self.cexecuted[filename].has_key(line): |
|
759 | lines = line_map.get(line, [line, line]) | |
|
760 | for l in range(lines[0], lines[1]+1): | |||
|
761 | if self.cexecuted[filename].has_key(l): | |||
|
762 | break | |||
|
763 | else: | |||
613 | missing.append(line) |
|
764 | missing.append(line) | |
614 | return (filename, statements, excluded, missing, |
|
765 | return (filename, statements, excluded, missing, | |
615 | self.format_lines(statements, missing)) |
|
766 | self.format_lines(statements, missing)) | |
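The rewritten loop consults the suite-spot map so a multi-line statement counts as covered if any of its physical lines was executed: the inner for/else only appends to missing when no line in the span was hit. A simplified sketch of that check (the dictionaries are made-up data):

    line_map = {10: [10, 12]}      # statement starting at line 10 spans 10-12
    cexecuted = {12: 1}            # the tracer reported a hit on line 12
    missing = []
    for line in [10]:
        first, last = line_map.get(line, [line, line])
        for l in range(first, last + 1):
            if l in cexecuted:
                break              # some line of the span ran
        else:
            missing.append(line)   # no line of the span ran
    # missing stays empty: line 10 is covered via the hit on line 12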
@@ -647,6 +798,15 b' class coverage:' | |||||
647 | def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]): |
|
798 | def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]): | |
648 | if not isinstance(morfs, types.ListType): |
|
799 | if not isinstance(morfs, types.ListType): | |
649 | morfs = [morfs] |
|
800 | morfs = [morfs] | |
|
801 | # On windows, the shell doesn't expand wildcards. Do it here. | |||
|
802 | globbed = [] | |||
|
803 | for morf in morfs: | |||
|
804 | if isinstance(morf, strclass): | |||
|
805 | globbed.extend(glob.glob(morf)) | |||
|
806 | else: | |||
|
807 | globbed.append(morf) | |||
|
808 | morfs = globbed | |||
|
809 | ||||
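Because the Windows shell passes wildcard arguments through unexpanded, string entries are expanded with glob.glob before reporting while module objects are kept as they are. A small standalone sketch of the same idea (the file pattern is hypothetical):

    import glob, sys

    morfs = ["tests/test_*.py", sys]        # a mix of path patterns and modules
    globbed = []
    for morf in morfs:
        if isinstance(morf, str):           # the patch tests against strclass
            globbed.extend(glob.glob(morf)) # expand the wildcard, if any
        else:
            globbed.append(morf)
    morfs = globbed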
650 | morfs = self.filter_by_prefix(morfs, omit_prefixes) |
|
810 | morfs = self.filter_by_prefix(morfs, omit_prefixes) | |
651 | morfs.sort(self.morf_name_compare) |
|
811 | morfs.sort(self.morf_name_compare) | |
652 |
|
812 | |||
@@ -684,8 +844,8 b' class coverage:' | |||||
684 | raise |
|
844 | raise | |
685 | except: |
|
845 | except: | |
686 | if not ignore_errors: |
|
846 | if not ignore_errors: | |
687 | typ |
|
847 | typ, msg = sys.exc_info()[0:2] | |
688 | print >>file, fmt_err % (name, typ |
|
848 | print >>file, fmt_err % (name, typ, msg) | |
689 | if len(morfs) > 1: |
|
849 | if len(morfs) > 1: | |
690 | print >>file, "-" * len(header) |
|
850 | print >>file, "-" * len(header) | |
691 | if total_statements > 0: |
|
851 | if total_statements > 0: | |
@@ -713,7 +873,7 b' class coverage:' | |||||
713 | except: |
|
873 | except: | |
714 | if not ignore_errors: |
|
874 | if not ignore_errors: | |
715 | raise |
|
875 | raise | |
716 |
|
876 | |||
717 | def annotate_file(self, filename, statements, excluded, missing, directory=None): |
|
877 | def annotate_file(self, filename, statements, excluded, missing, directory=None): | |
718 | source = open(filename, 'r') |
|
878 | source = open(filename, 'r') | |
719 | if directory: |
|
879 | if directory: | |
@@ -741,7 +901,7 b' class coverage:' | |||||
741 | if self.blank_re.match(line): |
|
901 | if self.blank_re.match(line): | |
742 | dest.write(' ') |
|
902 | dest.write(' ') | |
743 | elif self.else_re.match(line): |
|
903 | elif self.else_re.match(line): | |
744 | # Special logic for lines containing only 'else:'. |
|
904 | # Special logic for lines containing only 'else:'. | |
745 | # See [GDR 2001-12-04b, 3.2]. |
|
905 | # See [GDR 2001-12-04b, 3.2]. | |
746 | if i >= len(statements) and j >= len(missing): |
|
906 | if i >= len(statements) and j >= len(missing): | |
747 | dest.write('! ') |
|
907 | dest.write('! ') | |
@@ -765,18 +925,41 b' class coverage:' | |||||
765 | the_coverage = coverage() |
|
925 | the_coverage = coverage() | |
766 |
|
926 | |||
767 | # Module functions call methods in the singleton object. |
|
927 | # Module functions call methods in the singleton object. | |
768 | def use_cache(*args, **kw): return the_coverage.use_cache(*args, **kw) |
|
928 | def use_cache(*args, **kw): | |
769 |
|
|
929 | return the_coverage.use_cache(*args, **kw) | |
770 | def stop(*args, **kw): return the_coverage.stop(*args, **kw) |
|
930 | ||
771 | def erase(*args, **kw): return the_coverage.erase(*args, **kw) |
|
931 | def start(*args, **kw): | |
772 | def begin_recursive(*args, **kw): return the_coverage.begin_recursive(*args, **kw) |
|
932 | return the_coverage.start(*args, **kw) | |
773 | def end_recursive(*args, **kw): return the_coverage.end_recursive(*args, **kw) |
|
933 | ||
774 | def exclude(*args, **kw): return the_coverage.exclude(*args, **kw) |
|
934 | def stop(*args, **kw): | |
775 |
|
|
935 | return the_coverage.stop(*args, **kw) | |
776 | def analysis2(*args, **kw): return the_coverage.analysis2(*args, **kw) |
|
936 | ||
777 | def report(*args, **kw): return the_coverage.report(*args, **kw) |
|
937 | def erase(*args, **kw): | |
778 |
|
|
938 | return the_coverage.erase(*args, **kw) | |
779 | def annotate_file(*args, **kw): return the_coverage.annotate_file(*args, **kw) |
|
939 | ||
|
940 | def begin_recursive(*args, **kw): | |||
|
941 | return the_coverage.begin_recursive(*args, **kw) | |||
|
942 | ||||
|
943 | def end_recursive(*args, **kw): | |||
|
944 | return the_coverage.end_recursive(*args, **kw) | |||
|
945 | ||||
|
946 | def exclude(*args, **kw): | |||
|
947 | return the_coverage.exclude(*args, **kw) | |||
|
948 | ||||
|
949 | def analysis(*args, **kw): | |||
|
950 | return the_coverage.analysis(*args, **kw) | |||
|
951 | ||||
|
952 | def analysis2(*args, **kw): | |||
|
953 | return the_coverage.analysis2(*args, **kw) | |||
|
954 | ||||
|
955 | def report(*args, **kw): | |||
|
956 | return the_coverage.report(*args, **kw) | |||
|
957 | ||||
|
958 | def annotate(*args, **kw): | |||
|
959 | return the_coverage.annotate(*args, **kw) | |||
|
960 | ||||
|
961 | def annotate_file(*args, **kw): | |||
|
962 | return the_coverage.annotate_file(*args, **kw) | |||
780 |
|
963 | |||
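Splitting the one-line wrappers onto separate lines changes formatting only: the module still exposes a procedural API in which every function forwards to the shared the_coverage instance. The pattern in miniature (names here are illustrative):

    class _Coverage(object):
        def start(self):
            return "started"

    _singleton = _Coverage()

    def start(*args, **kw):
        # module-level convenience wrapper around the shared instance
        return _singleton.start(*args, **kw)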
781 | # Save coverage data when Python exits. (The atexit module wasn't |
|
964 | # Save coverage data when Python exits. (The atexit module wasn't | |
782 | # introduced until Python 2.0, so use sys.exitfunc when it's not |
|
965 | # introduced until Python 2.0, so use sys.exitfunc when it's not | |
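The save-on-exit hook registers with atexit when it exists and falls back to assigning sys.exitfunc on older interpreters, which is what the except ImportError branch in the following hunk handles. A hedged sketch of that registration pattern (save() is a stand-in for the real save method):

    import sys

    def save():
        pass          # stand-in for the_coverage.save()

    try:
        import atexit
        atexit.register(save)
    except ImportError:
        sys.exitfunc = save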
@@ -789,7 +972,7 b' except ImportError:' | |||||
789 |
|
972 | |||
790 | # Command-line interface. |
|
973 | # Command-line interface. | |
791 | if __name__ == '__main__': |
|
974 | if __name__ == '__main__': | |
792 | the_coverage.command_line() |
|
975 | the_coverage.command_line(sys.argv[1:]) | |
793 |
|
976 | |||
794 |
|
977 | |||
795 | # A. REFERENCES |
|
978 | # A. REFERENCES | |
@@ -850,7 +1033,7 b" if __name__ == '__main__':" | |||||
850 | # Thanks, Allen. |
|
1033 | # Thanks, Allen. | |
851 | # |
|
1034 | # | |
852 | # 2005-12-02 NMB Call threading.settrace so that all threads are measured. |
|
1035 | # 2005-12-02 NMB Call threading.settrace so that all threads are measured. | |
853 | # Thanks Martin Fuzzey. Add a file argument to report so that reports can be |
|
1036 | # Thanks Martin Fuzzey. Add a file argument to report so that reports can be | |
854 | # captured to a different destination. |
|
1037 | # captured to a different destination. | |
855 | # |
|
1038 | # | |
856 | # 2005-12-03 NMB coverage.py can now measure itself. |
|
1039 | # 2005-12-03 NMB coverage.py can now measure itself. | |
@@ -858,10 +1041,46 b" if __name__ == '__main__':" | |||||
858 | # 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames, |
|
1041 | # 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames, | |
859 | # and sorting and omitting files to report on. |
|
1042 | # and sorting and omitting files to report on. | |
860 | # |
|
1043 | # | |
|
1044 | # 2006-07-23 NMB Applied Joseph Tate's patch for function decorators. | |||
|
1045 | # | |||
|
1046 | # 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument | |||
|
1047 | # handling. | |||
|
1048 | # | |||
|
1049 | # 2006-08-22 NMB Applied Geoff Bache's parallel mode patch. | |||
|
1050 | # | |||
|
1051 | # 2006-08-23 NMB Refactorings to improve testability. Fixes to command-line | |||
|
1052 | # logic for parallel mode and collect. | |||
|
1053 | # | |||
|
1054 | # 2006-08-25 NMB "#pragma: nocover" is excluded by default. | |||
|
1055 | # | |||
|
1056 | # 2006-09-10 NMB Properly ignore docstrings and other constant expressions that | |||
|
1057 | # appear in the middle of a function, a problem reported by Tim Leslie. | |||
|
1058 | # Minor changes to avoid lint warnings. | |||
|
1059 | # | |||
|
1060 | # 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex. | |||
|
1061 | # Change how parallel mode is invoked, and fix erase() so that it erases the | |||
|
1062 | # cache when called programmatically. | |||
|
1063 | # | |||
|
1064 | # 2007-07-21 NMB In reports, ignore code executed from strings, since we can't | |||
|
1065 | # do anything useful with it anyway. | |||
|
1066 | # Better file handling on Linux, thanks Guillaume Chazarain. | |||
|
1067 | # Better shell support on Windows, thanks Noel O'Boyle. | |||
|
1068 | # Python 2.2 support maintained, thanks Catherine Proulx. | |||
|
1069 | # | |||
|
1070 | # 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with | |||
|
1071 | # multi-line statements is now less sensitive to the exact line that Python | |||
|
1072 | # reports during execution. Pass statements are handled specially so that their | |||
|
1073 | # disappearance during execution won't throw off the measurement. | |||
|
1074 | # | |||
|
1075 | # 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the | |||
|
1076 | # new with statement is counted as executable. | |||
|
1077 | # | |||
|
1078 | # 2007-07-29 NMB Better packaging. | |||
|
1079 | ||||
861 | # C. COPYRIGHT AND LICENCE |
|
1080 | # C. COPYRIGHT AND LICENCE | |
862 | # |
|
1081 | # | |
863 | # Copyright 2001 Gareth Rees. All rights reserved. |
|
1082 | # Copyright 2001 Gareth Rees. All rights reserved. | |
864 | # Copyright 2004-200 |
|
1083 | # Copyright 2004-2007 Ned Batchelder. All rights reserved. | |
865 | # |
|
1084 | # | |
866 | # Redistribution and use in source and binary forms, with or without |
|
1085 | # Redistribution and use in source and binary forms, with or without | |
867 | # modification, are permitted provided that the following conditions are |
|
1086 | # modification, are permitted provided that the following conditions are | |
@@ -888,4 +1107,4 b" if __name__ == '__main__':" | |||||
888 | # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH |
|
1107 | # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH | |
889 | # DAMAGE. |
|
1108 | # DAMAGE. | |
890 | # |
|
1109 | # | |
891 | # $Id: coverage.py |
|
1110 | # $Id: coverage.py 74 2007-07-29 22:28:35Z nedbat $ |
@@ -27,7 +27,7 b' b=$root/b' | |||||
27 | EOF |
|
27 | EOF | |
28 |
|
28 | |||
29 | hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \ |
|
29 | hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \ | |
30 | -A access-paths.log -E error-paths.log |
|
30 | -A access-paths.log -E error-paths-1.log | |
31 | cat hg.pid >> $DAEMON_PIDS |
|
31 | cat hg.pid >> $DAEMON_PIDS | |
32 |
|
32 | |||
33 | echo % should give a 404 - file does not exist |
|
33 | echo % should give a 404 - file does not exist | |
@@ -48,7 +48,7 b' b=$root/b' | |||||
48 | EOF |
|
48 | EOF | |
49 |
|
49 | |||
50 | hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ |
|
50 | hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ | |
51 | -A access-paths.log -E error-paths.log |
|
51 | -A access-paths.log -E error-paths-2.log | |
52 | cat hg.pid >> $DAEMON_PIDS |
|
52 | cat hg.pid >> $DAEMON_PIDS | |
53 |
|
53 | |||
54 | echo % should succeed, slashy names |
|
54 | echo % should succeed, slashy names | |
@@ -75,3 +75,10 b' echo % should succeed' | |||||
75 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw' |
|
75 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw' | |
76 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw' |
|
76 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw' | |
77 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw' |
|
77 | "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw' | |
|
78 | ||||
|
79 | echo % paths errors 1 | |||
|
80 | cat error-paths-1.log | |||
|
81 | echo % paths errors 2 | |||
|
82 | cat error-paths-2.log | |||
|
83 | echo % collections errors | |||
|
84 | cat error-collections.log |