templater: provide the standard template filters by default
Dirkjan Ochtman
r8360:acc202b7 default
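
Before this change, code that built a templater (including both files in the diff below) had to import mercurial.templatefilters itself and pass or mutate the shared filters dict; after it, the templater supplies the standard template filters by default and callers only hand in the extra, instance-specific filters they need. The snippet below is only an illustrative, self-contained sketch of that merge-on-construction idea, with made-up names (STANDARD_FILTERS, MiniTemplater); it is not the actual mercurial/templater.py change, which is not part of this diff.

    # Illustrative sketch only: a templater-like object that starts from the
    # standard filters and merges caller-supplied extras on top of them.
    STANDARD_FILTERS = {
        'escape': lambda s: s.replace('&', '&amp;').replace('<', '&lt;'),
        'strip': lambda s: s.strip(),
    }

    class MiniTemplater(object):
        def __init__(self, extra_filters=None, cache=None):
            # copy the defaults so each instance stays independent
            self.filters = STANDARD_FILTERS.copy()
            self.filters.update(extra_filters or {})
            self.cache = cache or {}

    t = MiniTemplater(extra_filters={'formatnode': lambda x: x[:12]})
    assert 'escape' in t.filters and 'formatnode' in t.filters

The copy-then-update step is exactly what the old caller code in cmdutil.py did by hand; moving it into the templater removes that duplication from every caller.
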
@@ -1,62 +1,60 b''
1 # highlight.py - highlight extension implementation file
1 # highlight.py - highlight extension implementation file
2 #
2 #
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7 #
7 #
8 # The original module was split in an interface and an implementation
8 # The original module was split in an interface and an implementation
9 # file to defer pygments loading and speedup extension setup.
9 # file to defer pygments loading and speedup extension setup.
10
10
11 from mercurial import demandimport
11 from mercurial import demandimport
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
13
14 from mercurial import util, encoding
13 from mercurial import util, encoding
15 from mercurial.templatefilters import filters
16
14
17 from pygments import highlight
15 from pygments import highlight
18 from pygments.util import ClassNotFound
16 from pygments.util import ClassNotFound
19 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
17 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
20 from pygments.formatters import HtmlFormatter
18 from pygments.formatters import HtmlFormatter
21
19
22 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
20 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
23 'type="text/css" />')
21 'type="text/css" />')
24
22
25 def pygmentize(field, fctx, style, tmpl):
23 def pygmentize(field, fctx, style, tmpl):
26
24
27 # append a <link ...> to the syntax highlighting css
25 # append a <link ...> to the syntax highlighting css
28 old_header = ''.join(tmpl('header'))
26 old_header = ''.join(tmpl('header'))
29 if SYNTAX_CSS not in old_header:
27 if SYNTAX_CSS not in old_header:
30 new_header = old_header + SYNTAX_CSS
28 new_header = old_header + SYNTAX_CSS
31 tmpl.cache['header'] = new_header
29 tmpl.cache['header'] = new_header
32
30
33 text = fctx.data()
31 text = fctx.data()
34 if util.binary(text):
32 if util.binary(text):
35 return
33 return
36
34
37 # avoid UnicodeDecodeError in pygments
35 # avoid UnicodeDecodeError in pygments
38 text = encoding.tolocal(text)
36 text = encoding.tolocal(text)
39
37
40 # To get multi-line strings right, we can't format line-by-line
38 # To get multi-line strings right, we can't format line-by-line
41 try:
39 try:
42 lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
40 lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
43 encoding=encoding.encoding)
41 encoding=encoding.encoding)
44 except (ClassNotFound, ValueError):
42 except (ClassNotFound, ValueError):
45 try:
43 try:
46 lexer = guess_lexer(text[:1024], encoding=encoding.encoding)
44 lexer = guess_lexer(text[:1024], encoding=encoding.encoding)
47 except (ClassNotFound, ValueError):
45 except (ClassNotFound, ValueError):
48 lexer = TextLexer(encoding=encoding.encoding)
46 lexer = TextLexer(encoding=encoding.encoding)
49
47
50 formatter = HtmlFormatter(style=style, encoding=encoding.encoding)
48 formatter = HtmlFormatter(style=style, encoding=encoding.encoding)
51
49
52 colorized = highlight(text, lexer, formatter)
50 colorized = highlight(text, lexer, formatter)
53 # strip wrapping div
51 # strip wrapping div
54 colorized = colorized[:colorized.find('\n</pre>')]
52 colorized = colorized[:colorized.find('\n</pre>')]
55 colorized = colorized[colorized.find('<pre>')+5:]
53 colorized = colorized[colorized.find('<pre>')+5:]
56 coloriter = iter(colorized.splitlines())
54 coloriter = iter(colorized.splitlines())
57
55
58 filters['colorize'] = lambda x: coloriter.next()
56 tmpl.filters['colorize'] = lambda x: coloriter.next()
59
57
60 oldl = tmpl.cache[field]
58 oldl = tmpl.cache[field]
61 newl = oldl.replace('line|escape', 'line|colorize')
59 newl = oldl.replace('line|escape', 'line|colorize')
62 tmpl.cache[field] = newl
60 tmpl.cache[field] = newl
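
In the highlight.py hunk above, the extension stops importing the global mercurial.templatefilters.filters dict and instead registers its one-shot 'colorize' filter on the template instance it was handed (tmpl.filters['colorize'] = ...), so the filter lives and dies with that template rather than leaking into global state. A minimal stand-alone sketch of that per-instance registration pattern, with hypothetical names (Template, render) that are not hg APIs:

    # Hypothetical sketch of per-instance filter registration, mirroring the
    # tmpl.filters['colorize'] assignment in the hunk above.
    class Template(object):
        def __init__(self, filters=None):
            self.filters = dict(filters or {})

        def render(self, text):
            for name, fn in self.filters.items():
                text = fn(text)
            return text

    colorized_lines = iter(['<span>line one</span>', '<span>line two</span>'])
    tmpl = Template()
    # each call to the filter consumes the next pre-colorized line,
    # like coloriter.next() does in the extension
    tmpl.filters['colorize'] = lambda _line: next(colorized_lines)
    print(tmpl.render('ignored'))   # -> '<span>line one</span>'
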
@@ -1,1225 +1,1223 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat, errno
10 import os, sys, bisect, stat, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, error, encoding
11 import mdiff, bdiff, util, templater, templatefilters, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = {}, []
157 seen, l = {}, []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen[rev] = 1
167 seen[rev] = 1
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen[rev] = 1
173 seen[rev] = 1
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 inst.args[0])
221 inst.args[0])
222
222
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 if not globbed and default == 'relpath':
239 if not globbed and default == 'relpath':
240 pats = util.expand_glob(pats or [])
240 pats = util.expand_glob(pats or [])
241 m = _match.match(repo.root, repo.getcwd(), pats,
241 m = _match.match(repo.root, repo.getcwd(), pats,
242 opts.get('include'), opts.get('exclude'), default)
242 opts.get('include'), opts.get('exclude'), default)
243 def badfn(f, msg):
243 def badfn(f, msg):
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 return False
245 return False
246 m.bad = badfn
246 m.bad = badfn
247 return m
247 return m
248
248
249 def matchall(repo):
249 def matchall(repo):
250 return _match.always(repo.root, repo.getcwd())
250 return _match.always(repo.root, repo.getcwd())
251
251
252 def matchfiles(repo, files):
252 def matchfiles(repo, files):
253 return _match.exact(repo.root, repo.getcwd(), files)
253 return _match.exact(repo.root, repo.getcwd(), files)
254
254
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 '''find renamed files -- yields (before, after, score) tuples'''
256 '''find renamed files -- yields (before, after, score) tuples'''
257 if added is None or removed is None:
257 if added is None or removed is None:
258 added, removed = repo.status()[1:3]
258 added, removed = repo.status()[1:3]
259 ctx = repo['.']
259 ctx = repo['.']
260 for a in added:
260 for a in added:
261 aa = repo.wread(a)
261 aa = repo.wread(a)
262 bestname, bestscore = None, threshold
262 bestname, bestscore = None, threshold
263 for r in removed:
263 for r in removed:
264 rr = ctx.filectx(r).data()
264 rr = ctx.filectx(r).data()
265
265
266 # bdiff.blocks() returns blocks of matching lines
266 # bdiff.blocks() returns blocks of matching lines
267 # count the number of bytes in each
267 # count the number of bytes in each
268 equal = 0
268 equal = 0
269 alines = mdiff.splitnewlines(aa)
269 alines = mdiff.splitnewlines(aa)
270 matches = bdiff.blocks(aa, rr)
270 matches = bdiff.blocks(aa, rr)
271 for x1,x2,y1,y2 in matches:
271 for x1,x2,y1,y2 in matches:
272 for line in alines[x1:x2]:
272 for line in alines[x1:x2]:
273 equal += len(line)
273 equal += len(line)
274
274
275 lengths = len(aa) + len(rr)
275 lengths = len(aa) + len(rr)
276 if lengths:
276 if lengths:
277 myscore = equal*2.0 / lengths
277 myscore = equal*2.0 / lengths
278 if myscore >= bestscore:
278 if myscore >= bestscore:
279 bestname, bestscore = r, myscore
279 bestname, bestscore = r, myscore
280 if bestname:
280 if bestname:
281 yield bestname, a, bestscore
281 yield bestname, a, bestscore
282
282
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 if dry_run is None:
284 if dry_run is None:
285 dry_run = opts.get('dry_run')
285 dry_run = opts.get('dry_run')
286 if similarity is None:
286 if similarity is None:
287 similarity = float(opts.get('similarity') or 0)
287 similarity = float(opts.get('similarity') or 0)
288 add, remove = [], []
288 add, remove = [], []
289 mapping = {}
289 mapping = {}
290 audit_path = util.path_auditor(repo.root)
290 audit_path = util.path_auditor(repo.root)
291 m = match(repo, pats, opts)
291 m = match(repo, pats, opts)
292 for abs in repo.walk(m):
292 for abs in repo.walk(m):
293 target = repo.wjoin(abs)
293 target = repo.wjoin(abs)
294 good = True
294 good = True
295 try:
295 try:
296 audit_path(abs)
296 audit_path(abs)
297 except:
297 except:
298 good = False
298 good = False
299 rel = m.rel(abs)
299 rel = m.rel(abs)
300 exact = m.exact(abs)
300 exact = m.exact(abs)
301 if good and abs not in repo.dirstate:
301 if good and abs not in repo.dirstate:
302 add.append(abs)
302 add.append(abs)
303 mapping[abs] = rel, m.exact(abs)
303 mapping[abs] = rel, m.exact(abs)
304 if repo.ui.verbose or not exact:
304 if repo.ui.verbose or not exact:
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 or (os.path.isdir(target) and not os.path.islink(target))):
307 or (os.path.isdir(target) and not os.path.islink(target))):
308 remove.append(abs)
308 remove.append(abs)
309 mapping[abs] = rel, exact
309 mapping[abs] = rel, exact
310 if repo.ui.verbose or not exact:
310 if repo.ui.verbose or not exact:
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 if not dry_run:
312 if not dry_run:
313 repo.remove(remove)
313 repo.remove(remove)
314 repo.add(add)
314 repo.add(add)
315 if similarity > 0:
315 if similarity > 0:
316 for old, new, score in findrenames(repo, add, remove, similarity):
316 for old, new, score in findrenames(repo, add, remove, similarity):
317 oldrel, oldexact = mapping[old]
317 oldrel, oldexact = mapping[old]
318 newrel, newexact = mapping[new]
318 newrel, newexact = mapping[new]
319 if repo.ui.verbose or not oldexact or not newexact:
319 if repo.ui.verbose or not oldexact or not newexact:
320 repo.ui.status(_('recording removal of %s as rename to %s '
320 repo.ui.status(_('recording removal of %s as rename to %s '
321 '(%d%% similar)\n') %
321 '(%d%% similar)\n') %
322 (oldrel, newrel, score * 100))
322 (oldrel, newrel, score * 100))
323 if not dry_run:
323 if not dry_run:
324 repo.copy(old, new)
324 repo.copy(old, new)
325
325
326 def copy(ui, repo, pats, opts, rename=False):
326 def copy(ui, repo, pats, opts, rename=False):
327 # called with the repo lock held
327 # called with the repo lock held
328 #
328 #
329 # hgsep => pathname that uses "/" to separate directories
329 # hgsep => pathname that uses "/" to separate directories
330 # ossep => pathname that uses os.sep to separate directories
330 # ossep => pathname that uses os.sep to separate directories
331 cwd = repo.getcwd()
331 cwd = repo.getcwd()
332 targets = {}
332 targets = {}
333 after = opts.get("after")
333 after = opts.get("after")
334 dryrun = opts.get("dry_run")
334 dryrun = opts.get("dry_run")
335
335
336 def walkpat(pat):
336 def walkpat(pat):
337 srcs = []
337 srcs = []
338 m = match(repo, [pat], opts, globbed=True)
338 m = match(repo, [pat], opts, globbed=True)
339 for abs in repo.walk(m):
339 for abs in repo.walk(m):
340 state = repo.dirstate[abs]
340 state = repo.dirstate[abs]
341 rel = m.rel(abs)
341 rel = m.rel(abs)
342 exact = m.exact(abs)
342 exact = m.exact(abs)
343 if state in '?r':
343 if state in '?r':
344 if exact and state == '?':
344 if exact and state == '?':
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 if exact and state == 'r':
346 if exact and state == 'r':
347 ui.warn(_('%s: not copying - file has been marked for'
347 ui.warn(_('%s: not copying - file has been marked for'
348 ' remove\n') % rel)
348 ' remove\n') % rel)
349 continue
349 continue
350 # abs: hgsep
350 # abs: hgsep
351 # rel: ossep
351 # rel: ossep
352 srcs.append((abs, rel, exact))
352 srcs.append((abs, rel, exact))
353 return srcs
353 return srcs
354
354
355 # abssrc: hgsep
355 # abssrc: hgsep
356 # relsrc: ossep
356 # relsrc: ossep
357 # otarget: ossep
357 # otarget: ossep
358 def copyfile(abssrc, relsrc, otarget, exact):
358 def copyfile(abssrc, relsrc, otarget, exact):
359 abstarget = util.canonpath(repo.root, cwd, otarget)
359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 reltarget = repo.pathto(abstarget, cwd)
360 reltarget = repo.pathto(abstarget, cwd)
361 target = repo.wjoin(abstarget)
361 target = repo.wjoin(abstarget)
362 src = repo.wjoin(abssrc)
362 src = repo.wjoin(abssrc)
363 state = repo.dirstate[abstarget]
363 state = repo.dirstate[abstarget]
364
364
365 # check for collisions
365 # check for collisions
366 prevsrc = targets.get(abstarget)
366 prevsrc = targets.get(abstarget)
367 if prevsrc is not None:
367 if prevsrc is not None:
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 (reltarget, repo.pathto(abssrc, cwd),
369 (reltarget, repo.pathto(abssrc, cwd),
370 repo.pathto(prevsrc, cwd)))
370 repo.pathto(prevsrc, cwd)))
371 return
371 return
372
372
373 # check for overwrites
373 # check for overwrites
374 exists = os.path.exists(target)
374 exists = os.path.exists(target)
375 if not after and exists or after and state in 'mn':
375 if not after and exists or after and state in 'mn':
376 if not opts['force']:
376 if not opts['force']:
377 ui.warn(_('%s: not overwriting - file exists\n') %
377 ui.warn(_('%s: not overwriting - file exists\n') %
378 reltarget)
378 reltarget)
379 return
379 return
380
380
381 if after:
381 if after:
382 if not exists:
382 if not exists:
383 return
383 return
384 elif not dryrun:
384 elif not dryrun:
385 try:
385 try:
386 if exists:
386 if exists:
387 os.unlink(target)
387 os.unlink(target)
388 targetdir = os.path.dirname(target) or '.'
388 targetdir = os.path.dirname(target) or '.'
389 if not os.path.isdir(targetdir):
389 if not os.path.isdir(targetdir):
390 os.makedirs(targetdir)
390 os.makedirs(targetdir)
391 util.copyfile(src, target)
391 util.copyfile(src, target)
392 except IOError, inst:
392 except IOError, inst:
393 if inst.errno == errno.ENOENT:
393 if inst.errno == errno.ENOENT:
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 else:
395 else:
396 ui.warn(_('%s: cannot copy - %s\n') %
396 ui.warn(_('%s: cannot copy - %s\n') %
397 (relsrc, inst.strerror))
397 (relsrc, inst.strerror))
398 return True # report a failure
398 return True # report a failure
399
399
400 if ui.verbose or not exact:
400 if ui.verbose or not exact:
401 if rename:
401 if rename:
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 else:
403 else:
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405
405
406 targets[abstarget] = abssrc
406 targets[abstarget] = abssrc
407
407
408 # fix up dirstate
408 # fix up dirstate
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 if abstarget == origsrc: # copying back a copy?
410 if abstarget == origsrc: # copying back a copy?
411 if state not in 'mn' and not dryrun:
411 if state not in 'mn' and not dryrun:
412 repo.dirstate.normallookup(abstarget)
412 repo.dirstate.normallookup(abstarget)
413 else:
413 else:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 if not ui.quiet:
415 if not ui.quiet:
416 ui.warn(_("%s has not been committed yet, so no copy "
416 ui.warn(_("%s has not been committed yet, so no copy "
417 "data will be stored for %s.\n")
417 "data will be stored for %s.\n")
418 % (repo.pathto(origsrc, cwd), reltarget))
418 % (repo.pathto(origsrc, cwd), reltarget))
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 repo.add([abstarget])
420 repo.add([abstarget])
421 elif not dryrun:
421 elif not dryrun:
422 repo.copy(origsrc, abstarget)
422 repo.copy(origsrc, abstarget)
423
423
424 if rename and not dryrun:
424 if rename and not dryrun:
425 repo.remove([abssrc], not after)
425 repo.remove([abssrc], not after)
426
426
427 # pat: ossep
427 # pat: ossep
428 # dest ossep
428 # dest ossep
429 # srcs: list of (hgsep, hgsep, ossep, bool)
429 # srcs: list of (hgsep, hgsep, ossep, bool)
430 # return: function that takes hgsep and returns ossep
430 # return: function that takes hgsep and returns ossep
431 def targetpathfn(pat, dest, srcs):
431 def targetpathfn(pat, dest, srcs):
432 if os.path.isdir(pat):
432 if os.path.isdir(pat):
433 abspfx = util.canonpath(repo.root, cwd, pat)
433 abspfx = util.canonpath(repo.root, cwd, pat)
434 abspfx = util.localpath(abspfx)
434 abspfx = util.localpath(abspfx)
435 if destdirexists:
435 if destdirexists:
436 striplen = len(os.path.split(abspfx)[0])
436 striplen = len(os.path.split(abspfx)[0])
437 else:
437 else:
438 striplen = len(abspfx)
438 striplen = len(abspfx)
439 if striplen:
439 if striplen:
440 striplen += len(os.sep)
440 striplen += len(os.sep)
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 elif destdirexists:
442 elif destdirexists:
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 res = lambda p: dest
446 res = lambda p: dest
447 return res
447 return res
448
448
449 # pat: ossep
449 # pat: ossep
450 # dest ossep
450 # dest ossep
451 # srcs: list of (hgsep, hgsep, ossep, bool)
451 # srcs: list of (hgsep, hgsep, ossep, bool)
452 # return: function that takes hgsep and returns ossep
452 # return: function that takes hgsep and returns ossep
453 def targetpathafterfn(pat, dest, srcs):
453 def targetpathafterfn(pat, dest, srcs):
454 if util.patkind(pat, None)[0]:
454 if util.patkind(pat, None)[0]:
455 # a mercurial pattern
455 # a mercurial pattern
456 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
457 os.path.basename(util.localpath(p)))
457 os.path.basename(util.localpath(p)))
458 else:
458 else:
459 abspfx = util.canonpath(repo.root, cwd, pat)
459 abspfx = util.canonpath(repo.root, cwd, pat)
460 if len(abspfx) < len(srcs[0][0]):
460 if len(abspfx) < len(srcs[0][0]):
461 # A directory. Either the target path contains the last
461 # A directory. Either the target path contains the last
462 # component of the source path or it does not.
462 # component of the source path or it does not.
463 def evalpath(striplen):
463 def evalpath(striplen):
464 score = 0
464 score = 0
465 for s in srcs:
465 for s in srcs:
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 if os.path.exists(t):
467 if os.path.exists(t):
468 score += 1
468 score += 1
469 return score
469 return score
470
470
471 abspfx = util.localpath(abspfx)
471 abspfx = util.localpath(abspfx)
472 striplen = len(abspfx)
472 striplen = len(abspfx)
473 if striplen:
473 if striplen:
474 striplen += len(os.sep)
474 striplen += len(os.sep)
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 score = evalpath(striplen)
476 score = evalpath(striplen)
477 striplen1 = len(os.path.split(abspfx)[0])
477 striplen1 = len(os.path.split(abspfx)[0])
478 if striplen1:
478 if striplen1:
479 striplen1 += len(os.sep)
479 striplen1 += len(os.sep)
480 if evalpath(striplen1) > score:
480 if evalpath(striplen1) > score:
481 striplen = striplen1
481 striplen = striplen1
482 res = lambda p: os.path.join(dest,
482 res = lambda p: os.path.join(dest,
483 util.localpath(p)[striplen:])
483 util.localpath(p)[striplen:])
484 else:
484 else:
485 # a file
485 # a file
486 if destdirexists:
486 if destdirexists:
487 res = lambda p: os.path.join(dest,
487 res = lambda p: os.path.join(dest,
488 os.path.basename(util.localpath(p)))
488 os.path.basename(util.localpath(p)))
489 else:
489 else:
490 res = lambda p: dest
490 res = lambda p: dest
491 return res
491 return res
492
492
493
493
494 pats = util.expand_glob(pats)
494 pats = util.expand_glob(pats)
495 if not pats:
495 if not pats:
496 raise util.Abort(_('no source or destination specified'))
496 raise util.Abort(_('no source or destination specified'))
497 if len(pats) == 1:
497 if len(pats) == 1:
498 raise util.Abort(_('no destination specified'))
498 raise util.Abort(_('no destination specified'))
499 dest = pats.pop()
499 dest = pats.pop()
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 if not destdirexists:
501 if not destdirexists:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 raise util.Abort(_('with multiple sources, destination must be an '
503 raise util.Abort(_('with multiple sources, destination must be an '
504 'existing directory'))
504 'existing directory'))
505 if util.endswithsep(dest):
505 if util.endswithsep(dest):
506 raise util.Abort(_('destination %s is not a directory') % dest)
506 raise util.Abort(_('destination %s is not a directory') % dest)
507
507
508 tfn = targetpathfn
508 tfn = targetpathfn
509 if after:
509 if after:
510 tfn = targetpathafterfn
510 tfn = targetpathafterfn
511 copylist = []
511 copylist = []
512 for pat in pats:
512 for pat in pats:
513 srcs = walkpat(pat)
513 srcs = walkpat(pat)
514 if not srcs:
514 if not srcs:
515 continue
515 continue
516 copylist.append((tfn(pat, dest, srcs), srcs))
516 copylist.append((tfn(pat, dest, srcs), srcs))
517 if not copylist:
517 if not copylist:
518 raise util.Abort(_('no files to copy'))
518 raise util.Abort(_('no files to copy'))
519
519
520 errors = 0
520 errors = 0
521 for targetpath, srcs in copylist:
521 for targetpath, srcs in copylist:
522 for abssrc, relsrc, exact in srcs:
522 for abssrc, relsrc, exact in srcs:
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 errors += 1
524 errors += 1
525
525
526 if errors:
526 if errors:
527 ui.warn(_('(consider using --after)\n'))
527 ui.warn(_('(consider using --after)\n'))
528
528
529 return errors
529 return errors
530
530
531 def service(opts, parentfn=None, initfn=None, runfn=None):
531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 '''Run a command as a service.'''
532 '''Run a command as a service.'''
533
533
534 if opts['daemon'] and not opts['daemon_pipefds']:
534 if opts['daemon'] and not opts['daemon_pipefds']:
535 rfd, wfd = os.pipe()
535 rfd, wfd = os.pipe()
536 args = sys.argv[:]
536 args = sys.argv[:]
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 # Don't pass --cwd to the child process, because we've already
538 # Don't pass --cwd to the child process, because we've already
539 # changed directory.
539 # changed directory.
540 for i in xrange(1,len(args)):
540 for i in xrange(1,len(args)):
541 if args[i].startswith('--cwd='):
541 if args[i].startswith('--cwd='):
542 del args[i]
542 del args[i]
543 break
543 break
544 elif args[i].startswith('--cwd'):
544 elif args[i].startswith('--cwd'):
545 del args[i:i+2]
545 del args[i:i+2]
546 break
546 break
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 args[0], args)
548 args[0], args)
549 os.close(wfd)
549 os.close(wfd)
550 os.read(rfd, 1)
550 os.read(rfd, 1)
551 if parentfn:
551 if parentfn:
552 return parentfn(pid)
552 return parentfn(pid)
553 else:
553 else:
554 os._exit(0)
554 os._exit(0)
555
555
556 if initfn:
556 if initfn:
557 initfn()
557 initfn()
558
558
559 if opts['pid_file']:
559 if opts['pid_file']:
560 fp = open(opts['pid_file'], 'w')
560 fp = open(opts['pid_file'], 'w')
561 fp.write(str(os.getpid()) + '\n')
561 fp.write(str(os.getpid()) + '\n')
562 fp.close()
562 fp.close()
563
563
564 if opts['daemon_pipefds']:
564 if opts['daemon_pipefds']:
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 os.close(rfd)
566 os.close(rfd)
567 try:
567 try:
568 os.setsid()
568 os.setsid()
569 except AttributeError:
569 except AttributeError:
570 pass
570 pass
571 os.write(wfd, 'y')
571 os.write(wfd, 'y')
572 os.close(wfd)
572 os.close(wfd)
573 sys.stdout.flush()
573 sys.stdout.flush()
574 sys.stderr.flush()
574 sys.stderr.flush()
575 fd = os.open(util.nulldev, os.O_RDWR)
575 fd = os.open(util.nulldev, os.O_RDWR)
576 if fd != 0: os.dup2(fd, 0)
576 if fd != 0: os.dup2(fd, 0)
577 if fd != 1: os.dup2(fd, 1)
577 if fd != 1: os.dup2(fd, 1)
578 if fd != 2: os.dup2(fd, 2)
578 if fd != 2: os.dup2(fd, 2)
579 if fd not in (0, 1, 2): os.close(fd)
579 if fd not in (0, 1, 2): os.close(fd)
580
580
581 if runfn:
581 if runfn:
582 return runfn()
582 return runfn()
583
583
584 class changeset_printer(object):
584 class changeset_printer(object):
585 '''show changeset information when templating not requested.'''
585 '''show changeset information when templating not requested.'''
586
586
587 def __init__(self, ui, repo, patch, diffopts, buffered):
587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 self.ui = ui
588 self.ui = ui
589 self.repo = repo
589 self.repo = repo
590 self.buffered = buffered
590 self.buffered = buffered
591 self.patch = patch
591 self.patch = patch
592 self.diffopts = diffopts
592 self.diffopts = diffopts
593 self.header = {}
593 self.header = {}
594 self.hunk = {}
594 self.hunk = {}
595 self.lastheader = None
595 self.lastheader = None
596
596
597 def flush(self, rev):
597 def flush(self, rev):
598 if rev in self.header:
598 if rev in self.header:
599 h = self.header[rev]
599 h = self.header[rev]
600 if h != self.lastheader:
600 if h != self.lastheader:
601 self.lastheader = h
601 self.lastheader = h
602 self.ui.write(h)
602 self.ui.write(h)
603 del self.header[rev]
603 del self.header[rev]
604 if rev in self.hunk:
604 if rev in self.hunk:
605 self.ui.write(self.hunk[rev])
605 self.ui.write(self.hunk[rev])
606 del self.hunk[rev]
606 del self.hunk[rev]
607 return 1
607 return 1
608 return 0
608 return 0
609
609
610 def show(self, ctx, copies=(), **props):
610 def show(self, ctx, copies=(), **props):
611 if self.buffered:
611 if self.buffered:
612 self.ui.pushbuffer()
612 self.ui.pushbuffer()
613 self._show(ctx, copies, props)
613 self._show(ctx, copies, props)
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 else:
615 else:
616 self._show(ctx, copies, props)
616 self._show(ctx, copies, props)
617
617
618 def _show(self, ctx, copies, props):
618 def _show(self, ctx, copies, props):
619 '''show a single changeset or file revision'''
619 '''show a single changeset or file revision'''
620 changenode = ctx.node()
620 changenode = ctx.node()
621 rev = ctx.rev()
621 rev = ctx.rev()
622
622
623 if self.ui.quiet:
623 if self.ui.quiet:
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 return
625 return
626
626
627 log = self.repo.changelog
627 log = self.repo.changelog
628 changes = log.read(changenode)
628 changes = log.read(changenode)
629 date = util.datestr(changes[2])
629 date = util.datestr(changes[2])
630 extra = changes[5]
630 extra = changes[5]
631 branch = extra.get("branch")
631 branch = extra.get("branch")
632
632
633 hexfunc = self.ui.debugflag and hex or short
633 hexfunc = self.ui.debugflag and hex or short
634
634
635 parents = [(p, hexfunc(log.node(p)))
635 parents = [(p, hexfunc(log.node(p)))
636 for p in self._meaningful_parentrevs(log, rev)]
636 for p in self._meaningful_parentrevs(log, rev)]
637
637
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639
639
640 # don't show the default branch name
640 # don't show the default branch name
641 if branch != 'default':
641 if branch != 'default':
642 branch = encoding.tolocal(branch)
642 branch = encoding.tolocal(branch)
643 self.ui.write(_("branch: %s\n") % branch)
643 self.ui.write(_("branch: %s\n") % branch)
644 for tag in self.repo.nodetags(changenode):
644 for tag in self.repo.nodetags(changenode):
645 self.ui.write(_("tag: %s\n") % tag)
645 self.ui.write(_("tag: %s\n") % tag)
646 for parent in parents:
646 for parent in parents:
647 self.ui.write(_("parent: %d:%s\n") % parent)
647 self.ui.write(_("parent: %d:%s\n") % parent)
648
648
649 if self.ui.debugflag:
649 if self.ui.debugflag:
650 self.ui.write(_("manifest: %d:%s\n") %
650 self.ui.write(_("manifest: %d:%s\n") %
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 self.ui.write(_("user: %s\n") % changes[1])
652 self.ui.write(_("user: %s\n") % changes[1])
653 self.ui.write(_("date: %s\n") % date)
653 self.ui.write(_("date: %s\n") % date)
654
654
655 if self.ui.debugflag:
655 if self.ui.debugflag:
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 files):
658 files):
659 if value:
659 if value:
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 elif changes[3] and self.ui.verbose:
661 elif changes[3] and self.ui.verbose:
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 if copies and self.ui.verbose:
663 if copies and self.ui.verbose:
664 copies = ['%s (%s)' % c for c in copies]
664 copies = ['%s (%s)' % c for c in copies]
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666
666
667 if extra and self.ui.debugflag:
667 if extra and self.ui.debugflag:
668 for key, value in sorted(extra.items()):
668 for key, value in sorted(extra.items()):
669 self.ui.write(_("extra: %s=%s\n")
669 self.ui.write(_("extra: %s=%s\n")
670 % (key, value.encode('string_escape')))
670 % (key, value.encode('string_escape')))
671
671
672 description = changes[4].strip()
672 description = changes[4].strip()
673 if description:
673 if description:
674 if self.ui.verbose:
674 if self.ui.verbose:
675 self.ui.write(_("description:\n"))
675 self.ui.write(_("description:\n"))
676 self.ui.write(description)
676 self.ui.write(description)
677 self.ui.write("\n\n")
677 self.ui.write("\n\n")
678 else:
678 else:
679 self.ui.write(_("summary: %s\n") %
679 self.ui.write(_("summary: %s\n") %
680 description.splitlines()[0])
680 description.splitlines()[0])
681 self.ui.write("\n")
681 self.ui.write("\n")
682
682
683 self.showpatch(changenode)
683 self.showpatch(changenode)
684
684
685 def showpatch(self, node):
685 def showpatch(self, node):
686 if self.patch:
686 if self.patch:
687 prev = self.repo.changelog.parents(node)[0]
687 prev = self.repo.changelog.parents(node)[0]
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 opts=patch.diffopts(self.ui, self.diffopts))
689 opts=patch.diffopts(self.ui, self.diffopts))
690 for chunk in chunks:
690 for chunk in chunks:
691 self.ui.write(chunk)
691 self.ui.write(chunk)
692 self.ui.write("\n")
692 self.ui.write("\n")
693
693
694 def _meaningful_parentrevs(self, log, rev):
694 def _meaningful_parentrevs(self, log, rev):
695 """Return list of meaningful (or all if debug) parentrevs for rev.
695 """Return list of meaningful (or all if debug) parentrevs for rev.
696
696
697 For merges (two non-nullrev revisions) both parents are meaningful.
697 For merges (two non-nullrev revisions) both parents are meaningful.
698 Otherwise the first parent revision is considered meaningful if it
698 Otherwise the first parent revision is considered meaningful if it
699 is not the preceding revision.
699 is not the preceding revision.
700 """
700 """
701 parents = log.parentrevs(rev)
701 parents = log.parentrevs(rev)
702 if not self.ui.debugflag and parents[1] == nullrev:
702 if not self.ui.debugflag and parents[1] == nullrev:
703 if parents[0] >= rev - 1:
703 if parents[0] >= rev - 1:
704 parents = []
704 parents = []
705 else:
705 else:
706 parents = [parents[0]]
706 parents = [parents[0]]
707 return parents
707 return parents
708
708
709
709
710 class changeset_templater(changeset_printer):
710 class changeset_templater(changeset_printer):
711 '''format changeset information.'''
711 '''format changeset information.'''
712
712
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 filters = templatefilters.filters.copy()
715 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
716 filters['formatnode'] = (ui.debugflag and (lambda x: x)
716 self.t = templater.templater(mapfile, {'formatnode': formatnode},
717 or (lambda x: x[:12]))
718 self.t = templater.templater(mapfile, filters,
719 cache={
717 cache={
720 'parent': '{rev}:{node|formatnode} ',
718 'parent': '{rev}:{node|formatnode} ',
721 'manifest': '{rev}:{node|formatnode}',
719 'manifest': '{rev}:{node|formatnode}',
722 'filecopy': '{name} ({source})'})
720 'filecopy': '{name} ({source})'})
723
721
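
The changeset_templater.__init__ hunk above is the caller-side counterpart of the same cleanup: instead of copying templatefilters.filters and adding 'formatnode' to the copy, it now passes only {'formatnode': formatnode} and relies on the templater to merge in the standard filters itself. Assuming that merge behaviour, the old and new constructions produce the same effective filter set; a quick self-contained check of the idea, using plain dicts as stand-ins for the real filter tables:

    # Plain-dict stand-ins (hypothetical) for mercurial.templatefilters.filters.
    standard = {'escape': lambda s: s, 'strip': lambda s: s.strip()}
    formatnode = lambda x: x[:12]

    # old style: the caller copies the standard filters and extends the copy
    old_filters = standard.copy()
    old_filters['formatnode'] = formatnode

    # new style: the caller passes only its extras; the templater is assumed
    # to merge them onto the standard filters internally
    extras = {'formatnode': formatnode}
    new_filters = dict(standard, **extras)

    assert old_filters == new_filters
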
724 def use_template(self, t):
722 def use_template(self, t):
725 '''set template string to use'''
723 '''set template string to use'''
726 self.t.cache['changeset'] = t
724 self.t.cache['changeset'] = t
727
725
728 def _meaningful_parentrevs(self, ctx):
726 def _meaningful_parentrevs(self, ctx):
729 """Return list of meaningful (or all if debug) parentrevs for rev.
727 """Return list of meaningful (or all if debug) parentrevs for rev.
730 """
728 """
731 parents = ctx.parents()
729 parents = ctx.parents()
732 if len(parents) > 1:
730 if len(parents) > 1:
733 return parents
731 return parents
734 if self.ui.debugflag:
732 if self.ui.debugflag:
735 return [parents[0], self.repo['null']]
733 return [parents[0], self.repo['null']]
736 if parents[0].rev() >= ctx.rev() - 1:
734 if parents[0].rev() >= ctx.rev() - 1:
737 return []
735 return []
738 return parents
736 return parents
739
737
740 def _show(self, ctx, copies, props):
738 def _show(self, ctx, copies, props):
741 '''show a single changeset or file revision'''
739 '''show a single changeset or file revision'''
742
740
743 def showlist(name, values, plural=None, **args):
741 def showlist(name, values, plural=None, **args):
744 '''expand set of values.
742 '''expand set of values.
745 name is name of key in template map.
743 name is name of key in template map.
746 values is list of strings or dicts.
744 values is list of strings or dicts.
747 plural is plural of name, if not simply name + 's'.
745 plural is plural of name, if not simply name + 's'.
748
746
749 expansion works like this, given name 'foo'.
747 expansion works like this, given name 'foo'.
750
748
751 if values is empty, expand 'no_foos'.
749 if values is empty, expand 'no_foos'.
752
750
753 if 'foo' not in template map, return values as a string,
751 if 'foo' not in template map, return values as a string,
754 joined by space.
752 joined by space.
755
753
756 expand 'start_foos'.
754 expand 'start_foos'.
757
755
758 for each value, expand 'foo'. if 'last_foo' in template
756 for each value, expand 'foo'. if 'last_foo' in template
759 map, expand it instead of 'foo' for last key.
757 map, expand it instead of 'foo' for last key.
760
758
761 expand 'end_foos'.
759 expand 'end_foos'.
762 '''
760 '''
763 if plural: names = plural
761 if plural: names = plural
764 else: names = name + 's'
762 else: names = name + 's'
765 if not values:
763 if not values:
766 noname = 'no_' + names
764 noname = 'no_' + names
767 if noname in self.t:
765 if noname in self.t:
768 yield self.t(noname, **args)
766 yield self.t(noname, **args)
769 return
767 return
770 if name not in self.t:
768 if name not in self.t:
771 if isinstance(values[0], str):
769 if isinstance(values[0], str):
772 yield ' '.join(values)
770 yield ' '.join(values)
773 else:
771 else:
774 for v in values:
772 for v in values:
775 yield dict(v, **args)
773 yield dict(v, **args)
776 return
774 return
777 startname = 'start_' + names
775 startname = 'start_' + names
778 if startname in self.t:
776 if startname in self.t:
779 yield self.t(startname, **args)
777 yield self.t(startname, **args)
780 vargs = args.copy()
778 vargs = args.copy()
781 def one(v, tag=name):
779 def one(v, tag=name):
782 try:
780 try:
783 vargs.update(v)
781 vargs.update(v)
784 except (AttributeError, ValueError):
782 except (AttributeError, ValueError):
785 try:
783 try:
786 for a, b in v:
784 for a, b in v:
787 vargs[a] = b
785 vargs[a] = b
788 except ValueError:
786 except ValueError:
789 vargs[name] = v
787 vargs[name] = v
790 return self.t(tag, **vargs)
788 return self.t(tag, **vargs)
791 lastname = 'last_' + name
789 lastname = 'last_' + name
792 if lastname in self.t:
790 if lastname in self.t:
793 last = values.pop()
791 last = values.pop()
794 else:
792 else:
795 last = None
793 last = None
796 for v in values:
794 for v in values:
797 yield one(v)
795 yield one(v)
798 if last is not None:
796 if last is not None:
799 yield one(last, tag=lastname)
797 yield one(last, tag=lastname)
800 endname = 'end_' + names
798 endname = 'end_' + names
801 if endname in self.t:
799 if endname in self.t:
802 yield self.t(endname, **args)
800 yield self.t(endname, **args)
803
801
804 def showbranches(**args):
802 def showbranches(**args):
805 branch = ctx.branch()
803 branch = ctx.branch()
806 if branch != 'default':
804 if branch != 'default':
807 branch = encoding.tolocal(branch)
805 branch = encoding.tolocal(branch)
808 return showlist('branch', [branch], plural='branches', **args)
806 return showlist('branch', [branch], plural='branches', **args)
809
807
810 def showparents(**args):
808 def showparents(**args):
811 parents = [[('rev', p.rev()), ('node', p.hex())]
809 parents = [[('rev', p.rev()), ('node', p.hex())]
812 for p in self._meaningful_parentrevs(ctx)]
810 for p in self._meaningful_parentrevs(ctx)]
813 return showlist('parent', parents, **args)
811 return showlist('parent', parents, **args)
814
812
815 def showtags(**args):
813 def showtags(**args):
816 return showlist('tag', ctx.tags(), **args)
814 return showlist('tag', ctx.tags(), **args)
817
815
818 def showextras(**args):
816 def showextras(**args):
819 for key, value in sorted(ctx.extra().items()):
817 for key, value in sorted(ctx.extra().items()):
820 args = args.copy()
818 args = args.copy()
821 args.update(dict(key=key, value=value))
819 args.update(dict(key=key, value=value))
822 yield self.t('extra', **args)
820 yield self.t('extra', **args)
823
821
824 def showcopies(**args):
822 def showcopies(**args):
825 c = [{'name': x[0], 'source': x[1]} for x in copies]
823 c = [{'name': x[0], 'source': x[1]} for x in copies]
826 return showlist('file_copy', c, plural='file_copies', **args)
824 return showlist('file_copy', c, plural='file_copies', **args)
827
825
828 files = []
826 files = []
829 def getfiles():
827 def getfiles():
830 if not files:
828 if not files:
831 files[:] = self.repo.status(ctx.parents()[0].node(),
829 files[:] = self.repo.status(ctx.parents()[0].node(),
832 ctx.node())[:3]
830 ctx.node())[:3]
833 return files
831 return files
834 def showfiles(**args):
832 def showfiles(**args):
835 return showlist('file', ctx.files(), **args)
833 return showlist('file', ctx.files(), **args)
836 def showmods(**args):
834 def showmods(**args):
837 return showlist('file_mod', getfiles()[0], **args)
835 return showlist('file_mod', getfiles()[0], **args)
838 def showadds(**args):
836 def showadds(**args):
839 return showlist('file_add', getfiles()[1], **args)
837 return showlist('file_add', getfiles()[1], **args)
840 def showdels(**args):
838 def showdels(**args):
841 return showlist('file_del', getfiles()[2], **args)
839 return showlist('file_del', getfiles()[2], **args)
842 def showmanifest(**args):
840 def showmanifest(**args):
843 args = args.copy()
841 args = args.copy()
844 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
842 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
845 node=hex(ctx.changeset()[0])))
843 node=hex(ctx.changeset()[0])))
846 return self.t('manifest', **args)
844 return self.t('manifest', **args)
847
845
848 def showdiffstat(**args):
846 def showdiffstat(**args):
849 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
847 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
850 files, adds, removes = 0, 0, 0
848 files, adds, removes = 0, 0, 0
851 for i in patch.diffstatdata(util.iterlines(diff)):
849 for i in patch.diffstatdata(util.iterlines(diff)):
852 files += 1
850 files += 1
853 adds += i[1]
851 adds += i[1]
854 removes += i[2]
852 removes += i[2]
855 return '%s: +%s/-%s' % (files, adds, removes)
853 return '%s: +%s/-%s' % (files, adds, removes)
856
854
857 defprops = {
855 defprops = {
858 'author': ctx.user(),
856 'author': ctx.user(),
859 'branches': showbranches,
857 'branches': showbranches,
860 'date': ctx.date(),
858 'date': ctx.date(),
861 'desc': ctx.description().strip(),
859 'desc': ctx.description().strip(),
862 'file_adds': showadds,
860 'file_adds': showadds,
863 'file_dels': showdels,
861 'file_dels': showdels,
864 'file_mods': showmods,
862 'file_mods': showmods,
865 'files': showfiles,
863 'files': showfiles,
866 'file_copies': showcopies,
864 'file_copies': showcopies,
867 'manifest': showmanifest,
865 'manifest': showmanifest,
868 'node': ctx.hex(),
866 'node': ctx.hex(),
869 'parents': showparents,
867 'parents': showparents,
870 'rev': ctx.rev(),
868 'rev': ctx.rev(),
871 'tags': showtags,
869 'tags': showtags,
872 'extras': showextras,
870 'extras': showextras,
873 'diffstat': showdiffstat,
871 'diffstat': showdiffstat,
874 }
872 }
875 props = props.copy()
873 props = props.copy()
876 props.update(defprops)
874 props.update(defprops)
877
875
878 # find correct templates for current mode
876 # find correct templates for current mode
879
877
880 tmplmodes = [
878 tmplmodes = [
881 (True, None),
879 (True, None),
882 (self.ui.verbose, 'verbose'),
880 (self.ui.verbose, 'verbose'),
883 (self.ui.quiet, 'quiet'),
881 (self.ui.quiet, 'quiet'),
884 (self.ui.debugflag, 'debug'),
882 (self.ui.debugflag, 'debug'),
885 ]
883 ]
886
884
887 types = {'header': '', 'changeset': 'changeset'}
885 types = {'header': '', 'changeset': 'changeset'}
888 for mode, postfix in tmplmodes:
886 for mode, postfix in tmplmodes:
889 for type in types:
887 for type in types:
890 cur = postfix and ('%s_%s' % (type, postfix)) or type
888 cur = postfix and ('%s_%s' % (type, postfix)) or type
891 if mode and cur in self.t:
889 if mode and cur in self.t:
892 types[type] = cur
890 types[type] = cur
893
891
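# Illustrative sketch (not part of this changeset) of the lookup above: the
# most specific template name that is both enabled by a ui mode and present
# in the map file wins, with later modes (debug) overriding earlier ones
# (verbose).  The helper name and its arguments are invented for illustration.
def pick_template(available, verbose=False, quiet=False, debug=False):
    chosen = 'changeset'
    for active, postfix in [(True, None), (verbose, 'verbose'),
                            (quiet, 'quiet'), (debug, 'debug')]:
        cur = postfix and 'changeset_%s' % postfix or 'changeset'
        if active and cur in available:
            chosen = cur
    return chosen

assert pick_template(['changeset', 'changeset_verbose'], verbose=True) == 'changeset_verbose'
assert pick_template(['changeset'], debug=True) == 'changeset'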
894 try:
892 try:
895
893
896 # write header
894 # write header
897 if types['header']:
895 if types['header']:
898 h = templater.stringify(self.t(types['header'], **props))
896 h = templater.stringify(self.t(types['header'], **props))
899 if self.buffered:
897 if self.buffered:
900 self.header[ctx.rev()] = h
898 self.header[ctx.rev()] = h
901 else:
899 else:
902 self.ui.write(h)
900 self.ui.write(h)
903
901
904 # write changeset metadata, then patch if requested
902 # write changeset metadata, then patch if requested
905 key = types['changeset']
903 key = types['changeset']
906 self.ui.write(templater.stringify(self.t(key, **props)))
904 self.ui.write(templater.stringify(self.t(key, **props)))
907 self.showpatch(ctx.node())
905 self.showpatch(ctx.node())
908
906
909 except KeyError, inst:
907 except KeyError, inst:
910 msg = _("%s: no key named '%s'")
908 msg = _("%s: no key named '%s'")
911 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
909 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
912 except SyntaxError, inst:
910 except SyntaxError, inst:
913 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
911 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
914
912
915 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
913 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
916 """show one changeset using template or regular display.
914 """show one changeset using template or regular display.
917
915
918 Display format will be the first non-empty hit of:
916 Display format will be the first non-empty hit of:
919 1. option 'template'
917 1. option 'template'
920 2. option 'style'
918 2. option 'style'
921 3. [ui] setting 'logtemplate'
919 3. [ui] setting 'logtemplate'
922 4. [ui] setting 'style'
920 4. [ui] setting 'style'
923 If all of these values are either unset or the empty string,
921 If all of these values are either unset or the empty string,
924 regular display via changeset_printer() is done.
922 regular display via changeset_printer() is done.
925 """
923 """
926 # options
924 # options
927 patch = False
925 patch = False
928 if opts.get('patch'):
926 if opts.get('patch'):
929 patch = matchfn or matchall(repo)
927 patch = matchfn or matchall(repo)
930
928
931 tmpl = opts.get('template')
929 tmpl = opts.get('template')
932 style = None
930 style = None
933 if tmpl:
931 if tmpl:
934 tmpl = templater.parsestring(tmpl, quoted=False)
932 tmpl = templater.parsestring(tmpl, quoted=False)
935 else:
933 else:
936 style = opts.get('style')
934 style = opts.get('style')
937
935
938 # ui settings
936 # ui settings
939 if not (tmpl or style):
937 if not (tmpl or style):
940 tmpl = ui.config('ui', 'logtemplate')
938 tmpl = ui.config('ui', 'logtemplate')
941 if tmpl:
939 if tmpl:
942 tmpl = templater.parsestring(tmpl)
940 tmpl = templater.parsestring(tmpl)
943 else:
941 else:
944 style = ui.config('ui', 'style')
942 style = ui.config('ui', 'style')
945
943
946 if not (tmpl or style):
944 if not (tmpl or style):
947 return changeset_printer(ui, repo, patch, opts, buffered)
945 return changeset_printer(ui, repo, patch, opts, buffered)
948
946
949 mapfile = None
947 mapfile = None
950 if style and not tmpl:
948 if style and not tmpl:
951 mapfile = style
949 mapfile = style
952 if not os.path.split(mapfile)[0]:
950 if not os.path.split(mapfile)[0]:
953 mapname = (templater.templatepath('map-cmdline.' + mapfile)
951 mapname = (templater.templatepath('map-cmdline.' + mapfile)
954 or templater.templatepath(mapfile))
952 or templater.templatepath(mapfile))
955 if mapname: mapfile = mapname
953 if mapname: mapfile = mapname
956
954
957 try:
955 try:
958 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
956 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
959 except SyntaxError, inst:
957 except SyntaxError, inst:
960 raise util.Abort(inst.args[0])
958 raise util.Abort(inst.args[0])
961 if tmpl: t.use_template(tmpl)
959 if tmpl: t.use_template(tmpl)
962 return t
960 return t
963
961
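# A hedged sketch (not part of this changeset) of the precedence documented in
# show_changeset() above: --template beats --style, which beats the [ui]
# logtemplate setting, which beats the [ui] style setting; if nothing is set
# the caller falls back to the plain changeset_printer.  The helper name and
# argument names here are invented for illustration only.
def pick_display_format(template_opt, style_opt, logtemplate_cfg, style_cfg):
    for source, value in [('template', template_opt),
                          ('style', style_opt),
                          ('ui.logtemplate', logtemplate_cfg),
                          ('ui.style', style_cfg)]:
        if value:
            return source, value
    return 'default', None

assert pick_display_format('', 'compact', '{rev}\n', '') == ('style', 'compact')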
964 def finddate(ui, repo, date):
962 def finddate(ui, repo, date):
965 """Find the tipmost changeset that matches the given date spec"""
963 """Find the tipmost changeset that matches the given date spec"""
966 df = util.matchdate(date)
964 df = util.matchdate(date)
967 get = util.cachefunc(lambda r: repo[r].changeset())
965 get = util.cachefunc(lambda r: repo[r].changeset())
968 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
966 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
969 results = {}
967 results = {}
970 for st, rev, fns in changeiter:
968 for st, rev, fns in changeiter:
971 if st == 'add':
969 if st == 'add':
972 d = get(rev)[2]
970 d = get(rev)[2]
973 if df(d[0]):
971 if df(d[0]):
974 results[rev] = d
972 results[rev] = d
975 elif st == 'iter':
973 elif st == 'iter':
976 if rev in results:
974 if rev in results:
977 ui.status(_("Found revision %s from %s\n") %
975 ui.status(_("Found revision %s from %s\n") %
978 (rev, util.datestr(results[rev])))
976 (rev, util.datestr(results[rev])))
979 return str(rev)
977 return str(rev)
980
978
981 raise util.Abort(_("revision matching date not found"))
979 raise util.Abort(_("revision matching date not found"))
982
980
983 def walkchangerevs(ui, repo, pats, change, opts):
981 def walkchangerevs(ui, repo, pats, change, opts):
984 '''Iterate over files and the revs in which they changed.
982 '''Iterate over files and the revs in which they changed.
985
983
986 Callers most commonly need to iterate backwards over the history
984 Callers most commonly need to iterate backwards over the history
987 in which they are interested. Doing so has awful (quadratic-looking)
985 in which they are interested. Doing so has awful (quadratic-looking)
988 performance, so we use iterators in a "windowed" way.
986 performance, so we use iterators in a "windowed" way.
989
987
990 We walk a window of revisions in the desired order. Within the
988 We walk a window of revisions in the desired order. Within the
991 window, we first walk forwards to gather data, then in the desired
989 window, we first walk forwards to gather data, then in the desired
992 order (usually backwards) to display it.
990 order (usually backwards) to display it.
993
991
994 This function returns an (iterator, matchfn) tuple. The iterator
992 This function returns an (iterator, matchfn) tuple. The iterator
995 yields 3-tuples. They will be of one of the following forms:
993 yields 3-tuples. They will be of one of the following forms:
996
994
997 "window", incrementing, lastrev: stepping through a window,
995 "window", incrementing, lastrev: stepping through a window,
998 positive if walking forwards through revs, last rev in the
996 positive if walking forwards through revs, last rev in the
999 sequence iterated over - use to reset state for the current window
997 sequence iterated over - use to reset state for the current window
1000
998
1001 "add", rev, fns: out-of-order traversal of the given file names
999 "add", rev, fns: out-of-order traversal of the given file names
1002 fns, which changed during revision rev - use to gather data for
1000 fns, which changed during revision rev - use to gather data for
1003 possible display
1001 possible display
1004
1002
1005 "iter", rev, None: in-order traversal of the revs earlier iterated
1003 "iter", rev, None: in-order traversal of the revs earlier iterated
1006 over with "add" - use to display data'''
1004 over with "add" - use to display data'''
1007
1005
1008 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1006 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1009 if start < end:
1007 if start < end:
1010 while start < end:
1008 while start < end:
1011 yield start, min(windowsize, end-start)
1009 yield start, min(windowsize, end-start)
1012 start += windowsize
1010 start += windowsize
1013 if windowsize < sizelimit:
1011 if windowsize < sizelimit:
1014 windowsize *= 2
1012 windowsize *= 2
1015 else:
1013 else:
1016 while start > end:
1014 while start > end:
1017 yield start, min(windowsize, start-end-1)
1015 yield start, min(windowsize, start-end-1)
1018 start -= windowsize
1016 start -= windowsize
1019 if windowsize < sizelimit:
1017 if windowsize < sizelimit:
1020 windowsize *= 2
1018 windowsize *= 2
1021
1019
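# Illustrative sketch (not part of this changeset): the window sizes produced
# by increasing_windows() above double from 8 up to the 512 cap, so recent
# history is visited in small batches and older history in progressively
# larger ones.  This standalone copy of the forward branch shows the effect.
def demo_windows(start, end, windowsize=8, sizelimit=512):
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2

assert list(demo_windows(0, 100)) == [(0, 8), (8, 16), (24, 32), (56, 44)]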
1022 m = match(repo, pats, opts)
1020 m = match(repo, pats, opts)
1023 follow = opts.get('follow') or opts.get('follow_first')
1021 follow = opts.get('follow') or opts.get('follow_first')
1024
1022
1025 if not len(repo):
1023 if not len(repo):
1026 return [], m
1024 return [], m
1027
1025
1028 if follow:
1026 if follow:
1029 defrange = '%s:0' % repo['.'].rev()
1027 defrange = '%s:0' % repo['.'].rev()
1030 else:
1028 else:
1031 defrange = '-1:0'
1029 defrange = '-1:0'
1032 revs = revrange(repo, opts['rev'] or [defrange])
1030 revs = revrange(repo, opts['rev'] or [defrange])
1033 wanted = set()
1031 wanted = set()
1034 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1032 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1035 fncache = {}
1033 fncache = {}
1036
1034
1037 if not slowpath and not m.files():
1035 if not slowpath and not m.files():
1038 # No files, no patterns. Display all revs.
1036 # No files, no patterns. Display all revs.
1039 wanted = set(revs)
1037 wanted = set(revs)
1040 copies = []
1038 copies = []
1041 if not slowpath:
1039 if not slowpath:
1042 # Only files, no patterns. Check the history of each file.
1040 # Only files, no patterns. Check the history of each file.
1043 def filerevgen(filelog, node):
1041 def filerevgen(filelog, node):
1044 cl_count = len(repo)
1042 cl_count = len(repo)
1045 if node is None:
1043 if node is None:
1046 last = len(filelog) - 1
1044 last = len(filelog) - 1
1047 else:
1045 else:
1048 last = filelog.rev(node)
1046 last = filelog.rev(node)
1049 for i, window in increasing_windows(last, nullrev):
1047 for i, window in increasing_windows(last, nullrev):
1050 revs = []
1048 revs = []
1051 for j in xrange(i - window, i + 1):
1049 for j in xrange(i - window, i + 1):
1052 n = filelog.node(j)
1050 n = filelog.node(j)
1053 revs.append((filelog.linkrev(j),
1051 revs.append((filelog.linkrev(j),
1054 follow and filelog.renamed(n)))
1052 follow and filelog.renamed(n)))
1055 for rev in reversed(revs):
1053 for rev in reversed(revs):
1056 # only yield revs for which we have the changelog; this can
1054 # only yield revs for which we have the changelog; this can
1057 # happen while doing "hg log" during a pull or commit
1055 # happen while doing "hg log" during a pull or commit
1058 if rev[0] < cl_count:
1056 if rev[0] < cl_count:
1059 yield rev
1057 yield rev
1060 def iterfiles():
1058 def iterfiles():
1061 for filename in m.files():
1059 for filename in m.files():
1062 yield filename, None
1060 yield filename, None
1063 for filename_node in copies:
1061 for filename_node in copies:
1064 yield filename_node
1062 yield filename_node
1065 minrev, maxrev = min(revs), max(revs)
1063 minrev, maxrev = min(revs), max(revs)
1066 for file_, node in iterfiles():
1064 for file_, node in iterfiles():
1067 filelog = repo.file(file_)
1065 filelog = repo.file(file_)
1068 if not len(filelog):
1066 if not len(filelog):
1069 if node is None:
1067 if node is None:
1070 # A zero count may be a directory or deleted file, so
1068 # A zero count may be a directory or deleted file, so
1071 # try to find matching entries on the slow path.
1069 # try to find matching entries on the slow path.
1072 if follow:
1070 if follow:
1073 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1071 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1074 slowpath = True
1072 slowpath = True
1075 break
1073 break
1076 else:
1074 else:
1077 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1075 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1078 % (file_, short(node)))
1076 % (file_, short(node)))
1079 continue
1077 continue
1080 for rev, copied in filerevgen(filelog, node):
1078 for rev, copied in filerevgen(filelog, node):
1081 if rev <= maxrev:
1079 if rev <= maxrev:
1082 if rev < minrev:
1080 if rev < minrev:
1083 break
1081 break
1084 fncache.setdefault(rev, [])
1082 fncache.setdefault(rev, [])
1085 fncache[rev].append(file_)
1083 fncache[rev].append(file_)
1086 wanted.add(rev)
1084 wanted.add(rev)
1087 if follow and copied:
1085 if follow and copied:
1088 copies.append(copied)
1086 copies.append(copied)
1089 if slowpath:
1087 if slowpath:
1090 if follow:
1088 if follow:
1091 raise util.Abort(_('can only follow copies/renames for explicit '
1089 raise util.Abort(_('can only follow copies/renames for explicit '
1092 'file names'))
1090 'file names'))
1093
1091
1094 # The slow path checks files modified in every changeset.
1092 # The slow path checks files modified in every changeset.
1095 def changerevgen():
1093 def changerevgen():
1096 for i, window in increasing_windows(len(repo) - 1, nullrev):
1094 for i, window in increasing_windows(len(repo) - 1, nullrev):
1097 for j in xrange(i - window, i + 1):
1095 for j in xrange(i - window, i + 1):
1098 yield j, change(j)[3]
1096 yield j, change(j)[3]
1099
1097
1100 for rev, changefiles in changerevgen():
1098 for rev, changefiles in changerevgen():
1101 matches = filter(m, changefiles)
1099 matches = filter(m, changefiles)
1102 if matches:
1100 if matches:
1103 fncache[rev] = matches
1101 fncache[rev] = matches
1104 wanted.add(rev)
1102 wanted.add(rev)
1105
1103
1106 class followfilter:
1104 class followfilter:
1107 def __init__(self, onlyfirst=False):
1105 def __init__(self, onlyfirst=False):
1108 self.startrev = nullrev
1106 self.startrev = nullrev
1109 self.roots = []
1107 self.roots = []
1110 self.onlyfirst = onlyfirst
1108 self.onlyfirst = onlyfirst
1111
1109
1112 def match(self, rev):
1110 def match(self, rev):
1113 def realparents(rev):
1111 def realparents(rev):
1114 if self.onlyfirst:
1112 if self.onlyfirst:
1115 return repo.changelog.parentrevs(rev)[0:1]
1113 return repo.changelog.parentrevs(rev)[0:1]
1116 else:
1114 else:
1117 return filter(lambda x: x != nullrev,
1115 return filter(lambda x: x != nullrev,
1118 repo.changelog.parentrevs(rev))
1116 repo.changelog.parentrevs(rev))
1119
1117
1120 if self.startrev == nullrev:
1118 if self.startrev == nullrev:
1121 self.startrev = rev
1119 self.startrev = rev
1122 return True
1120 return True
1123
1121
1124 if rev > self.startrev:
1122 if rev > self.startrev:
1125 # forward: all descendants
1123 # forward: all descendants
1126 if not self.roots:
1124 if not self.roots:
1127 self.roots.append(self.startrev)
1125 self.roots.append(self.startrev)
1128 for parent in realparents(rev):
1126 for parent in realparents(rev):
1129 if parent in self.roots:
1127 if parent in self.roots:
1130 self.roots.append(rev)
1128 self.roots.append(rev)
1131 return True
1129 return True
1132 else:
1130 else:
1133 # backwards: all parents
1131 # backwards: all parents
1134 if not self.roots:
1132 if not self.roots:
1135 self.roots.extend(realparents(self.startrev))
1133 self.roots.extend(realparents(self.startrev))
1136 if rev in self.roots:
1134 if rev in self.roots:
1137 self.roots.remove(rev)
1135 self.roots.remove(rev)
1138 self.roots.extend(realparents(rev))
1136 self.roots.extend(realparents(rev))
1139 return True
1137 return True
1140
1138
1141 return False
1139 return False
1142
1140
1143 # it might be worthwhile to do this in the iterator if the rev range
1141 # it might be worthwhile to do this in the iterator if the rev range
1144 # is descending and the prune args are all within that range
1142 # is descending and the prune args are all within that range
1145 for rev in opts.get('prune', ()):
1143 for rev in opts.get('prune', ()):
1146 rev = repo.changelog.rev(repo.lookup(rev))
1144 rev = repo.changelog.rev(repo.lookup(rev))
1147 ff = followfilter()
1145 ff = followfilter()
1148 stop = min(revs[0], revs[-1])
1146 stop = min(revs[0], revs[-1])
1149 for x in xrange(rev, stop-1, -1):
1147 for x in xrange(rev, stop-1, -1):
1150 if ff.match(x):
1148 if ff.match(x):
1151 wanted.discard(x)
1149 wanted.discard(x)
1152
1150
1153 def iterate():
1151 def iterate():
1154 if follow and not m.files():
1152 if follow and not m.files():
1155 ff = followfilter(onlyfirst=opts.get('follow_first'))
1153 ff = followfilter(onlyfirst=opts.get('follow_first'))
1156 def want(rev):
1154 def want(rev):
1157 return ff.match(rev) and rev in wanted
1155 return ff.match(rev) and rev in wanted
1158 else:
1156 else:
1159 def want(rev):
1157 def want(rev):
1160 return rev in wanted
1158 return rev in wanted
1161
1159
1162 for i, window in increasing_windows(0, len(revs)):
1160 for i, window in increasing_windows(0, len(revs)):
1163 yield 'window', revs[0] < revs[-1], revs[-1]
1161 yield 'window', revs[0] < revs[-1], revs[-1]
1164 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1162 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1165 for rev in sorted(nrevs):
1163 for rev in sorted(nrevs):
1166 fns = fncache.get(rev)
1164 fns = fncache.get(rev)
1167 if not fns:
1165 if not fns:
1168 def fns_generator():
1166 def fns_generator():
1169 for f in change(rev)[3]:
1167 for f in change(rev)[3]:
1170 if m(f):
1168 if m(f):
1171 yield f
1169 yield f
1172 fns = fns_generator()
1170 fns = fns_generator()
1173 yield 'add', rev, fns
1171 yield 'add', rev, fns
1174 for rev in nrevs:
1172 for rev in nrevs:
1175 yield 'iter', rev, None
1173 yield 'iter', rev, None
1176 return iterate(), m
1174 return iterate(), m
1177
1175
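# A hedged usage sketch (not part of this changeset): the typical shape of a
# consumer of the (kind, value, extra) tuples documented in walkchangerevs()
# above -- record per-revision data when the out-of-order 'add' tuples arrive,
# reset on 'window', and emit output on the in-order 'iter' tuples.  The event
# stream below is made up purely for illustration.
def consume(events):
    gathered, shown = {}, []
    for kind, value, extra in events:
        if kind == 'window':
            gathered.clear()                      # reset state for this window
        elif kind == 'add':
            gathered[value] = extra               # gather data out of order
        elif kind == 'iter':
            shown.append((value, gathered.get(value)))  # display in order
    return shown

events = [('window', True, 2),
          ('add', 1, ['a.txt']), ('add', 2, ['b.txt']),
          ('iter', 1, None), ('iter', 2, None)]
assert consume(events) == [(1, ['a.txt']), (2, ['b.txt'])]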
1178 def commit(ui, repo, commitfunc, pats, opts):
1176 def commit(ui, repo, commitfunc, pats, opts):
1179 '''commit the specified files or all outstanding changes'''
1177 '''commit the specified files or all outstanding changes'''
1180 date = opts.get('date')
1178 date = opts.get('date')
1181 if date:
1179 if date:
1182 opts['date'] = util.parsedate(date)
1180 opts['date'] = util.parsedate(date)
1183 message = logmessage(opts)
1181 message = logmessage(opts)
1184
1182
1185 # extract addremove carefully -- this function can be called from a command
1183 # extract addremove carefully -- this function can be called from a command
1186 # that doesn't support addremove
1184 # that doesn't support addremove
1187 if opts.get('addremove'):
1185 if opts.get('addremove'):
1188 addremove(repo, pats, opts)
1186 addremove(repo, pats, opts)
1189
1187
1190 m = match(repo, pats, opts)
1188 m = match(repo, pats, opts)
1191 if pats:
1189 if pats:
1192 modified, added, removed = repo.status(match=m)[:3]
1190 modified, added, removed = repo.status(match=m)[:3]
1193 files = sorted(modified + added + removed)
1191 files = sorted(modified + added + removed)
1194
1192
1195 def is_dir(f):
1193 def is_dir(f):
1196 name = f + '/'
1194 name = f + '/'
1197 i = bisect.bisect(files, name)
1195 i = bisect.bisect(files, name)
1198 return i < len(files) and files[i].startswith(name)
1196 return i < len(files) and files[i].startswith(name)
1199
1197
1200 for f in m.files():
1198 for f in m.files():
1201 if f == '.':
1199 if f == '.':
1202 continue
1200 continue
1203 if f not in files:
1201 if f not in files:
1204 rf = repo.wjoin(f)
1202 rf = repo.wjoin(f)
1205 rel = repo.pathto(f)
1203 rel = repo.pathto(f)
1206 try:
1204 try:
1207 mode = os.lstat(rf)[stat.ST_MODE]
1205 mode = os.lstat(rf)[stat.ST_MODE]
1208 except OSError:
1206 except OSError:
1209 if is_dir(f): # deleted directory ?
1207 if is_dir(f): # deleted directory ?
1210 continue
1208 continue
1211 raise util.Abort(_("file %s not found!") % rel)
1209 raise util.Abort(_("file %s not found!") % rel)
1212 if stat.S_ISDIR(mode):
1210 if stat.S_ISDIR(mode):
1213 if not is_dir(f):
1211 if not is_dir(f):
1214 raise util.Abort(_("no match under directory %s!")
1212 raise util.Abort(_("no match under directory %s!")
1215 % rel)
1213 % rel)
1216 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1214 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1217 raise util.Abort(_("can't commit %s: "
1215 raise util.Abort(_("can't commit %s: "
1218 "unsupported file type!") % rel)
1216 "unsupported file type!") % rel)
1219 elif f not in repo.dirstate:
1217 elif f not in repo.dirstate:
1220 raise util.Abort(_("file %s not tracked!") % rel)
1218 raise util.Abort(_("file %s not tracked!") % rel)
1221 m = matchfiles(repo, files)
1219 m = matchfiles(repo, files)
1222 try:
1220 try:
1223 return commitfunc(ui, repo, message, m, opts)
1221 return commitfunc(ui, repo, message, m, opts)
1224 except ValueError, inst:
1222 except ValueError, inst:
1225 raise util.Abort(str(inst))
1223 raise util.Abort(str(inst))
@@ -1,312 +1,311 @@
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import os
9 import os
10 from mercurial import ui, hg, util, hook, error, encoding
10 from mercurial import ui, hg, util, hook, error, encoding, templater
11 from mercurial import templater, templatefilters
12 from common import get_mtime, ErrorResponse
11 from common import get_mtime, ErrorResponse
13 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
12 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 from common import HTTP_UNAUTHORIZED, HTTP_METHOD_NOT_ALLOWED
13 from common import HTTP_UNAUTHORIZED, HTTP_METHOD_NOT_ALLOWED
15 from request import wsgirequest
14 from request import wsgirequest
16 import webcommands, protocol, webutil
15 import webcommands, protocol, webutil
17
16
18 perms = {
17 perms = {
19 'changegroup': 'pull',
18 'changegroup': 'pull',
20 'changegroupsubset': 'pull',
19 'changegroupsubset': 'pull',
21 'unbundle': 'push',
20 'unbundle': 'push',
22 'stream_out': 'pull',
21 'stream_out': 'pull',
23 }
22 }
24
23
25 class hgweb(object):
24 class hgweb(object):
26 def __init__(self, repo, name=None):
25 def __init__(self, repo, name=None):
27 if isinstance(repo, str):
26 if isinstance(repo, str):
28 u = ui.ui()
27 u = ui.ui()
29 u.setconfig('ui', 'report_untrusted', 'off')
28 u.setconfig('ui', 'report_untrusted', 'off')
30 u.setconfig('ui', 'interactive', 'off')
29 u.setconfig('ui', 'interactive', 'off')
31 self.repo = hg.repository(u, repo)
30 self.repo = hg.repository(u, repo)
32 else:
31 else:
33 self.repo = repo
32 self.repo = repo
34
33
35 hook.redirect(True)
34 hook.redirect(True)
36 self.mtime = -1
35 self.mtime = -1
37 self.reponame = name
36 self.reponame = name
38 self.archives = 'zip', 'gz', 'bz2'
37 self.archives = 'zip', 'gz', 'bz2'
39 self.stripecount = 1
38 self.stripecount = 1
40 # a repo owner may set web.templates in .hg/hgrc to get any file
39 # a repo owner may set web.templates in .hg/hgrc to get any file
41 # readable by the user running the CGI script
40 # readable by the user running the CGI script
42 self.templatepath = self.config('web', 'templates')
41 self.templatepath = self.config('web', 'templates')
43
42
44 # The CGI scripts are often run by a user different from the repo owner.
43 # The CGI scripts are often run by a user different from the repo owner.
45 # Trust the settings from the .hg/hgrc files by default.
44 # Trust the settings from the .hg/hgrc files by default.
46 def config(self, section, name, default=None, untrusted=True):
45 def config(self, section, name, default=None, untrusted=True):
47 return self.repo.ui.config(section, name, default,
46 return self.repo.ui.config(section, name, default,
48 untrusted=untrusted)
47 untrusted=untrusted)
49
48
50 def configbool(self, section, name, default=False, untrusted=True):
49 def configbool(self, section, name, default=False, untrusted=True):
51 return self.repo.ui.configbool(section, name, default,
50 return self.repo.ui.configbool(section, name, default,
52 untrusted=untrusted)
51 untrusted=untrusted)
53
52
54 def configlist(self, section, name, default=None, untrusted=True):
53 def configlist(self, section, name, default=None, untrusted=True):
55 return self.repo.ui.configlist(section, name, default,
54 return self.repo.ui.configlist(section, name, default,
56 untrusted=untrusted)
55 untrusted=untrusted)
57
56
58 def refresh(self):
57 def refresh(self):
59 mtime = get_mtime(self.repo.root)
58 mtime = get_mtime(self.repo.root)
60 if mtime != self.mtime:
59 if mtime != self.mtime:
61 self.mtime = mtime
60 self.mtime = mtime
62 self.repo = hg.repository(self.repo.ui, self.repo.root)
61 self.repo = hg.repository(self.repo.ui, self.repo.root)
63 self.maxchanges = int(self.config("web", "maxchanges", 10))
62 self.maxchanges = int(self.config("web", "maxchanges", 10))
64 self.stripecount = int(self.config("web", "stripes", 1))
63 self.stripecount = int(self.config("web", "stripes", 1))
65 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
64 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
66 self.maxfiles = int(self.config("web", "maxfiles", 10))
65 self.maxfiles = int(self.config("web", "maxfiles", 10))
67 self.allowpull = self.configbool("web", "allowpull", True)
66 self.allowpull = self.configbool("web", "allowpull", True)
68 self.encoding = self.config("web", "encoding", encoding.encoding)
67 self.encoding = self.config("web", "encoding", encoding.encoding)
69
68
70 def run(self):
69 def run(self):
71 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
70 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
72 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
71 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
73 import mercurial.hgweb.wsgicgi as wsgicgi
72 import mercurial.hgweb.wsgicgi as wsgicgi
74 wsgicgi.launch(self)
73 wsgicgi.launch(self)
75
74
76 def __call__(self, env, respond):
75 def __call__(self, env, respond):
77 req = wsgirequest(env, respond)
76 req = wsgirequest(env, respond)
78 return self.run_wsgi(req)
77 return self.run_wsgi(req)
79
78
80 def run_wsgi(self, req):
79 def run_wsgi(self, req):
81
80
82 self.refresh()
81 self.refresh()
83
82
84 # process this if it's a protocol request
83 # process this if it's a protocol request
85 # protocol bits don't need to create any URLs
84 # protocol bits don't need to create any URLs
86 # and the clients always use the old URL structure
85 # and the clients always use the old URL structure
87
86
88 cmd = req.form.get('cmd', [''])[0]
87 cmd = req.form.get('cmd', [''])[0]
89 if cmd and cmd in protocol.__all__:
88 if cmd and cmd in protocol.__all__:
90 try:
89 try:
91 if cmd in perms:
90 if cmd in perms:
92 try:
91 try:
93 self.check_perm(req, perms[cmd])
92 self.check_perm(req, perms[cmd])
94 except ErrorResponse, inst:
93 except ErrorResponse, inst:
95 if cmd == 'unbundle':
94 if cmd == 'unbundle':
96 req.drain()
95 req.drain()
97 raise
96 raise
98 method = getattr(protocol, cmd)
97 method = getattr(protocol, cmd)
99 return method(self.repo, req)
98 return method(self.repo, req)
100 except ErrorResponse, inst:
99 except ErrorResponse, inst:
101 req.respond(inst, protocol.HGTYPE)
100 req.respond(inst, protocol.HGTYPE)
102 if not inst.message:
101 if not inst.message:
103 return []
102 return []
104 return '0\n%s\n' % inst.message,
103 return '0\n%s\n' % inst.message,
105
104
106 # work with CGI variables to create coherent structure
105 # work with CGI variables to create coherent structure
107 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
106 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
108
107
109 req.url = req.env['SCRIPT_NAME']
108 req.url = req.env['SCRIPT_NAME']
110 if not req.url.endswith('/'):
109 if not req.url.endswith('/'):
111 req.url += '/'
110 req.url += '/'
112 if 'REPO_NAME' in req.env:
111 if 'REPO_NAME' in req.env:
113 req.url += req.env['REPO_NAME'] + '/'
112 req.url += req.env['REPO_NAME'] + '/'
114
113
115 if 'PATH_INFO' in req.env:
114 if 'PATH_INFO' in req.env:
116 parts = req.env['PATH_INFO'].strip('/').split('/')
115 parts = req.env['PATH_INFO'].strip('/').split('/')
117 repo_parts = req.env.get('REPO_NAME', '').split('/')
116 repo_parts = req.env.get('REPO_NAME', '').split('/')
118 if parts[:len(repo_parts)] == repo_parts:
117 if parts[:len(repo_parts)] == repo_parts:
119 parts = parts[len(repo_parts):]
118 parts = parts[len(repo_parts):]
120 query = '/'.join(parts)
119 query = '/'.join(parts)
121 else:
120 else:
122 query = req.env['QUERY_STRING'].split('&', 1)[0]
121 query = req.env['QUERY_STRING'].split('&', 1)[0]
123 query = query.split(';', 1)[0]
122 query = query.split(';', 1)[0]
124
123
125 # translate user-visible url structure to internal structure
124 # translate user-visible url structure to internal structure
126
125
127 args = query.split('/', 2)
126 args = query.split('/', 2)
128 if 'cmd' not in req.form and args and args[0]:
127 if 'cmd' not in req.form and args and args[0]:
129
128
130 cmd = args.pop(0)
129 cmd = args.pop(0)
131 style = cmd.rfind('-')
130 style = cmd.rfind('-')
132 if style != -1:
131 if style != -1:
133 req.form['style'] = [cmd[:style]]
132 req.form['style'] = [cmd[:style]]
134 cmd = cmd[style+1:]
133 cmd = cmd[style+1:]
135
134
136 # avoid accepting e.g. style parameter as command
135 # avoid accepting e.g. style parameter as command
137 if hasattr(webcommands, cmd):
136 if hasattr(webcommands, cmd):
138 req.form['cmd'] = [cmd]
137 req.form['cmd'] = [cmd]
139 else:
138 else:
140 cmd = ''
139 cmd = ''
141
140
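# Illustrative sketch (not part of this changeset) of the "<style>-<command>"
# URL form handled above: the part before the last dash selects the style and
# the rest names the web command, so 'raw-file' means style 'raw', command
# 'file', while a bare 'log' leaves the style untouched.
def split_style(cmd):
    dash = cmd.rfind('-')
    if dash == -1:
        return None, cmd
    return cmd[:dash], cmd[dash + 1:]

assert split_style('raw-file') == ('raw', 'file')
assert split_style('log') == (None, 'log')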
142 if cmd == 'static':
141 if cmd == 'static':
143 req.form['file'] = ['/'.join(args)]
142 req.form['file'] = ['/'.join(args)]
144 else:
143 else:
145 if args and args[0]:
144 if args and args[0]:
146 node = args.pop(0)
145 node = args.pop(0)
147 req.form['node'] = [node]
146 req.form['node'] = [node]
148 if args:
147 if args:
149 req.form['file'] = args
148 req.form['file'] = args
150
149
151 if cmd == 'archive':
150 if cmd == 'archive':
152 fn = req.form['node'][0]
151 fn = req.form['node'][0]
153 for type_, spec in self.archive_specs.iteritems():
152 for type_, spec in self.archive_specs.iteritems():
154 ext = spec[2]
153 ext = spec[2]
155 if fn.endswith(ext):
154 if fn.endswith(ext):
156 req.form['node'] = [fn[:-len(ext)]]
155 req.form['node'] = [fn[:-len(ext)]]
157 req.form['type'] = [type_]
156 req.form['type'] = [type_]
158
157
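# Hedged sketch (not part of this changeset) of the archive-name handling
# above: the requested file name carries both the node and the archive type,
# so 'tip.tar.gz' is split into node 'tip' and type 'gz' by matching the
# extensions listed in archive_specs further down.
ARCHIVE_EXTS = {'.tar.bz2': 'bz2', '.tar.gz': 'gz', '.zip': 'zip'}

def split_archive(fn):
    for ext, type_ in ARCHIVE_EXTS.items():
        if fn.endswith(ext):
            return fn[:-len(ext)], type_
    return fn, None

assert split_archive('tip.tar.gz') == ('tip', 'gz')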
159 # process the web interface request
158 # process the web interface request
160
159
161 try:
160 try:
162 tmpl = self.templater(req)
161 tmpl = self.templater(req)
163 ctype = tmpl('mimetype', encoding=self.encoding)
162 ctype = tmpl('mimetype', encoding=self.encoding)
164 ctype = templater.stringify(ctype)
163 ctype = templater.stringify(ctype)
165
164
166 # check read permissions for non-static content
165 # check read permissions for non-static content
167 if cmd != 'static':
166 if cmd != 'static':
168 self.check_perm(req, None)
167 self.check_perm(req, None)
169
168
170 if cmd == '':
169 if cmd == '':
171 req.form['cmd'] = [tmpl.cache['default']]
170 req.form['cmd'] = [tmpl.cache['default']]
172 cmd = req.form['cmd'][0]
171 cmd = req.form['cmd'][0]
173
172
174 if cmd not in webcommands.__all__:
173 if cmd not in webcommands.__all__:
175 msg = 'no such method: %s' % cmd
174 msg = 'no such method: %s' % cmd
176 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
175 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
177 elif cmd == 'file' and 'raw' in req.form.get('style', []):
176 elif cmd == 'file' and 'raw' in req.form.get('style', []):
178 self.ctype = ctype
177 self.ctype = ctype
179 content = webcommands.rawfile(self, req, tmpl)
178 content = webcommands.rawfile(self, req, tmpl)
180 else:
179 else:
181 content = getattr(webcommands, cmd)(self, req, tmpl)
180 content = getattr(webcommands, cmd)(self, req, tmpl)
182 req.respond(HTTP_OK, ctype)
181 req.respond(HTTP_OK, ctype)
183
182
184 return content
183 return content
185
184
186 except error.LookupError, err:
185 except error.LookupError, err:
187 req.respond(HTTP_NOT_FOUND, ctype)
186 req.respond(HTTP_NOT_FOUND, ctype)
188 msg = str(err)
187 msg = str(err)
189 if 'manifest' not in msg:
188 if 'manifest' not in msg:
190 msg = 'revision not found: %s' % err.name
189 msg = 'revision not found: %s' % err.name
191 return tmpl('error', error=msg)
190 return tmpl('error', error=msg)
192 except (error.RepoError, error.RevlogError), inst:
191 except (error.RepoError, error.RevlogError), inst:
193 req.respond(HTTP_SERVER_ERROR, ctype)
192 req.respond(HTTP_SERVER_ERROR, ctype)
194 return tmpl('error', error=str(inst))
193 return tmpl('error', error=str(inst))
195 except ErrorResponse, inst:
194 except ErrorResponse, inst:
196 req.respond(inst, ctype)
195 req.respond(inst, ctype)
197 return tmpl('error', error=inst.message)
196 return tmpl('error', error=inst.message)
198
197
199 def templater(self, req):
198 def templater(self, req):
200
199
201 # determine scheme, port and server name
200 # determine scheme, port and server name
202 # this is needed to create absolute urls
201 # this is needed to create absolute urls
203
202
204 proto = req.env.get('wsgi.url_scheme')
203 proto = req.env.get('wsgi.url_scheme')
205 if proto == 'https':
204 if proto == 'https':
206 proto = 'https'
205 proto = 'https'
207 default_port = "443"
206 default_port = "443"
208 else:
207 else:
209 proto = 'http'
208 proto = 'http'
210 default_port = "80"
209 default_port = "80"
211
210
212 port = req.env["SERVER_PORT"]
211 port = req.env["SERVER_PORT"]
213 port = port != default_port and (":" + port) or ""
212 port = port != default_port and (":" + port) or ""
214 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
213 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
215 staticurl = self.config("web", "staticurl") or req.url + 'static/'
214 staticurl = self.config("web", "staticurl") or req.url + 'static/'
216 if not staticurl.endswith('/'):
215 if not staticurl.endswith('/'):
217 staticurl += '/'
216 staticurl += '/'
218
217
219 # some functions for the templater
218 # some functions for the templater
220
219
221 def header(**map):
220 def header(**map):
222 yield tmpl('header', encoding=self.encoding, **map)
221 yield tmpl('header', encoding=self.encoding, **map)
223
222
224 def footer(**map):
223 def footer(**map):
225 yield tmpl("footer", **map)
224 yield tmpl("footer", **map)
226
225
227 def motd(**map):
226 def motd(**map):
228 yield self.config("web", "motd", "")
227 yield self.config("web", "motd", "")
229
228
230 # figure out which style to use
229 # figure out which style to use
231
230
232 vars = {}
231 vars = {}
233 style = self.config("web", "style", "paper")
232 style = self.config("web", "style", "paper")
234 if 'style' in req.form:
233 if 'style' in req.form:
235 style = req.form['style'][0]
234 style = req.form['style'][0]
236 vars['style'] = style
235 vars['style'] = style
237
236
238 start = req.url[-1] == '?' and '&' or '?'
237 start = req.url[-1] == '?' and '&' or '?'
239 sessionvars = webutil.sessionvars(vars, start)
238 sessionvars = webutil.sessionvars(vars, start)
240 mapfile = templater.stylemap(style, self.templatepath)
239 mapfile = templater.stylemap(style, self.templatepath)
241
240
242 if not self.reponame:
241 if not self.reponame:
243 self.reponame = (self.config("web", "name")
242 self.reponame = (self.config("web", "name")
244 or req.env.get('REPO_NAME')
243 or req.env.get('REPO_NAME')
245 or req.url.strip('/') or self.repo.root)
244 or req.url.strip('/') or self.repo.root)
246
245
247 # create the templater
246 # create the templater
248
247
249 tmpl = templater.templater(mapfile, templatefilters.filters,
248 tmpl = templater.templater(mapfile,
250 defaults={"url": req.url,
249 defaults={"url": req.url,
251 "staticurl": staticurl,
250 "staticurl": staticurl,
252 "urlbase": urlbase,
251 "urlbase": urlbase,
253 "repo": self.reponame,
252 "repo": self.reponame,
254 "header": header,
253 "header": header,
255 "footer": footer,
254 "footer": footer,
256 "motd": motd,
255 "motd": motd,
257 "sessionvars": sessionvars
256 "sessionvars": sessionvars
258 })
257 })
259 return tmpl
258 return tmpl
260
259
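# Hedged illustration (not the real templater code): the hgweb call sites in
# this patch now construct the templater without an explicit filters argument,
# which only works if the templater itself falls back to a standard filter set
# when none is given.  A constructor along these lines would give that
# behaviour; the real signature in mercurial/templater.py may differ.
STANDARD_FILTERS = {'strip': str.strip, 'lower': str.lower}  # stand-in set

class DemoTemplater(object):
    def __init__(self, mapfile, filters=None, defaults=None):
        if filters is None:
            filters = STANDARD_FILTERS     # default to the standard filters
        self.mapfile = mapfile
        self.filters = filters
        self.defaults = defaults or {}

t = DemoTemplater('map-cmdline.default')
assert 'strip' in t.filters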
261 def archivelist(self, nodeid):
260 def archivelist(self, nodeid):
262 allowed = self.configlist("web", "allow_archive")
261 allowed = self.configlist("web", "allow_archive")
263 for i, spec in self.archive_specs.iteritems():
262 for i, spec in self.archive_specs.iteritems():
264 if i in allowed or self.configbool("web", "allow" + i):
263 if i in allowed or self.configbool("web", "allow" + i):
265 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
264 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
266
265
267 archive_specs = {
266 archive_specs = {
268 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
267 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
269 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
268 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
270 'zip': ('application/zip', 'zip', '.zip', None),
269 'zip': ('application/zip', 'zip', '.zip', None),
271 }
270 }
272
271
273 def check_perm(self, req, op):
272 def check_perm(self, req, op):
274 '''Check permission for operation based on request data (including
273 '''Check permission for operation based on request data (including
275 authentication info). Return if op allowed, else raise an ErrorResponse
274 authentication info). Return if op allowed, else raise an ErrorResponse
276 exception.'''
275 exception.'''
277
276
278 user = req.env.get('REMOTE_USER')
277 user = req.env.get('REMOTE_USER')
279
278
280 deny_read = self.configlist('web', 'deny_read')
279 deny_read = self.configlist('web', 'deny_read')
281 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
280 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
282 raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
281 raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
283
282
284 allow_read = self.configlist('web', 'allow_read')
283 allow_read = self.configlist('web', 'allow_read')
285 result = (not allow_read) or (allow_read == ['*'])
284 result = (not allow_read) or (allow_read == ['*'])
286 if not (result or user in allow_read):
285 if not (result or user in allow_read):
287 raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
286 raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
288
287
289 if op == 'pull' and not self.allowpull:
288 if op == 'pull' and not self.allowpull:
290 raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
289 raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
291 elif op == 'pull' or op is None: # op is None for interface requests
290 elif op == 'pull' or op is None: # op is None for interface requests
292 return
291 return
293
292
294 # enforce that you can only push using POST requests
293 # enforce that you can only push using POST requests
295 if req.env['REQUEST_METHOD'] != 'POST':
294 if req.env['REQUEST_METHOD'] != 'POST':
296 msg = 'push requires POST request'
295 msg = 'push requires POST request'
297 raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
296 raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
298
297
299 # require ssl by default for pushing, auth info cannot be sniffed
298 # require ssl by default for pushing, auth info cannot be sniffed
300 # and replayed
299 # and replayed
301 scheme = req.env.get('wsgi.url_scheme')
300 scheme = req.env.get('wsgi.url_scheme')
302 if self.configbool('web', 'push_ssl', True) and scheme != 'https':
301 if self.configbool('web', 'push_ssl', True) and scheme != 'https':
303 raise ErrorResponse(HTTP_OK, 'ssl required')
302 raise ErrorResponse(HTTP_OK, 'ssl required')
304
303
305 deny = self.configlist('web', 'deny_push')
304 deny = self.configlist('web', 'deny_push')
306 if deny and (not user or deny == ['*'] or user in deny):
305 if deny and (not user or deny == ['*'] or user in deny):
307 raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
306 raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
308
307
309 allow = self.configlist('web', 'allow_push')
308 allow = self.configlist('web', 'allow_push')
310 result = allow and (allow == ['*'] or user in allow)
309 result = allow and (allow == ['*'] or user in allow)
311 if not result:
310 if not result:
312 raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
311 raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
@@ -1,314 +1,314 @@
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import os
9 import os
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import ui, hg, util, templater, templatefilters
11 from mercurial import ui, hg, util, templater
12 from mercurial import error, encoding
12 from mercurial import error, encoding
13 from common import ErrorResponse, get_mtime, staticfile, paritygen,\
13 from common import ErrorResponse, get_mtime, staticfile, paritygen,\
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from hgweb_mod import hgweb
15 from hgweb_mod import hgweb
16 from request import wsgirequest
16 from request import wsgirequest
17 import webutil
17 import webutil
18
18
19 def cleannames(items):
19 def cleannames(items):
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
21
21
22 class hgwebdir(object):
22 class hgwebdir(object):
23
23
24 def __init__(self, conf, baseui=None):
24 def __init__(self, conf, baseui=None):
25
25
26 if baseui:
26 if baseui:
27 self.ui = baseui.copy()
27 self.ui = baseui.copy()
28 else:
28 else:
29 self.ui = ui.ui()
29 self.ui = ui.ui()
30 self.ui.setconfig('ui', 'report_untrusted', 'off')
30 self.ui.setconfig('ui', 'report_untrusted', 'off')
31 self.ui.setconfig('ui', 'interactive', 'off')
31 self.ui.setconfig('ui', 'interactive', 'off')
32
32
33 if isinstance(conf, (list, tuple)):
33 if isinstance(conf, (list, tuple)):
34 self.repos = cleannames(conf)
34 self.repos = cleannames(conf)
35 elif isinstance(conf, dict):
35 elif isinstance(conf, dict):
36 self.repos = sorted(cleannames(conf.items()))
36 self.repos = sorted(cleannames(conf.items()))
37 else:
37 else:
38 self.ui.readconfig(conf, remap={'paths': 'hgweb-paths'}, trust=True)
38 self.ui.readconfig(conf, remap={'paths': 'hgweb-paths'}, trust=True)
39 self.repos = []
39 self.repos = []
40
40
41 self.motd = self.ui.config('web', 'motd')
41 self.motd = self.ui.config('web', 'motd')
42 self.style = self.ui.config('web', 'style', 'paper')
42 self.style = self.ui.config('web', 'style', 'paper')
43 self.stripecount = self.ui.config('web', 'stripes', 1)
43 self.stripecount = self.ui.config('web', 'stripes', 1)
44 if self.stripecount:
44 if self.stripecount:
45 self.stripecount = int(self.stripecount)
45 self.stripecount = int(self.stripecount)
46 self._baseurl = self.ui.config('web', 'baseurl')
46 self._baseurl = self.ui.config('web', 'baseurl')
47
47
48 if self.repos:
48 if self.repos:
49 return
49 return
50
50
51 for prefix, root in cleannames(self.ui.configitems('hgweb-paths')):
51 for prefix, root in cleannames(self.ui.configitems('hgweb-paths')):
52 roothead, roottail = os.path.split(root)
52 roothead, roottail = os.path.split(root)
53 # "foo = /bar/*" makes every subrepo of /bar/ to be
53 # "foo = /bar/*" makes every subrepo of /bar/ to be
54 # mounted as foo/subrepo
54 # mounted as foo/subrepo
55 # and "foo = /bar/**" also recurses into the subdirectories,
55 # and "foo = /bar/**" also recurses into the subdirectories,
56 # remember to use it without working dir.
56 # remember to use it without working dir.
57 try:
57 try:
58 recurse = {'*': False, '**': True}[roottail]
58 recurse = {'*': False, '**': True}[roottail]
59 except KeyError:
59 except KeyError:
60 self.repos.append((prefix, root))
60 self.repos.append((prefix, root))
61 continue
61 continue
62 roothead = os.path.normpath(roothead)
62 roothead = os.path.normpath(roothead)
63 for path in util.walkrepos(roothead, followsym=True,
63 for path in util.walkrepos(roothead, followsym=True,
64 recurse=recurse):
64 recurse=recurse):
65 path = os.path.normpath(path)
65 path = os.path.normpath(path)
66 name = util.pconvert(path[len(roothead):]).strip('/')
66 name = util.pconvert(path[len(roothead):]).strip('/')
67 if prefix:
67 if prefix:
68 name = prefix + '/' + name
68 name = prefix + '/' + name
69 self.repos.append((name, path))
69 self.repos.append((name, path))
70
70
71 for prefix, root in self.ui.configitems('collections'):
71 for prefix, root in self.ui.configitems('collections'):
72 for path in util.walkrepos(root, followsym=True):
72 for path in util.walkrepos(root, followsym=True):
73 repo = os.path.normpath(path)
73 repo = os.path.normpath(path)
74 name = repo
74 name = repo
75 if name.startswith(prefix):
75 if name.startswith(prefix):
76 name = name[len(prefix):]
76 name = name[len(prefix):]
77 self.repos.append((name.lstrip(os.sep), repo))
77 self.repos.append((name.lstrip(os.sep), repo))
78
78
79 self.repos.sort()
79 self.repos.sort()
80
80
81 def run(self):
81 def run(self):
82 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
82 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
83 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
83 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
84 import mercurial.hgweb.wsgicgi as wsgicgi
84 import mercurial.hgweb.wsgicgi as wsgicgi
85 wsgicgi.launch(self)
85 wsgicgi.launch(self)
86
86
87 def __call__(self, env, respond):
87 def __call__(self, env, respond):
88 req = wsgirequest(env, respond)
88 req = wsgirequest(env, respond)
89 return self.run_wsgi(req)
89 return self.run_wsgi(req)
90
90
91 def read_allowed(self, ui, req):
91 def read_allowed(self, ui, req):
92 """Check allow_read and deny_read config options of a repo's ui object
92 """Check allow_read and deny_read config options of a repo's ui object
93 to determine user permissions. By default, with neither option set (or
93 to determine user permissions. By default, with neither option set (or
94 both empty), allow all users to read the repo. There are two ways a
94 both empty), allow all users to read the repo. There are two ways a
95 user can be denied read access: (1) deny_read is not empty, and the
95 user can be denied read access: (1) deny_read is not empty, and the
96 user is unauthenticated or deny_read contains user (or *), and (2)
96 user is unauthenticated or deny_read contains user (or *), and (2)
97 allow_read is not empty and the user is not in allow_read. Return True
97 allow_read is not empty and the user is not in allow_read. Return True
98 if user is allowed to read the repo, else return False."""
98 if user is allowed to read the repo, else return False."""
99
99
100 user = req.env.get('REMOTE_USER')
100 user = req.env.get('REMOTE_USER')
101
101
102 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
102 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
103 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
103 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
104 return False
104 return False
105
105
106 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
106 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
107 # by default, allow reading if no allow_read option has been set
107 # by default, allow reading if no allow_read option has been set
108 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
108 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
109 return True
109 return True
110
110
111 return False
111 return False
112
112
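# Illustrative sketch (not part of this changeset) of the rule documented in
# read_allowed() above: a matching deny_read entry (or an anonymous user when
# deny_read is non-empty) wins, otherwise an empty allow_read, a '*' wildcard
# or an explicit entry grants access.
def may_read(user, deny_read, allow_read):
    if deny_read and (not user or deny_read == ['*'] or user in deny_read):
        return False
    return (not allow_read) or allow_read == ['*'] or user in allow_read

assert may_read('alice', [], []) is True
assert may_read(None, ['*'], []) is False
assert may_read('bob', [], ['alice']) is False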
113 def run_wsgi(self, req):
113 def run_wsgi(self, req):
114
114
115 try:
115 try:
116 try:
116 try:
117
117
118 virtual = req.env.get("PATH_INFO", "").strip('/')
118 virtual = req.env.get("PATH_INFO", "").strip('/')
119 tmpl = self.templater(req)
119 tmpl = self.templater(req)
120 ctype = tmpl('mimetype', encoding=encoding.encoding)
120 ctype = tmpl('mimetype', encoding=encoding.encoding)
121 ctype = templater.stringify(ctype)
121 ctype = templater.stringify(ctype)
122
122
123 # a static file
123 # a static file
124 if virtual.startswith('static/') or 'static' in req.form:
124 if virtual.startswith('static/') or 'static' in req.form:
125 if virtual.startswith('static/'):
125 if virtual.startswith('static/'):
126 fname = virtual[7:]
126 fname = virtual[7:]
127 else:
127 else:
128 fname = req.form['static'][0]
128 fname = req.form['static'][0]
129 static = templater.templatepath('static')
129 static = templater.templatepath('static')
130 return (staticfile(static, fname, req),)
130 return (staticfile(static, fname, req),)
131
131
132 # top-level index
132 # top-level index
133 elif not virtual:
133 elif not virtual:
134 req.respond(HTTP_OK, ctype)
134 req.respond(HTTP_OK, ctype)
135 return self.makeindex(req, tmpl)
135 return self.makeindex(req, tmpl)
136
136
137 # nested indexes and hgwebs
137 # nested indexes and hgwebs
138
138
139 repos = dict(self.repos)
139 repos = dict(self.repos)
140 while virtual:
140 while virtual:
141 real = repos.get(virtual)
141 real = repos.get(virtual)
142 if real:
142 if real:
143 req.env['REPO_NAME'] = virtual
143 req.env['REPO_NAME'] = virtual
144 try:
144 try:
145 repo = hg.repository(self.ui, real)
145 repo = hg.repository(self.ui, real)
146 return hgweb(repo).run_wsgi(req)
146 return hgweb(repo).run_wsgi(req)
147 except IOError, inst:
147 except IOError, inst:
148 msg = inst.strerror
148 msg = inst.strerror
149 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
149 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
150 except error.RepoError, inst:
150 except error.RepoError, inst:
151 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
151 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
152
152
153 # browse subdirectories
153 # browse subdirectories
154 subdir = virtual + '/'
154 subdir = virtual + '/'
155 if [r for r in repos if r.startswith(subdir)]:
155 if [r for r in repos if r.startswith(subdir)]:
156 req.respond(HTTP_OK, ctype)
156 req.respond(HTTP_OK, ctype)
157 return self.makeindex(req, tmpl, subdir)
157 return self.makeindex(req, tmpl, subdir)
158
158
159 up = virtual.rfind('/')
159 up = virtual.rfind('/')
160 if up < 0:
160 if up < 0:
161 break
161 break
162 virtual = virtual[:up]
162 virtual = virtual[:up]
163
163
164 # prefixes not found
164 # prefixes not found
165 req.respond(HTTP_NOT_FOUND, ctype)
165 req.respond(HTTP_NOT_FOUND, ctype)
166 return tmpl("notfound", repo=virtual)
166 return tmpl("notfound", repo=virtual)
167
167
168 except ErrorResponse, err:
168 except ErrorResponse, err:
169 req.respond(err, ctype)
169 req.respond(err, ctype)
170 return tmpl('error', error=err.message or '')
170 return tmpl('error', error=err.message or '')
171 finally:
171 finally:
172 tmpl = None
172 tmpl = None
173
173
174 def makeindex(self, req, tmpl, subdir=""):
174 def makeindex(self, req, tmpl, subdir=""):
175
175
176 def archivelist(ui, nodeid, url):
176 def archivelist(ui, nodeid, url):
177 allowed = ui.configlist("web", "allow_archive", untrusted=True)
177 allowed = ui.configlist("web", "allow_archive", untrusted=True)
178 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
178 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
179 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
179 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
180 untrusted=True):
180 untrusted=True):
181 yield {"type" : i[0], "extension": i[1],
181 yield {"type" : i[0], "extension": i[1],
182 "node": nodeid, "url": url}
182 "node": nodeid, "url": url}
183
183
184 sortdefault = 'name', False
184 sortdefault = 'name', False
185 def entries(sortcolumn="", descending=False, subdir="", **map):
185 def entries(sortcolumn="", descending=False, subdir="", **map):
186 rows = []
186 rows = []
187 parity = paritygen(self.stripecount)
187 parity = paritygen(self.stripecount)
188 for name, path in self.repos:
188 for name, path in self.repos:
189 if not name.startswith(subdir):
189 if not name.startswith(subdir):
190 continue
190 continue
191 name = name[len(subdir):]
191 name = name[len(subdir):]
192
192
193 u = self.ui.copy()
193 u = self.ui.copy()
194 try:
194 try:
195 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
195 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
196 except Exception, e:
196 except Exception, e:
197 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
197 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
198 continue
198 continue
199 def get(section, name, default=None):
199 def get(section, name, default=None):
200 return u.config(section, name, default, untrusted=True)
200 return u.config(section, name, default, untrusted=True)
201
201
202 if u.configbool("web", "hidden", untrusted=True):
202 if u.configbool("web", "hidden", untrusted=True):
203 continue
203 continue
204
204
205 if not self.read_allowed(u, req):
205 if not self.read_allowed(u, req):
206 continue
206 continue
207
207
208 parts = [name]
208 parts = [name]
209 if 'PATH_INFO' in req.env:
209 if 'PATH_INFO' in req.env:
210 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
210 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
211 if req.env['SCRIPT_NAME']:
211 if req.env['SCRIPT_NAME']:
212 parts.insert(0, req.env['SCRIPT_NAME'])
212 parts.insert(0, req.env['SCRIPT_NAME'])
213 url = ('/'.join(parts).replace("//", "/")) + '/'
213 url = ('/'.join(parts).replace("//", "/")) + '/'
214
214
215 # update time with local timezone
215 # update time with local timezone
216 try:
216 try:
217 d = (get_mtime(path), util.makedate()[1])
217 d = (get_mtime(path), util.makedate()[1])
218 except OSError:
218 except OSError:
219 continue
219 continue
220
220
221 contact = get_contact(get)
221 contact = get_contact(get)
222 description = get("web", "description", "")
222 description = get("web", "description", "")
223 name = get("web", "name", name)
223 name = get("web", "name", name)
224 row = dict(contact=contact or "unknown",
224 row = dict(contact=contact or "unknown",
225 contact_sort=contact.upper() or "unknown",
225 contact_sort=contact.upper() or "unknown",
226 name=name,
226 name=name,
227 name_sort=name,
227 name_sort=name,
228 url=url,
228 url=url,
229 description=description or "unknown",
229 description=description or "unknown",
230 description_sort=description.upper() or "unknown",
230 description_sort=description.upper() or "unknown",
231 lastchange=d,
231 lastchange=d,
232 lastchange_sort=d[1]-d[0],
232 lastchange_sort=d[1]-d[0],
233 archives=archivelist(u, "tip", url))
233 archives=archivelist(u, "tip", url))
234 if (not sortcolumn or (sortcolumn, descending) == sortdefault):
234 if (not sortcolumn or (sortcolumn, descending) == sortdefault):
235 # fast path for unsorted output
235 # fast path for unsorted output
236 row['parity'] = parity.next()
236 row['parity'] = parity.next()
237 yield row
237 yield row
238 else:
238 else:
239 rows.append((row["%s_sort" % sortcolumn], row))
239 rows.append((row["%s_sort" % sortcolumn], row))
240 if rows:
240 if rows:
241 rows.sort()
241 rows.sort()
242 if descending:
242 if descending:
243 rows.reverse()
243 rows.reverse()
244 for key, row in rows:
244 for key, row in rows:
245 row['parity'] = parity.next()
245 row['parity'] = parity.next()
246 yield row
246 yield row
247
247
248 sortable = ["name", "description", "contact", "lastchange"]
248 sortable = ["name", "description", "contact", "lastchange"]
249 sortcolumn, descending = sortdefault
249 sortcolumn, descending = sortdefault
250 if 'sort' in req.form:
250 if 'sort' in req.form:
251 sortcolumn = req.form['sort'][0]
251 sortcolumn = req.form['sort'][0]
252 descending = sortcolumn.startswith('-')
252 descending = sortcolumn.startswith('-')
253 if descending:
253 if descending:
254 sortcolumn = sortcolumn[1:]
254 sortcolumn = sortcolumn[1:]
255 if sortcolumn not in sortable:
255 if sortcolumn not in sortable:
256 sortcolumn = ""
256 sortcolumn = ""
257
257
258 sort = [("sort_%s" % column,
258 sort = [("sort_%s" % column,
259 "%s%s" % ((not descending and column == sortcolumn)
259 "%s%s" % ((not descending and column == sortcolumn)
260 and "-" or "", column))
260 and "-" or "", column))
261 for column in sortable]
261 for column in sortable]
262
262
263 if self._baseurl is not None:
263 if self._baseurl is not None:
264 req.env['SCRIPT_NAME'] = self._baseurl
264 req.env['SCRIPT_NAME'] = self._baseurl
265
265
266 return tmpl("index", entries=entries, subdir=subdir,
266 return tmpl("index", entries=entries, subdir=subdir,
267 sortcolumn=sortcolumn, descending=descending,
267 sortcolumn=sortcolumn, descending=descending,
268 **dict(sort))
268 **dict(sort))
269
269
270 def templater(self, req):
270 def templater(self, req):
271
271
272 def header(**map):
272 def header(**map):
273 yield tmpl('header', encoding=encoding.encoding, **map)
273 yield tmpl('header', encoding=encoding.encoding, **map)
274
274
275 def footer(**map):
275 def footer(**map):
276 yield tmpl("footer", **map)
276 yield tmpl("footer", **map)
277
277
278 def motd(**map):
278 def motd(**map):
279 if self.motd is not None:
279 if self.motd is not None:
280 yield self.motd
280 yield self.motd
281 else:
281 else:
282 yield config('web', 'motd', '')
282 yield config('web', 'motd', '')
283
283
284 def config(section, name, default=None, untrusted=True):
284 def config(section, name, default=None, untrusted=True):
285 return self.ui.config(section, name, default, untrusted)
285 return self.ui.config(section, name, default, untrusted)
286
286
287 if self._baseurl is not None:
287 if self._baseurl is not None:
288 req.env['SCRIPT_NAME'] = self._baseurl
288 req.env['SCRIPT_NAME'] = self._baseurl
289
289
290 url = req.env.get('SCRIPT_NAME', '')
290 url = req.env.get('SCRIPT_NAME', '')
291 if not url.endswith('/'):
291 if not url.endswith('/'):
292 url += '/'
292 url += '/'
293
293
294 vars = {}
294 vars = {}
295 style = self.style
295 style = self.style
296 if 'style' in req.form:
296 if 'style' in req.form:
297 vars['style'] = style = req.form['style'][0]
297 vars['style'] = style = req.form['style'][0]
298 start = url[-1] == '?' and '&' or '?'
298 start = url[-1] == '?' and '&' or '?'
299 sessionvars = webutil.sessionvars(vars, start)
299 sessionvars = webutil.sessionvars(vars, start)
300
300
301 staticurl = config('web', 'staticurl') or url + 'static/'
301 staticurl = config('web', 'staticurl') or url + 'static/'
302 if not staticurl.endswith('/'):
302 if not staticurl.endswith('/'):
303 staticurl += '/'
303 staticurl += '/'
304
304
305 style = 'style' in req.form and req.form['style'][0] or self.style
305 style = 'style' in req.form and req.form['style'][0] or self.style
306 mapfile = templater.stylemap(style)
306 mapfile = templater.stylemap(style)
307 tmpl = templater.templater(mapfile, templatefilters.filters,
307 tmpl = templater.templater(mapfile,
308 defaults={"header": header,
308 defaults={"header": header,
309 "footer": footer,
309 "footer": footer,
310 "motd": motd,
310 "motd": motd,
311 "url": url,
311 "url": url,
312 "staticurl": staticurl,
312 "staticurl": staticurl,
313 "sessionvars": sessionvars})
313 "sessionvars": sessionvars})
314 return tmpl
314 return tmpl
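The only functional change in the hgweb section above (hgwebdir_mod.py, going by its contents) is the templater.templater(...) call at line 307: the explicit templatefilters.filters argument is dropped, because the templater now installs the standard filter table itself. A minimal before/after sketch in Python 2, matching the code above; mapfile is a stand-in here (hgweb obtains it from templater.stylemap(style)), and the defaults dict is illustrative only:

from mercurial import templater, templatefilters

mapfile = None  # stand-in for templater.stylemap(style); None skips map-file parsing

# before this change: callers had to pass the standard filter table explicitly
tmpl_before = templater.templater(mapfile, templatefilters.filters,
                                  defaults={'url': '/'})

# after this change: the same call without the filters argument is enough;
# passing templatefilters.filters explicitly is still accepted, just redundant
tmpl_after = templater.templater(mapfile, defaults={'url': '/'})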
@@ -1,203 +1,209 @@
1 # template-filters.py - common template expansion filters
1 # template-filters.py - common template expansion filters
2 #
2 #
3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import cgi, re, os, time, urllib, textwrap
8 import cgi, re, os, time, urllib, textwrap
9 import util, templater, encoding
9 import util, templater, encoding
10
10
11 def stringify(thing):
12 '''turn nested template iterator into string.'''
13 if hasattr(thing, '__iter__') and not isinstance(thing, str):
14 return "".join([stringify(t) for t in thing if t is not None])
15 return str(thing)
16
11 agescales = [("second", 1),
17 agescales = [("second", 1),
12 ("minute", 60),
18 ("minute", 60),
13 ("hour", 3600),
19 ("hour", 3600),
14 ("day", 3600 * 24),
20 ("day", 3600 * 24),
15 ("week", 3600 * 24 * 7),
21 ("week", 3600 * 24 * 7),
16 ("month", 3600 * 24 * 30),
22 ("month", 3600 * 24 * 30),
17 ("year", 3600 * 24 * 365)]
23 ("year", 3600 * 24 * 365)]
18
24
19 agescales.reverse()
25 agescales.reverse()
20
26
21 def age(date):
27 def age(date):
22 '''turn a (timestamp, tzoff) tuple into an age string.'''
28 '''turn a (timestamp, tzoff) tuple into an age string.'''
23
29
24 def plural(t, c):
30 def plural(t, c):
25 if c == 1:
31 if c == 1:
26 return t
32 return t
27 return t + "s"
33 return t + "s"
28 def fmt(t, c):
34 def fmt(t, c):
29 return "%d %s" % (c, plural(t, c))
35 return "%d %s" % (c, plural(t, c))
30
36
31 now = time.time()
37 now = time.time()
32 then = date[0]
38 then = date[0]
33 if then > now:
39 if then > now:
34 return 'in the future'
40 return 'in the future'
35
41
36 delta = max(1, int(now - then))
42 delta = max(1, int(now - then))
37 for t, s in agescales:
43 for t, s in agescales:
38 n = delta / s
44 n = delta / s
39 if n >= 2 or s == 1:
45 if n >= 2 or s == 1:
40 return fmt(t, n)
46 return fmt(t, n)
41
47
42 para_re = None
48 para_re = None
43 space_re = None
49 space_re = None
44
50
45 def fill(text, width):
51 def fill(text, width):
46 '''fill many paragraphs.'''
52 '''fill many paragraphs.'''
47 global para_re, space_re
53 global para_re, space_re
48 if para_re is None:
54 if para_re is None:
49 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
55 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
50 space_re = re.compile(r' +')
56 space_re = re.compile(r' +')
51
57
52 def findparas():
58 def findparas():
53 start = 0
59 start = 0
54 while True:
60 while True:
55 m = para_re.search(text, start)
61 m = para_re.search(text, start)
56 if not m:
62 if not m:
57 w = len(text)
63 w = len(text)
58 while w > start and text[w-1].isspace(): w -= 1
64 while w > start and text[w-1].isspace(): w -= 1
59 yield text[start:w], text[w:]
65 yield text[start:w], text[w:]
60 break
66 break
61 yield text[start:m.start(0)], m.group(1)
67 yield text[start:m.start(0)], m.group(1)
62 start = m.end(1)
68 start = m.end(1)
63
69
64 return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
70 return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
65 for para, rest in findparas()])
71 for para, rest in findparas()])
66
72
67 def firstline(text):
73 def firstline(text):
68 '''return the first line of text'''
74 '''return the first line of text'''
69 try:
75 try:
70 return text.splitlines(1)[0].rstrip('\r\n')
76 return text.splitlines(1)[0].rstrip('\r\n')
71 except IndexError:
77 except IndexError:
72 return ''
78 return ''
73
79
74 def nl2br(text):
80 def nl2br(text):
75 '''replace raw newlines with xhtml line breaks.'''
81 '''replace raw newlines with xhtml line breaks.'''
76 return text.replace('\n', '<br/>\n')
82 return text.replace('\n', '<br/>\n')
77
83
78 def obfuscate(text):
84 def obfuscate(text):
79 text = unicode(text, encoding.encoding, 'replace')
85 text = unicode(text, encoding.encoding, 'replace')
80 return ''.join(['&#%d;' % ord(c) for c in text])
86 return ''.join(['&#%d;' % ord(c) for c in text])
81
87
82 def domain(author):
88 def domain(author):
83 '''get domain of author, or empty string if none.'''
89 '''get domain of author, or empty string if none.'''
84 f = author.find('@')
90 f = author.find('@')
85 if f == -1: return ''
91 if f == -1: return ''
86 author = author[f+1:]
92 author = author[f+1:]
87 f = author.find('>')
93 f = author.find('>')
88 if f >= 0: author = author[:f]
94 if f >= 0: author = author[:f]
89 return author
95 return author
90
96
91 def person(author):
97 def person(author):
92 '''get name of author, or else username.'''
98 '''get name of author, or else username.'''
93 f = author.find('<')
99 f = author.find('<')
94 if f == -1: return util.shortuser(author)
100 if f == -1: return util.shortuser(author)
95 return author[:f].rstrip()
101 return author[:f].rstrip()
96
102
97 def indent(text, prefix):
103 def indent(text, prefix):
98 '''indent each non-empty line of text after first with prefix.'''
104 '''indent each non-empty line of text after first with prefix.'''
99 lines = text.splitlines()
105 lines = text.splitlines()
100 num_lines = len(lines)
106 num_lines = len(lines)
101 def indenter():
107 def indenter():
102 for i in xrange(num_lines):
108 for i in xrange(num_lines):
103 l = lines[i]
109 l = lines[i]
104 if i and l.strip():
110 if i and l.strip():
105 yield prefix
111 yield prefix
106 yield l
112 yield l
107 if i < num_lines - 1 or text.endswith('\n'):
113 if i < num_lines - 1 or text.endswith('\n'):
108 yield '\n'
114 yield '\n'
109 return "".join(indenter())
115 return "".join(indenter())
110
116
111 def permissions(flags):
117 def permissions(flags):
112 if "l" in flags:
118 if "l" in flags:
113 return "lrwxrwxrwx"
119 return "lrwxrwxrwx"
114 if "x" in flags:
120 if "x" in flags:
115 return "-rwxr-xr-x"
121 return "-rwxr-xr-x"
116 return "-rw-r--r--"
122 return "-rw-r--r--"
117
123
118 def xmlescape(text):
124 def xmlescape(text):
119 text = (text
125 text = (text
120 .replace('&', '&amp;')
126 .replace('&', '&amp;')
121 .replace('<', '&lt;')
127 .replace('<', '&lt;')
122 .replace('>', '&gt;')
128 .replace('>', '&gt;')
123 .replace('"', '&quot;')
129 .replace('"', '&quot;')
124 .replace("'", '&#39;')) # &apos; invalid in HTML
130 .replace("'", '&#39;')) # &apos; invalid in HTML
125 return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
131 return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
126
132
127 _escapes = [
133 _escapes = [
128 ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'),
134 ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'),
129 ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'),
135 ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'),
130 ]
136 ]
131
137
132 def jsonescape(s):
138 def jsonescape(s):
133 for k, v in _escapes:
139 for k, v in _escapes:
134 s = s.replace(k, v)
140 s = s.replace(k, v)
135 return s
141 return s
136
142
137 def json(obj):
143 def json(obj):
138 if obj is None or obj is False or obj is True:
144 if obj is None or obj is False or obj is True:
139 return {None: 'null', False: 'false', True: 'true'}[obj]
145 return {None: 'null', False: 'false', True: 'true'}[obj]
140 elif isinstance(obj, int) or isinstance(obj, float):
146 elif isinstance(obj, int) or isinstance(obj, float):
141 return str(obj)
147 return str(obj)
142 elif isinstance(obj, str):
148 elif isinstance(obj, str):
143 return '"%s"' % jsonescape(obj)
149 return '"%s"' % jsonescape(obj)
144 elif isinstance(obj, unicode):
150 elif isinstance(obj, unicode):
145 return json(obj.encode('utf-8'))
151 return json(obj.encode('utf-8'))
146 elif hasattr(obj, 'keys'):
152 elif hasattr(obj, 'keys'):
147 out = []
153 out = []
148 for k, v in obj.iteritems():
154 for k, v in obj.iteritems():
149 s = '%s: %s' % (json(k), json(v))
155 s = '%s: %s' % (json(k), json(v))
150 out.append(s)
156 out.append(s)
151 return '{' + ', '.join(out) + '}'
157 return '{' + ', '.join(out) + '}'
152 elif hasattr(obj, '__iter__'):
158 elif hasattr(obj, '__iter__'):
153 out = []
159 out = []
154 for i in obj:
160 for i in obj:
155 out.append(json(i))
161 out.append(json(i))
156 return '[' + ', '.join(out) + ']'
162 return '[' + ', '.join(out) + ']'
157 else:
163 else:
158 raise TypeError('cannot encode type %s' % obj.__class__.__name__)
164 raise TypeError('cannot encode type %s' % obj.__class__.__name__)
159
165
160 def stripdir(text):
166 def stripdir(text):
161 '''Treat the text as path and strip a directory level, if possible.'''
167 '''Treat the text as path and strip a directory level, if possible.'''
162 dir = os.path.dirname(text)
168 dir = os.path.dirname(text)
163 if dir == "":
169 if dir == "":
164 return os.path.basename(text)
170 return os.path.basename(text)
165 else:
171 else:
166 return dir
172 return dir
167
173
168 def nonempty(str):
174 def nonempty(str):
169 return str or "(none)"
175 return str or "(none)"
170
176
171 filters = {
177 filters = {
172 "addbreaks": nl2br,
178 "addbreaks": nl2br,
173 "basename": os.path.basename,
179 "basename": os.path.basename,
174 "stripdir": stripdir,
180 "stripdir": stripdir,
175 "age": age,
181 "age": age,
176 "date": lambda x: util.datestr(x),
182 "date": lambda x: util.datestr(x),
177 "domain": domain,
183 "domain": domain,
178 "email": util.email,
184 "email": util.email,
179 "escape": lambda x: cgi.escape(x, True),
185 "escape": lambda x: cgi.escape(x, True),
180 "fill68": lambda x: fill(x, width=68),
186 "fill68": lambda x: fill(x, width=68),
181 "fill76": lambda x: fill(x, width=76),
187 "fill76": lambda x: fill(x, width=76),
182 "firstline": firstline,
188 "firstline": firstline,
183 "tabindent": lambda x: indent(x, '\t'),
189 "tabindent": lambda x: indent(x, '\t'),
184 "hgdate": lambda x: "%d %d" % x,
190 "hgdate": lambda x: "%d %d" % x,
185 "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'),
191 "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'),
186 "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'),
192 "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'),
187 "json": json,
193 "json": json,
188 "jsonescape": jsonescape,
194 "jsonescape": jsonescape,
189 "nonempty": nonempty,
195 "nonempty": nonempty,
190 "obfuscate": obfuscate,
196 "obfuscate": obfuscate,
191 "permissions": permissions,
197 "permissions": permissions,
192 "person": person,
198 "person": person,
193 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"),
199 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"),
194 "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"),
200 "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"),
195 "short": lambda x: x[:12],
201 "short": lambda x: x[:12],
196 "shortdate": util.shortdate,
202 "shortdate": util.shortdate,
197 "stringify": templater.stringify,
203 "stringify": stringify,
198 "strip": lambda x: x.strip(),
204 "strip": lambda x: x.strip(),
199 "urlescape": lambda x: urllib.quote(x),
205 "urlescape": lambda x: urllib.quote(x),
200 "user": lambda x: util.shortuser(x),
206 "user": lambda x: util.shortuser(x),
201 "stringescape": lambda x: x.encode('string_escape'),
207 "stringescape": lambda x: x.encode('string_escape'),
202 "xmlescape": xmlescape,
208 "xmlescape": xmlescape,
203 }
209 }
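A quick, hedged tour of a few filters from the table above, including stringify(), which this change moves into this module (templatefilters.py, going by its header comment) so that templater.py can simply re-export it. Python 2, with the expected output noted in the comments:

from mercurial import templatefilters

print templatefilters.person('Adam Hupp <adam@hupp.org>')    # Adam Hupp
print templatefilters.domain('Adam Hupp <adam@hupp.org>')    # hupp.org
print templatefilters.json(['a', {'n': 1}])                  # ["a", {"n": 1}]

# stringify flattens the nested iterators the template engine produces,
# skipping None values along the way.
nested = iter(['a', ['b', iter(['c', None])]])
print templatefilters.stringify(nested)                      # abc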
@@ -1,215 +1,211 @@
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import re, sys, os
9 import re, sys, os
10 import util, config
10 import util, config, templatefilters
11
11
12 path = ['templates', '../templates']
12 path = ['templates', '../templates']
13 stringify = templatefilters.stringify
13
14
14 def parsestring(s, quoted=True):
15 def parsestring(s, quoted=True):
15 '''parse a string using simple c-like syntax.
16 '''parse a string using simple c-like syntax.
16 string must be in quotes if quoted is True.'''
17 string must be in quotes if quoted is True.'''
17 if quoted:
18 if quoted:
18 if len(s) < 2 or s[0] != s[-1]:
19 if len(s) < 2 or s[0] != s[-1]:
19 raise SyntaxError(_('unmatched quotes'))
20 raise SyntaxError(_('unmatched quotes'))
20 return s[1:-1].decode('string_escape')
21 return s[1:-1].decode('string_escape')
21
22
22 return s.decode('string_escape')
23 return s.decode('string_escape')
23
24
24 class engine(object):
25 class engine(object):
25 '''template expansion engine.
26 '''template expansion engine.
26
27
27 template expansion works like this. a map file contains key=value
28 template expansion works like this. a map file contains key=value
28 pairs. if value is quoted, it is treated as string. otherwise, it
29 pairs. if value is quoted, it is treated as string. otherwise, it
29 is treated as name of template file.
30 is treated as name of template file.
30
31
31 templater is asked to expand a key in map. it looks up key, and
32 templater is asked to expand a key in map. it looks up key, and
32 looks for strings like this: {foo}. it expands {foo} by looking up
33 looks for strings like this: {foo}. it expands {foo} by looking up
33 foo in map, and substituting it. expansion is recursive: it stops
34 foo in map, and substituting it. expansion is recursive: it stops
34 when there is no more {foo} to replace.
35 when there is no more {foo} to replace.
35
36
36 expansion also allows formatting and filtering.
37 expansion also allows formatting and filtering.
37
38
38 format uses key to expand each item in list. syntax is
39 format uses key to expand each item in list. syntax is
39 {key%format}.
40 {key%format}.
40
41
41 filter uses function to transform value. syntax is
42 filter uses function to transform value. syntax is
42 {key|filter1|filter2|...}.'''
43 {key|filter1|filter2|...}.'''
43
44
44 template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))"
45 template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))"
45 r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]")
46 r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]")
46
47
47 def __init__(self, loader, filters={}, defaults={}):
48 def __init__(self, loader, filters={}, defaults={}):
48 self.loader = loader
49 self.loader = loader
49 self.filters = filters
50 self.filters = filters
50 self.defaults = defaults
51 self.defaults = defaults
51
52
52 def process(self, t, map):
53 def process(self, t, map):
53 '''Perform expansion. t is name of map element to expand. map contains
54 '''Perform expansion. t is name of map element to expand. map contains
54 added elements for use during expansion. Is a generator.'''
55 added elements for use during expansion. Is a generator.'''
55 tmpl = self.loader(t)
56 tmpl = self.loader(t)
56 iters = [self._process(tmpl, map)]
57 iters = [self._process(tmpl, map)]
57 while iters:
58 while iters:
58 try:
59 try:
59 item = iters[0].next()
60 item = iters[0].next()
60 except StopIteration:
61 except StopIteration:
61 iters.pop(0)
62 iters.pop(0)
62 continue
63 continue
63 if isinstance(item, str):
64 if isinstance(item, str):
64 yield item
65 yield item
65 elif item is None:
66 elif item is None:
66 yield ''
67 yield ''
67 elif hasattr(item, '__iter__'):
68 elif hasattr(item, '__iter__'):
68 iters.insert(0, iter(item))
69 iters.insert(0, iter(item))
69 else:
70 else:
70 yield str(item)
71 yield str(item)
71
72
72 def _process(self, tmpl, map):
73 def _process(self, tmpl, map):
73 '''Render a template. Returns a generator.'''
74 '''Render a template. Returns a generator.'''
74 while tmpl:
75 while tmpl:
75 m = self.template_re.search(tmpl)
76 m = self.template_re.search(tmpl)
76 if not m:
77 if not m:
77 yield tmpl
78 yield tmpl
78 break
79 break
79
80
80 start, end = m.span(0)
81 start, end = m.span(0)
81 key, format, fl = m.groups()
82 key, format, fl = m.groups()
82
83
83 if start:
84 if start:
84 yield tmpl[:start]
85 yield tmpl[:start]
85 tmpl = tmpl[end:]
86 tmpl = tmpl[end:]
86
87
87 if key in map:
88 if key in map:
88 v = map[key]
89 v = map[key]
89 else:
90 else:
90 v = self.defaults.get(key, "")
91 v = self.defaults.get(key, "")
91 if callable(v):
92 if callable(v):
92 v = v(**map)
93 v = v(**map)
93 if format:
94 if format:
94 if not hasattr(v, '__iter__'):
95 if not hasattr(v, '__iter__'):
95 raise SyntaxError(_("Error expanding '%s%%%s'")
96 raise SyntaxError(_("Error expanding '%s%%%s'")
96 % (key, format))
97 % (key, format))
97 lm = map.copy()
98 lm = map.copy()
98 for i in v:
99 for i in v:
99 lm.update(i)
100 lm.update(i)
100 yield self.process(format, lm)
101 yield self.process(format, lm)
101 else:
102 else:
102 if fl:
103 if fl:
103 for f in fl.split("|")[1:]:
104 for f in fl.split("|")[1:]:
104 v = self.filters[f](v)
105 v = self.filters[f](v)
105 yield v
106 yield v
106
107
107 class templater(object):
108 class templater(object):
108
109
109 def __init__(self, mapfile, filters={}, defaults={}, cache={},
110 def __init__(self, mapfile, filters={}, defaults={}, cache={},
110 minchunk=1024, maxchunk=65536):
111 minchunk=1024, maxchunk=65536):
111 '''set up template engine.
112 '''set up template engine.
112 mapfile is name of file to read map definitions from.
113 mapfile is name of file to read map definitions from.
113 filters is dict of functions. each transforms a value into another.
114 filters is dict of functions. each transforms a value into another.
114 defaults is dict of default map definitions.'''
115 defaults is dict of default map definitions.'''
115 self.mapfile = mapfile or 'template'
116 self.mapfile = mapfile or 'template'
116 self.cache = cache.copy()
117 self.cache = cache.copy()
117 self.map = {}
118 self.map = {}
118 self.base = (mapfile and os.path.dirname(mapfile)) or ''
119 self.base = (mapfile and os.path.dirname(mapfile)) or ''
119 self.filters = filters
120 self.filters = templatefilters.filters.copy()
121 self.filters.update(filters)
120 self.defaults = defaults
122 self.defaults = defaults
121 self.minchunk, self.maxchunk = minchunk, maxchunk
123 self.minchunk, self.maxchunk = minchunk, maxchunk
122
124
123 if not mapfile:
125 if not mapfile:
124 return
126 return
125 if not os.path.exists(mapfile):
127 if not os.path.exists(mapfile):
126 raise util.Abort(_('style not found: %s') % mapfile)
128 raise util.Abort(_('style not found: %s') % mapfile)
127
129
128 conf = config.config()
130 conf = config.config()
129 conf.read(mapfile)
131 conf.read(mapfile)
130
132
131 for key, val in conf[''].items():
133 for key, val in conf[''].items():
132 if val[0] in "'\"":
134 if val[0] in "'\"":
133 try:
135 try:
134 self.cache[key] = parsestring(val)
136 self.cache[key] = parsestring(val)
135 except SyntaxError, inst:
137 except SyntaxError, inst:
136 raise SyntaxError('%s: %s' %
138 raise SyntaxError('%s: %s' %
137 (conf.source('', key), inst.args[0]))
139 (conf.source('', key), inst.args[0]))
138 else:
140 else:
139 self.map[key] = os.path.join(self.base, val)
141 self.map[key] = os.path.join(self.base, val)
140
142
141 def __contains__(self, key):
143 def __contains__(self, key):
142 return key in self.cache or key in self.map
144 return key in self.cache or key in self.map
143
145
144 def load(self, t):
146 def load(self, t):
145 '''Get the template for the given template name. Use a local cache.'''
147 '''Get the template for the given template name. Use a local cache.'''
146 if not t in self.cache:
148 if not t in self.cache:
147 try:
149 try:
148 self.cache[t] = file(self.map[t]).read()
150 self.cache[t] = file(self.map[t]).read()
149 except IOError, inst:
151 except IOError, inst:
150 raise IOError(inst.args[0], _('template file %s: %s') %
152 raise IOError(inst.args[0], _('template file %s: %s') %
151 (self.map[t], inst.args[1]))
153 (self.map[t], inst.args[1]))
152 return self.cache[t]
154 return self.cache[t]
153
155
154 def __call__(self, t, **map):
156 def __call__(self, t, **map):
155 proc = engine(self.load, self.filters, self.defaults)
157 proc = engine(self.load, self.filters, self.defaults)
156 stream = proc.process(t, map)
158 stream = proc.process(t, map)
157 if self.minchunk:
159 if self.minchunk:
158 stream = util.increasingchunks(stream, min=self.minchunk,
160 stream = util.increasingchunks(stream, min=self.minchunk,
159 max=self.maxchunk)
161 max=self.maxchunk)
160 return stream
162 return stream
161
163
162 def templatepath(name=None):
164 def templatepath(name=None):
163 '''return location of template file or directory (if no name).
165 '''return location of template file or directory (if no name).
164 returns None if not found.'''
166 returns None if not found.'''
165 normpaths = []
167 normpaths = []
166
168
167 # executable version (py2exe) doesn't support __file__
169 # executable version (py2exe) doesn't support __file__
168 if hasattr(sys, 'frozen'):
170 if hasattr(sys, 'frozen'):
169 module = sys.executable
171 module = sys.executable
170 else:
172 else:
171 module = __file__
173 module = __file__
172 for f in path:
174 for f in path:
173 if f.startswith('/'):
175 if f.startswith('/'):
174 p = f
176 p = f
175 else:
177 else:
176 fl = f.split('/')
178 fl = f.split('/')
177 p = os.path.join(os.path.dirname(module), *fl)
179 p = os.path.join(os.path.dirname(module), *fl)
178 if name:
180 if name:
179 p = os.path.join(p, name)
181 p = os.path.join(p, name)
180 if name and os.path.exists(p):
182 if name and os.path.exists(p):
181 return os.path.normpath(p)
183 return os.path.normpath(p)
182 elif os.path.isdir(p):
184 elif os.path.isdir(p):
183 normpaths.append(os.path.normpath(p))
185 normpaths.append(os.path.normpath(p))
184
186
185 return normpaths
187 return normpaths
186
188
187 def stylemap(style, paths=None):
189 def stylemap(style, paths=None):
188 """Return path to mapfile for a given style.
190 """Return path to mapfile for a given style.
189
191
190 Searches mapfile in the following locations:
192 Searches mapfile in the following locations:
191 1. templatepath/style/map
193 1. templatepath/style/map
192 2. templatepath/map-style
194 2. templatepath/map-style
193 3. templatepath/map
195 3. templatepath/map
194 """
196 """
195
197
196 if paths is None:
198 if paths is None:
197 paths = templatepath()
199 paths = templatepath()
198 elif isinstance(paths, str):
200 elif isinstance(paths, str):
199 paths = [paths]
201 paths = [paths]
200
202
201 locations = style and [os.path.join(style, "map"), "map-" + style] or []
203 locations = style and [os.path.join(style, "map"), "map-" + style] or []
202 locations.append("map")
204 locations.append("map")
203 for path in paths:
205 for path in paths:
204 for location in locations:
206 for location in locations:
205 mapfile = os.path.join(path, location)
207 mapfile = os.path.join(path, location)
206 if os.path.isfile(mapfile):
208 if os.path.isfile(mapfile):
207 return mapfile
209 return mapfile
208
210
209 raise RuntimeError("No hgweb templates found in %r" % paths)
211 raise RuntimeError("No hgweb templates found in %r" % paths)
210
211 def stringify(thing):
212 '''turn nested template iterator into string.'''
213 if hasattr(thing, '__iter__') and not isinstance(thing, str):
214 return "".join([stringify(t) for t in thing if t is not None])
215 return str(thing)
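To make the effect of the templater.py change concrete: a templater constructed with no filter table at all can now use any of the standard filters, and caller-supplied filters are merged on top of the standard set rather than replacing it. A small Python 2 sketch against the constructor shown above; the template strings and the shout filter are made up for illustration:

from mercurial import templater

# no filters argument: the standard filters (person, age, json, ...) are
# available anyway, because __init__ now copies templatefilters.filters
t = templater.templater(None, cache={'line': 'hello, {author|person}!\n'})
print ''.join(t('line', author='Matt Mackall <mpm@selenic.com>'))
# -> hello, Matt Mackall!

# extra filters passed by the caller are layered on top of the standard set
t2 = templater.templater(None, filters={'shout': lambda s: s.upper()},
                         cache={'line': '{greeting|shout}\n'})
print ''.join(t2('line', greeting='hi'))
# -> HI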