Consistently import foo as foomod when foo to avoid shadowing...
Martin Geisler
r12085:6f833fc3 default
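
The change below renames the import alias _match to matchmod. cmdutil.py defines its own match() helper later in this file, so importing the match module under its bare name would be shadowed by that helper; the foomod-style alias keeps the module reachable without resorting to a leading-underscore name. A minimal, self-contained sketch of the same pattern, using the standard-library re module as a stand-in for Mercurial's match module (the names below are illustrative only, not part of the changeset):

    # Illustration of the shadowing problem this convention avoids; "re"
    # stands in for mercurial's "match" module, "remod" mirrors "matchmod".
    import re as remod   # alias the module so the bare name stays free

    def re(pattern, text):
        # A local helper reusing the bare name; without the alias above,
        # this definition would shadow the imported module.
        return remod.search(pattern, text) is not None

    print(re(r'^hg ', 'hg status'))   # True: helper works, module still reachable via remod

In the diff, the same convention shows up as "import match as matchmod" and call sites such as matchmod.match(...), matchmod.patkind(...) and matchmod.always(...).
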
@@ -1,1280 +1,1280 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, glob, tempfile
10 import os, sys, errno, re, glob, tempfile
11 import util, templater, patch, error, encoding, templatekw
11 import util, templater, patch, error, encoding, templatekw
12 import match as _match
12 import match as matchmod
13 import similar, revset
13 import similar, revset
14
14
15 revrangesep = ':'
15 revrangesep = ':'
16
16
17 def parsealiases(cmd):
17 def parsealiases(cmd):
18 return cmd.lstrip("^").split("|")
18 return cmd.lstrip("^").split("|")
19
19
20 def findpossible(cmd, table, strict=False):
20 def findpossible(cmd, table, strict=False):
21 """
21 """
22 Return cmd -> (aliases, command table entry)
22 Return cmd -> (aliases, command table entry)
23 for each matching command.
23 for each matching command.
24 Return debug commands (or their aliases) only if no normal command matches.
24 Return debug commands (or their aliases) only if no normal command matches.
25 """
25 """
26 choice = {}
26 choice = {}
27 debugchoice = {}
27 debugchoice = {}
28 for e in table.keys():
28 for e in table.keys():
29 aliases = parsealiases(e)
29 aliases = parsealiases(e)
30 found = None
30 found = None
31 if cmd in aliases:
31 if cmd in aliases:
32 found = cmd
32 found = cmd
33 elif not strict:
33 elif not strict:
34 for a in aliases:
34 for a in aliases:
35 if a.startswith(cmd):
35 if a.startswith(cmd):
36 found = a
36 found = a
37 break
37 break
38 if found is not None:
38 if found is not None:
39 if aliases[0].startswith("debug") or found.startswith("debug"):
39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 debugchoice[found] = (aliases, table[e])
40 debugchoice[found] = (aliases, table[e])
41 else:
41 else:
42 choice[found] = (aliases, table[e])
42 choice[found] = (aliases, table[e])
43
43
44 if not choice and debugchoice:
44 if not choice and debugchoice:
45 choice = debugchoice
45 choice = debugchoice
46
46
47 return choice
47 return choice
48
48
49 def findcmd(cmd, table, strict=True):
49 def findcmd(cmd, table, strict=True):
50 """Return (aliases, command table entry) for command string."""
50 """Return (aliases, command table entry) for command string."""
51 choice = findpossible(cmd, table, strict)
51 choice = findpossible(cmd, table, strict)
52
52
53 if cmd in choice:
53 if cmd in choice:
54 return choice[cmd]
54 return choice[cmd]
55
55
56 if len(choice) > 1:
56 if len(choice) > 1:
57 clist = choice.keys()
57 clist = choice.keys()
58 clist.sort()
58 clist.sort()
59 raise error.AmbiguousCommand(cmd, clist)
59 raise error.AmbiguousCommand(cmd, clist)
60
60
61 if choice:
61 if choice:
62 return choice.values()[0]
62 return choice.values()[0]
63
63
64 raise error.UnknownCommand(cmd)
64 raise error.UnknownCommand(cmd)
65
65
66 def findrepo(p):
66 def findrepo(p):
67 while not os.path.isdir(os.path.join(p, ".hg")):
67 while not os.path.isdir(os.path.join(p, ".hg")):
68 oldp, p = p, os.path.dirname(p)
68 oldp, p = p, os.path.dirname(p)
69 if p == oldp:
69 if p == oldp:
70 return None
70 return None
71
71
72 return p
72 return p
73
73
74 def bail_if_changed(repo):
74 def bail_if_changed(repo):
75 if repo.dirstate.parents()[1] != nullid:
75 if repo.dirstate.parents()[1] != nullid:
76 raise util.Abort(_('outstanding uncommitted merge'))
76 raise util.Abort(_('outstanding uncommitted merge'))
77 modified, added, removed, deleted = repo.status()[:4]
77 modified, added, removed, deleted = repo.status()[:4]
78 if modified or added or removed or deleted:
78 if modified or added or removed or deleted:
79 raise util.Abort(_("outstanding uncommitted changes"))
79 raise util.Abort(_("outstanding uncommitted changes"))
80
80
81 def logmessage(opts):
81 def logmessage(opts):
82 """ get the log message according to -m and -l option """
82 """ get the log message according to -m and -l option """
83 message = opts.get('message')
83 message = opts.get('message')
84 logfile = opts.get('logfile')
84 logfile = opts.get('logfile')
85
85
86 if message and logfile:
86 if message and logfile:
87 raise util.Abort(_('options --message and --logfile are mutually '
87 raise util.Abort(_('options --message and --logfile are mutually '
88 'exclusive'))
88 'exclusive'))
89 if not message and logfile:
89 if not message and logfile:
90 try:
90 try:
91 if logfile == '-':
91 if logfile == '-':
92 message = sys.stdin.read()
92 message = sys.stdin.read()
93 else:
93 else:
94 message = open(logfile).read()
94 message = open(logfile).read()
95 except IOError, inst:
95 except IOError, inst:
96 raise util.Abort(_("can't read commit message '%s': %s") %
96 raise util.Abort(_("can't read commit message '%s': %s") %
97 (logfile, inst.strerror))
97 (logfile, inst.strerror))
98 return message
98 return message
99
99
100 def loglimit(opts):
100 def loglimit(opts):
101 """get the log limit according to option -l/--limit"""
101 """get the log limit according to option -l/--limit"""
102 limit = opts.get('limit')
102 limit = opts.get('limit')
103 if limit:
103 if limit:
104 try:
104 try:
105 limit = int(limit)
105 limit = int(limit)
106 except ValueError:
106 except ValueError:
107 raise util.Abort(_('limit must be a positive integer'))
107 raise util.Abort(_('limit must be a positive integer'))
108 if limit <= 0:
108 if limit <= 0:
109 raise util.Abort(_('limit must be positive'))
109 raise util.Abort(_('limit must be positive'))
110 else:
110 else:
111 limit = None
111 limit = None
112 return limit
112 return limit
113
113
114 def revpair(repo, revs):
114 def revpair(repo, revs):
115 '''return pair of nodes, given list of revisions. second item can
115 '''return pair of nodes, given list of revisions. second item can
116 be None, meaning use working dir.'''
116 be None, meaning use working dir.'''
117
117
118 def revfix(repo, val, defval):
118 def revfix(repo, val, defval):
119 if not val and val != 0 and defval is not None:
119 if not val and val != 0 and defval is not None:
120 val = defval
120 val = defval
121 return repo.lookup(val)
121 return repo.lookup(val)
122
122
123 if not revs:
123 if not revs:
124 return repo.dirstate.parents()[0], None
124 return repo.dirstate.parents()[0], None
125 end = None
125 end = None
126 if len(revs) == 1:
126 if len(revs) == 1:
127 if revrangesep in revs[0]:
127 if revrangesep in revs[0]:
128 start, end = revs[0].split(revrangesep, 1)
128 start, end = revs[0].split(revrangesep, 1)
129 start = revfix(repo, start, 0)
129 start = revfix(repo, start, 0)
130 end = revfix(repo, end, len(repo) - 1)
130 end = revfix(repo, end, len(repo) - 1)
131 else:
131 else:
132 start = revfix(repo, revs[0], None)
132 start = revfix(repo, revs[0], None)
133 elif len(revs) == 2:
133 elif len(revs) == 2:
134 if revrangesep in revs[0] or revrangesep in revs[1]:
134 if revrangesep in revs[0] or revrangesep in revs[1]:
135 raise util.Abort(_('too many revisions specified'))
135 raise util.Abort(_('too many revisions specified'))
136 start = revfix(repo, revs[0], None)
136 start = revfix(repo, revs[0], None)
137 end = revfix(repo, revs[1], None)
137 end = revfix(repo, revs[1], None)
138 else:
138 else:
139 raise util.Abort(_('too many revisions specified'))
139 raise util.Abort(_('too many revisions specified'))
140 return start, end
140 return start, end
141
141
142 def revrange(repo, revs):
142 def revrange(repo, revs):
143 """Yield revision as strings from a list of revision specifications."""
143 """Yield revision as strings from a list of revision specifications."""
144
144
145 def revfix(repo, val, defval):
145 def revfix(repo, val, defval):
146 if not val and val != 0 and defval is not None:
146 if not val and val != 0 and defval is not None:
147 return defval
147 return defval
148 return repo.changelog.rev(repo.lookup(val))
148 return repo.changelog.rev(repo.lookup(val))
149
149
150 seen, l = set(), []
150 seen, l = set(), []
151 for spec in revs:
151 for spec in revs:
152 # attempt to parse old-style ranges first to deal with
152 # attempt to parse old-style ranges first to deal with
153 # things like old-tag which contain query metacharacters
153 # things like old-tag which contain query metacharacters
154 try:
154 try:
155 if revrangesep in spec:
155 if revrangesep in spec:
156 start, end = spec.split(revrangesep, 1)
156 start, end = spec.split(revrangesep, 1)
157 start = revfix(repo, start, 0)
157 start = revfix(repo, start, 0)
158 end = revfix(repo, end, len(repo) - 1)
158 end = revfix(repo, end, len(repo) - 1)
159 step = start > end and -1 or 1
159 step = start > end and -1 or 1
160 for rev in xrange(start, end + step, step):
160 for rev in xrange(start, end + step, step):
161 if rev in seen:
161 if rev in seen:
162 continue
162 continue
163 seen.add(rev)
163 seen.add(rev)
164 l.append(rev)
164 l.append(rev)
165 continue
165 continue
166 elif spec and spec in repo: # single unquoted rev
166 elif spec and spec in repo: # single unquoted rev
167 rev = revfix(repo, spec, None)
167 rev = revfix(repo, spec, None)
168 if rev in seen:
168 if rev in seen:
169 continue
169 continue
170 seen.add(rev)
170 seen.add(rev)
171 l.append(rev)
171 l.append(rev)
172 continue
172 continue
173 except error.RepoLookupError:
173 except error.RepoLookupError:
174 pass
174 pass
175
175
176 # fall through to new-style queries if old-style fails
176 # fall through to new-style queries if old-style fails
177 m = revset.match(spec)
177 m = revset.match(spec)
178 for r in m(repo, range(len(repo))):
178 for r in m(repo, range(len(repo))):
179 if r not in seen:
179 if r not in seen:
180 l.append(r)
180 l.append(r)
181 seen.update(l)
181 seen.update(l)
182
182
183 return l
183 return l
184
184
185 def make_filename(repo, pat, node,
185 def make_filename(repo, pat, node,
186 total=None, seqno=None, revwidth=None, pathname=None):
186 total=None, seqno=None, revwidth=None, pathname=None):
187 node_expander = {
187 node_expander = {
188 'H': lambda: hex(node),
188 'H': lambda: hex(node),
189 'R': lambda: str(repo.changelog.rev(node)),
189 'R': lambda: str(repo.changelog.rev(node)),
190 'h': lambda: short(node),
190 'h': lambda: short(node),
191 }
191 }
192 expander = {
192 expander = {
193 '%': lambda: '%',
193 '%': lambda: '%',
194 'b': lambda: os.path.basename(repo.root),
194 'b': lambda: os.path.basename(repo.root),
195 }
195 }
196
196
197 try:
197 try:
198 if node:
198 if node:
199 expander.update(node_expander)
199 expander.update(node_expander)
200 if node:
200 if node:
201 expander['r'] = (lambda:
201 expander['r'] = (lambda:
202 str(repo.changelog.rev(node)).zfill(revwidth or 0))
202 str(repo.changelog.rev(node)).zfill(revwidth or 0))
203 if total is not None:
203 if total is not None:
204 expander['N'] = lambda: str(total)
204 expander['N'] = lambda: str(total)
205 if seqno is not None:
205 if seqno is not None:
206 expander['n'] = lambda: str(seqno)
206 expander['n'] = lambda: str(seqno)
207 if total is not None and seqno is not None:
207 if total is not None and seqno is not None:
208 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
208 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
209 if pathname is not None:
209 if pathname is not None:
210 expander['s'] = lambda: os.path.basename(pathname)
210 expander['s'] = lambda: os.path.basename(pathname)
211 expander['d'] = lambda: os.path.dirname(pathname) or '.'
211 expander['d'] = lambda: os.path.dirname(pathname) or '.'
212 expander['p'] = lambda: pathname
212 expander['p'] = lambda: pathname
213
213
214 newname = []
214 newname = []
215 patlen = len(pat)
215 patlen = len(pat)
216 i = 0
216 i = 0
217 while i < patlen:
217 while i < patlen:
218 c = pat[i]
218 c = pat[i]
219 if c == '%':
219 if c == '%':
220 i += 1
220 i += 1
221 c = pat[i]
221 c = pat[i]
222 c = expander[c]()
222 c = expander[c]()
223 newname.append(c)
223 newname.append(c)
224 i += 1
224 i += 1
225 return ''.join(newname)
225 return ''.join(newname)
226 except KeyError, inst:
226 except KeyError, inst:
227 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
227 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
228 inst.args[0])
228 inst.args[0])
229
229
230 def make_file(repo, pat, node=None,
230 def make_file(repo, pat, node=None,
231 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
231 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
232
232
233 writable = 'w' in mode or 'a' in mode
233 writable = 'w' in mode or 'a' in mode
234
234
235 if not pat or pat == '-':
235 if not pat or pat == '-':
236 return writable and sys.stdout or sys.stdin
236 return writable and sys.stdout or sys.stdin
237 if hasattr(pat, 'write') and writable:
237 if hasattr(pat, 'write') and writable:
238 return pat
238 return pat
239 if hasattr(pat, 'read') and 'r' in mode:
239 if hasattr(pat, 'read') and 'r' in mode:
240 return pat
240 return pat
241 return open(make_filename(repo, pat, node, total, seqno, revwidth,
241 return open(make_filename(repo, pat, node, total, seqno, revwidth,
242 pathname),
242 pathname),
243 mode)
243 mode)
244
244
245 def expandpats(pats):
245 def expandpats(pats):
246 if not util.expandglobs:
246 if not util.expandglobs:
247 return list(pats)
247 return list(pats)
248 ret = []
248 ret = []
249 for p in pats:
249 for p in pats:
250 kind, name = _match._patsplit(p, None)
250 kind, name = matchmod._patsplit(p, None)
251 if kind is None:
251 if kind is None:
252 try:
252 try:
253 globbed = glob.glob(name)
253 globbed = glob.glob(name)
254 except re.error:
254 except re.error:
255 globbed = [name]
255 globbed = [name]
256 if globbed:
256 if globbed:
257 ret.extend(globbed)
257 ret.extend(globbed)
258 continue
258 continue
259 ret.append(p)
259 ret.append(p)
260 return ret
260 return ret
261
261
262 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
262 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
263 if not globbed and default == 'relpath':
263 if not globbed and default == 'relpath':
264 pats = expandpats(pats or [])
264 pats = expandpats(pats or [])
265 m = _match.match(repo.root, repo.getcwd(), pats,
265 m = matchmod.match(repo.root, repo.getcwd(), pats,
266 opts.get('include'), opts.get('exclude'), default)
266 opts.get('include'), opts.get('exclude'), default)
267 def badfn(f, msg):
267 def badfn(f, msg):
268 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
268 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
269 m.bad = badfn
269 m.bad = badfn
270 return m
270 return m
271
271
272 def matchall(repo):
272 def matchall(repo):
273 return _match.always(repo.root, repo.getcwd())
273 return matchmod.always(repo.root, repo.getcwd())
274
274
275 def matchfiles(repo, files):
275 def matchfiles(repo, files):
276 return _match.exact(repo.root, repo.getcwd(), files)
276 return matchmod.exact(repo.root, repo.getcwd(), files)
277
277
278 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
278 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
279 if dry_run is None:
279 if dry_run is None:
280 dry_run = opts.get('dry_run')
280 dry_run = opts.get('dry_run')
281 if similarity is None:
281 if similarity is None:
282 similarity = float(opts.get('similarity') or 0)
282 similarity = float(opts.get('similarity') or 0)
283 # we'd use status here, except handling of symlinks and ignore is tricky
283 # we'd use status here, except handling of symlinks and ignore is tricky
284 added, unknown, deleted, removed = [], [], [], []
284 added, unknown, deleted, removed = [], [], [], []
285 audit_path = util.path_auditor(repo.root)
285 audit_path = util.path_auditor(repo.root)
286 m = match(repo, pats, opts)
286 m = match(repo, pats, opts)
287 for abs in repo.walk(m):
287 for abs in repo.walk(m):
288 target = repo.wjoin(abs)
288 target = repo.wjoin(abs)
289 good = True
289 good = True
290 try:
290 try:
291 audit_path(abs)
291 audit_path(abs)
292 except:
292 except:
293 good = False
293 good = False
294 rel = m.rel(abs)
294 rel = m.rel(abs)
295 exact = m.exact(abs)
295 exact = m.exact(abs)
296 if good and abs not in repo.dirstate:
296 if good and abs not in repo.dirstate:
297 unknown.append(abs)
297 unknown.append(abs)
298 if repo.ui.verbose or not exact:
298 if repo.ui.verbose or not exact:
299 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
299 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
300 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
300 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
301 or (os.path.isdir(target) and not os.path.islink(target))):
301 or (os.path.isdir(target) and not os.path.islink(target))):
302 deleted.append(abs)
302 deleted.append(abs)
303 if repo.ui.verbose or not exact:
303 if repo.ui.verbose or not exact:
304 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
304 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
305 # for finding renames
305 # for finding renames
306 elif repo.dirstate[abs] == 'r':
306 elif repo.dirstate[abs] == 'r':
307 removed.append(abs)
307 removed.append(abs)
308 elif repo.dirstate[abs] == 'a':
308 elif repo.dirstate[abs] == 'a':
309 added.append(abs)
309 added.append(abs)
310 copies = {}
310 copies = {}
311 if similarity > 0:
311 if similarity > 0:
312 for old, new, score in similar.findrenames(repo,
312 for old, new, score in similar.findrenames(repo,
313 added + unknown, removed + deleted, similarity):
313 added + unknown, removed + deleted, similarity):
314 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
314 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
315 repo.ui.status(_('recording removal of %s as rename to %s '
315 repo.ui.status(_('recording removal of %s as rename to %s '
316 '(%d%% similar)\n') %
316 '(%d%% similar)\n') %
317 (m.rel(old), m.rel(new), score * 100))
317 (m.rel(old), m.rel(new), score * 100))
318 copies[new] = old
318 copies[new] = old
319
319
320 if not dry_run:
320 if not dry_run:
321 wctx = repo[None]
321 wctx = repo[None]
322 wlock = repo.wlock()
322 wlock = repo.wlock()
323 try:
323 try:
324 wctx.remove(deleted)
324 wctx.remove(deleted)
325 wctx.add(unknown)
325 wctx.add(unknown)
326 for new, old in copies.iteritems():
326 for new, old in copies.iteritems():
327 wctx.copy(old, new)
327 wctx.copy(old, new)
328 finally:
328 finally:
329 wlock.release()
329 wlock.release()
330
330
331 def copy(ui, repo, pats, opts, rename=False):
331 def copy(ui, repo, pats, opts, rename=False):
332 # called with the repo lock held
332 # called with the repo lock held
333 #
333 #
334 # hgsep => pathname that uses "/" to separate directories
334 # hgsep => pathname that uses "/" to separate directories
335 # ossep => pathname that uses os.sep to separate directories
335 # ossep => pathname that uses os.sep to separate directories
336 cwd = repo.getcwd()
336 cwd = repo.getcwd()
337 targets = {}
337 targets = {}
338 after = opts.get("after")
338 after = opts.get("after")
339 dryrun = opts.get("dry_run")
339 dryrun = opts.get("dry_run")
340 wctx = repo[None]
340 wctx = repo[None]
341
341
342 def walkpat(pat):
342 def walkpat(pat):
343 srcs = []
343 srcs = []
344 badstates = after and '?' or '?r'
344 badstates = after and '?' or '?r'
345 m = match(repo, [pat], opts, globbed=True)
345 m = match(repo, [pat], opts, globbed=True)
346 for abs in repo.walk(m):
346 for abs in repo.walk(m):
347 state = repo.dirstate[abs]
347 state = repo.dirstate[abs]
348 rel = m.rel(abs)
348 rel = m.rel(abs)
349 exact = m.exact(abs)
349 exact = m.exact(abs)
350 if state in badstates:
350 if state in badstates:
351 if exact and state == '?':
351 if exact and state == '?':
352 ui.warn(_('%s: not copying - file is not managed\n') % rel)
352 ui.warn(_('%s: not copying - file is not managed\n') % rel)
353 if exact and state == 'r':
353 if exact and state == 'r':
354 ui.warn(_('%s: not copying - file has been marked for'
354 ui.warn(_('%s: not copying - file has been marked for'
355 ' remove\n') % rel)
355 ' remove\n') % rel)
356 continue
356 continue
357 # abs: hgsep
357 # abs: hgsep
358 # rel: ossep
358 # rel: ossep
359 srcs.append((abs, rel, exact))
359 srcs.append((abs, rel, exact))
360 return srcs
360 return srcs
361
361
362 # abssrc: hgsep
362 # abssrc: hgsep
363 # relsrc: ossep
363 # relsrc: ossep
364 # otarget: ossep
364 # otarget: ossep
365 def copyfile(abssrc, relsrc, otarget, exact):
365 def copyfile(abssrc, relsrc, otarget, exact):
366 abstarget = util.canonpath(repo.root, cwd, otarget)
366 abstarget = util.canonpath(repo.root, cwd, otarget)
367 reltarget = repo.pathto(abstarget, cwd)
367 reltarget = repo.pathto(abstarget, cwd)
368 target = repo.wjoin(abstarget)
368 target = repo.wjoin(abstarget)
369 src = repo.wjoin(abssrc)
369 src = repo.wjoin(abssrc)
370 state = repo.dirstate[abstarget]
370 state = repo.dirstate[abstarget]
371
371
372 # check for collisions
372 # check for collisions
373 prevsrc = targets.get(abstarget)
373 prevsrc = targets.get(abstarget)
374 if prevsrc is not None:
374 if prevsrc is not None:
375 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
375 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
376 (reltarget, repo.pathto(abssrc, cwd),
376 (reltarget, repo.pathto(abssrc, cwd),
377 repo.pathto(prevsrc, cwd)))
377 repo.pathto(prevsrc, cwd)))
378 return
378 return
379
379
380 # check for overwrites
380 # check for overwrites
381 exists = os.path.exists(target)
381 exists = os.path.exists(target)
382 if not after and exists or after and state in 'mn':
382 if not after and exists or after and state in 'mn':
383 if not opts['force']:
383 if not opts['force']:
384 ui.warn(_('%s: not overwriting - file exists\n') %
384 ui.warn(_('%s: not overwriting - file exists\n') %
385 reltarget)
385 reltarget)
386 return
386 return
387
387
388 if after:
388 if after:
389 if not exists:
389 if not exists:
390 if rename:
390 if rename:
391 ui.warn(_('%s: not recording move - %s does not exist\n') %
391 ui.warn(_('%s: not recording move - %s does not exist\n') %
392 (relsrc, reltarget))
392 (relsrc, reltarget))
393 else:
393 else:
394 ui.warn(_('%s: not recording copy - %s does not exist\n') %
394 ui.warn(_('%s: not recording copy - %s does not exist\n') %
395 (relsrc, reltarget))
395 (relsrc, reltarget))
396 return
396 return
397 elif not dryrun:
397 elif not dryrun:
398 try:
398 try:
399 if exists:
399 if exists:
400 os.unlink(target)
400 os.unlink(target)
401 targetdir = os.path.dirname(target) or '.'
401 targetdir = os.path.dirname(target) or '.'
402 if not os.path.isdir(targetdir):
402 if not os.path.isdir(targetdir):
403 os.makedirs(targetdir)
403 os.makedirs(targetdir)
404 util.copyfile(src, target)
404 util.copyfile(src, target)
405 except IOError, inst:
405 except IOError, inst:
406 if inst.errno == errno.ENOENT:
406 if inst.errno == errno.ENOENT:
407 ui.warn(_('%s: deleted in working copy\n') % relsrc)
407 ui.warn(_('%s: deleted in working copy\n') % relsrc)
408 else:
408 else:
409 ui.warn(_('%s: cannot copy - %s\n') %
409 ui.warn(_('%s: cannot copy - %s\n') %
410 (relsrc, inst.strerror))
410 (relsrc, inst.strerror))
411 return True # report a failure
411 return True # report a failure
412
412
413 if ui.verbose or not exact:
413 if ui.verbose or not exact:
414 if rename:
414 if rename:
415 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
415 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
416 else:
416 else:
417 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
417 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
418
418
419 targets[abstarget] = abssrc
419 targets[abstarget] = abssrc
420
420
421 # fix up dirstate
421 # fix up dirstate
422 origsrc = repo.dirstate.copied(abssrc) or abssrc
422 origsrc = repo.dirstate.copied(abssrc) or abssrc
423 if abstarget == origsrc: # copying back a copy?
423 if abstarget == origsrc: # copying back a copy?
424 if state not in 'mn' and not dryrun:
424 if state not in 'mn' and not dryrun:
425 repo.dirstate.normallookup(abstarget)
425 repo.dirstate.normallookup(abstarget)
426 else:
426 else:
427 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
427 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
428 if not ui.quiet:
428 if not ui.quiet:
429 ui.warn(_("%s has not been committed yet, so no copy "
429 ui.warn(_("%s has not been committed yet, so no copy "
430 "data will be stored for %s.\n")
430 "data will be stored for %s.\n")
431 % (repo.pathto(origsrc, cwd), reltarget))
431 % (repo.pathto(origsrc, cwd), reltarget))
432 if repo.dirstate[abstarget] in '?r' and not dryrun:
432 if repo.dirstate[abstarget] in '?r' and not dryrun:
433 wctx.add([abstarget])
433 wctx.add([abstarget])
434 elif not dryrun:
434 elif not dryrun:
435 wctx.copy(origsrc, abstarget)
435 wctx.copy(origsrc, abstarget)
436
436
437 if rename and not dryrun:
437 if rename and not dryrun:
438 wctx.remove([abssrc], not after)
438 wctx.remove([abssrc], not after)
439
439
440 # pat: ossep
440 # pat: ossep
441 # dest ossep
441 # dest ossep
442 # srcs: list of (hgsep, hgsep, ossep, bool)
442 # srcs: list of (hgsep, hgsep, ossep, bool)
443 # return: function that takes hgsep and returns ossep
443 # return: function that takes hgsep and returns ossep
444 def targetpathfn(pat, dest, srcs):
444 def targetpathfn(pat, dest, srcs):
445 if os.path.isdir(pat):
445 if os.path.isdir(pat):
446 abspfx = util.canonpath(repo.root, cwd, pat)
446 abspfx = util.canonpath(repo.root, cwd, pat)
447 abspfx = util.localpath(abspfx)
447 abspfx = util.localpath(abspfx)
448 if destdirexists:
448 if destdirexists:
449 striplen = len(os.path.split(abspfx)[0])
449 striplen = len(os.path.split(abspfx)[0])
450 else:
450 else:
451 striplen = len(abspfx)
451 striplen = len(abspfx)
452 if striplen:
452 if striplen:
453 striplen += len(os.sep)
453 striplen += len(os.sep)
454 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
454 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
455 elif destdirexists:
455 elif destdirexists:
456 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
457 os.path.basename(util.localpath(p)))
457 os.path.basename(util.localpath(p)))
458 else:
458 else:
459 res = lambda p: dest
459 res = lambda p: dest
460 return res
460 return res
461
461
462 # pat: ossep
462 # pat: ossep
463 # dest ossep
463 # dest ossep
464 # srcs: list of (hgsep, hgsep, ossep, bool)
464 # srcs: list of (hgsep, hgsep, ossep, bool)
465 # return: function that takes hgsep and returns ossep
465 # return: function that takes hgsep and returns ossep
466 def targetpathafterfn(pat, dest, srcs):
466 def targetpathafterfn(pat, dest, srcs):
467 if _match.patkind(pat):
467 if matchmod.patkind(pat):
468 # a mercurial pattern
468 # a mercurial pattern
469 res = lambda p: os.path.join(dest,
469 res = lambda p: os.path.join(dest,
470 os.path.basename(util.localpath(p)))
470 os.path.basename(util.localpath(p)))
471 else:
471 else:
472 abspfx = util.canonpath(repo.root, cwd, pat)
472 abspfx = util.canonpath(repo.root, cwd, pat)
473 if len(abspfx) < len(srcs[0][0]):
473 if len(abspfx) < len(srcs[0][0]):
474 # A directory. Either the target path contains the last
474 # A directory. Either the target path contains the last
475 # component of the source path or it does not.
475 # component of the source path or it does not.
476 def evalpath(striplen):
476 def evalpath(striplen):
477 score = 0
477 score = 0
478 for s in srcs:
478 for s in srcs:
479 t = os.path.join(dest, util.localpath(s[0])[striplen:])
479 t = os.path.join(dest, util.localpath(s[0])[striplen:])
480 if os.path.exists(t):
480 if os.path.exists(t):
481 score += 1
481 score += 1
482 return score
482 return score
483
483
484 abspfx = util.localpath(abspfx)
484 abspfx = util.localpath(abspfx)
485 striplen = len(abspfx)
485 striplen = len(abspfx)
486 if striplen:
486 if striplen:
487 striplen += len(os.sep)
487 striplen += len(os.sep)
488 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
488 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
489 score = evalpath(striplen)
489 score = evalpath(striplen)
490 striplen1 = len(os.path.split(abspfx)[0])
490 striplen1 = len(os.path.split(abspfx)[0])
491 if striplen1:
491 if striplen1:
492 striplen1 += len(os.sep)
492 striplen1 += len(os.sep)
493 if evalpath(striplen1) > score:
493 if evalpath(striplen1) > score:
494 striplen = striplen1
494 striplen = striplen1
495 res = lambda p: os.path.join(dest,
495 res = lambda p: os.path.join(dest,
496 util.localpath(p)[striplen:])
496 util.localpath(p)[striplen:])
497 else:
497 else:
498 # a file
498 # a file
499 if destdirexists:
499 if destdirexists:
500 res = lambda p: os.path.join(dest,
500 res = lambda p: os.path.join(dest,
501 os.path.basename(util.localpath(p)))
501 os.path.basename(util.localpath(p)))
502 else:
502 else:
503 res = lambda p: dest
503 res = lambda p: dest
504 return res
504 return res
505
505
506
506
507 pats = expandpats(pats)
507 pats = expandpats(pats)
508 if not pats:
508 if not pats:
509 raise util.Abort(_('no source or destination specified'))
509 raise util.Abort(_('no source or destination specified'))
510 if len(pats) == 1:
510 if len(pats) == 1:
511 raise util.Abort(_('no destination specified'))
511 raise util.Abort(_('no destination specified'))
512 dest = pats.pop()
512 dest = pats.pop()
513 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
513 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
514 if not destdirexists:
514 if not destdirexists:
515 if len(pats) > 1 or _match.patkind(pats[0]):
515 if len(pats) > 1 or matchmod.patkind(pats[0]):
516 raise util.Abort(_('with multiple sources, destination must be an '
516 raise util.Abort(_('with multiple sources, destination must be an '
517 'existing directory'))
517 'existing directory'))
518 if util.endswithsep(dest):
518 if util.endswithsep(dest):
519 raise util.Abort(_('destination %s is not a directory') % dest)
519 raise util.Abort(_('destination %s is not a directory') % dest)
520
520
521 tfn = targetpathfn
521 tfn = targetpathfn
522 if after:
522 if after:
523 tfn = targetpathafterfn
523 tfn = targetpathafterfn
524 copylist = []
524 copylist = []
525 for pat in pats:
525 for pat in pats:
526 srcs = walkpat(pat)
526 srcs = walkpat(pat)
527 if not srcs:
527 if not srcs:
528 continue
528 continue
529 copylist.append((tfn(pat, dest, srcs), srcs))
529 copylist.append((tfn(pat, dest, srcs), srcs))
530 if not copylist:
530 if not copylist:
531 raise util.Abort(_('no files to copy'))
531 raise util.Abort(_('no files to copy'))
532
532
533 errors = 0
533 errors = 0
534 for targetpath, srcs in copylist:
534 for targetpath, srcs in copylist:
535 for abssrc, relsrc, exact in srcs:
535 for abssrc, relsrc, exact in srcs:
536 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
536 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
537 errors += 1
537 errors += 1
538
538
539 if errors:
539 if errors:
540 ui.warn(_('(consider using --after)\n'))
540 ui.warn(_('(consider using --after)\n'))
541
541
542 return errors != 0
542 return errors != 0
543
543
544 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
544 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
545 runargs=None, appendpid=False):
545 runargs=None, appendpid=False):
546 '''Run a command as a service.'''
546 '''Run a command as a service.'''
547
547
548 if opts['daemon'] and not opts['daemon_pipefds']:
548 if opts['daemon'] and not opts['daemon_pipefds']:
549 # Signal child process startup with file removal
549 # Signal child process startup with file removal
550 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
550 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
551 os.close(lockfd)
551 os.close(lockfd)
552 try:
552 try:
553 if not runargs:
553 if not runargs:
554 runargs = util.hgcmd() + sys.argv[1:]
554 runargs = util.hgcmd() + sys.argv[1:]
555 runargs.append('--daemon-pipefds=%s' % lockpath)
555 runargs.append('--daemon-pipefds=%s' % lockpath)
556 # Don't pass --cwd to the child process, because we've already
556 # Don't pass --cwd to the child process, because we've already
557 # changed directory.
557 # changed directory.
558 for i in xrange(1, len(runargs)):
558 for i in xrange(1, len(runargs)):
559 if runargs[i].startswith('--cwd='):
559 if runargs[i].startswith('--cwd='):
560 del runargs[i]
560 del runargs[i]
561 break
561 break
562 elif runargs[i].startswith('--cwd'):
562 elif runargs[i].startswith('--cwd'):
563 del runargs[i:i + 2]
563 del runargs[i:i + 2]
564 break
564 break
565 def condfn():
565 def condfn():
566 return not os.path.exists(lockpath)
566 return not os.path.exists(lockpath)
567 pid = util.rundetached(runargs, condfn)
567 pid = util.rundetached(runargs, condfn)
568 if pid < 0:
568 if pid < 0:
569 raise util.Abort(_('child process failed to start'))
569 raise util.Abort(_('child process failed to start'))
570 finally:
570 finally:
571 try:
571 try:
572 os.unlink(lockpath)
572 os.unlink(lockpath)
573 except OSError, e:
573 except OSError, e:
574 if e.errno != errno.ENOENT:
574 if e.errno != errno.ENOENT:
575 raise
575 raise
576 if parentfn:
576 if parentfn:
577 return parentfn(pid)
577 return parentfn(pid)
578 else:
578 else:
579 return
579 return
580
580
581 if initfn:
581 if initfn:
582 initfn()
582 initfn()
583
583
584 if opts['pid_file']:
584 if opts['pid_file']:
585 mode = appendpid and 'a' or 'w'
585 mode = appendpid and 'a' or 'w'
586 fp = open(opts['pid_file'], mode)
586 fp = open(opts['pid_file'], mode)
587 fp.write(str(os.getpid()) + '\n')
587 fp.write(str(os.getpid()) + '\n')
588 fp.close()
588 fp.close()
589
589
590 if opts['daemon_pipefds']:
590 if opts['daemon_pipefds']:
591 lockpath = opts['daemon_pipefds']
591 lockpath = opts['daemon_pipefds']
592 try:
592 try:
593 os.setsid()
593 os.setsid()
594 except AttributeError:
594 except AttributeError:
595 pass
595 pass
596 os.unlink(lockpath)
596 os.unlink(lockpath)
597 util.hidewindow()
597 util.hidewindow()
598 sys.stdout.flush()
598 sys.stdout.flush()
599 sys.stderr.flush()
599 sys.stderr.flush()
600
600
601 nullfd = os.open(util.nulldev, os.O_RDWR)
601 nullfd = os.open(util.nulldev, os.O_RDWR)
602 logfilefd = nullfd
602 logfilefd = nullfd
603 if logfile:
603 if logfile:
604 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
604 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
605 os.dup2(nullfd, 0)
605 os.dup2(nullfd, 0)
606 os.dup2(logfilefd, 1)
606 os.dup2(logfilefd, 1)
607 os.dup2(logfilefd, 2)
607 os.dup2(logfilefd, 2)
608 if nullfd not in (0, 1, 2):
608 if nullfd not in (0, 1, 2):
609 os.close(nullfd)
609 os.close(nullfd)
610 if logfile and logfilefd not in (0, 1, 2):
610 if logfile and logfilefd not in (0, 1, 2):
611 os.close(logfilefd)
611 os.close(logfilefd)
612
612
613 if runfn:
613 if runfn:
614 return runfn()
614 return runfn()
615
615
616 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
616 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
617 opts=None):
617 opts=None):
618 '''export changesets as hg patches.'''
618 '''export changesets as hg patches.'''
619
619
620 total = len(revs)
620 total = len(revs)
621 revwidth = max([len(str(rev)) for rev in revs])
621 revwidth = max([len(str(rev)) for rev in revs])
622
622
623 def single(rev, seqno, fp):
623 def single(rev, seqno, fp):
624 ctx = repo[rev]
624 ctx = repo[rev]
625 node = ctx.node()
625 node = ctx.node()
626 parents = [p.node() for p in ctx.parents() if p]
626 parents = [p.node() for p in ctx.parents() if p]
627 branch = ctx.branch()
627 branch = ctx.branch()
628 if switch_parent:
628 if switch_parent:
629 parents.reverse()
629 parents.reverse()
630 prev = (parents and parents[0]) or nullid
630 prev = (parents and parents[0]) or nullid
631
631
632 if not fp:
632 if not fp:
633 fp = make_file(repo, template, node, total=total, seqno=seqno,
633 fp = make_file(repo, template, node, total=total, seqno=seqno,
634 revwidth=revwidth, mode='ab')
634 revwidth=revwidth, mode='ab')
635 if fp != sys.stdout and hasattr(fp, 'name'):
635 if fp != sys.stdout and hasattr(fp, 'name'):
636 repo.ui.note("%s\n" % fp.name)
636 repo.ui.note("%s\n" % fp.name)
637
637
638 fp.write("# HG changeset patch\n")
638 fp.write("# HG changeset patch\n")
639 fp.write("# User %s\n" % ctx.user())
639 fp.write("# User %s\n" % ctx.user())
640 fp.write("# Date %d %d\n" % ctx.date())
640 fp.write("# Date %d %d\n" % ctx.date())
641 if branch and branch != 'default':
641 if branch and branch != 'default':
642 fp.write("# Branch %s\n" % branch)
642 fp.write("# Branch %s\n" % branch)
643 fp.write("# Node ID %s\n" % hex(node))
643 fp.write("# Node ID %s\n" % hex(node))
644 fp.write("# Parent %s\n" % hex(prev))
644 fp.write("# Parent %s\n" % hex(prev))
645 if len(parents) > 1:
645 if len(parents) > 1:
646 fp.write("# Parent %s\n" % hex(parents[1]))
646 fp.write("# Parent %s\n" % hex(parents[1]))
647 fp.write(ctx.description().rstrip())
647 fp.write(ctx.description().rstrip())
648 fp.write("\n\n")
648 fp.write("\n\n")
649
649
650 for chunk in patch.diff(repo, prev, node, opts=opts):
650 for chunk in patch.diff(repo, prev, node, opts=opts):
651 fp.write(chunk)
651 fp.write(chunk)
652
652
653 for seqno, rev in enumerate(revs):
653 for seqno, rev in enumerate(revs):
654 single(rev, seqno + 1, fp)
654 single(rev, seqno + 1, fp)
655
655
656 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
656 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
657 changes=None, stat=False, fp=None):
657 changes=None, stat=False, fp=None):
658 '''show diff or diffstat.'''
658 '''show diff or diffstat.'''
659 if fp is None:
659 if fp is None:
660 write = ui.write
660 write = ui.write
661 else:
661 else:
662 def write(s, **kw):
662 def write(s, **kw):
663 fp.write(s)
663 fp.write(s)
664
664
665 if stat:
665 if stat:
666 diffopts = diffopts.copy(context=0)
666 diffopts = diffopts.copy(context=0)
667 width = 80
667 width = 80
668 if not ui.plain():
668 if not ui.plain():
669 width = util.termwidth()
669 width = util.termwidth()
670 chunks = patch.diff(repo, node1, node2, match, changes, diffopts)
670 chunks = patch.diff(repo, node1, node2, match, changes, diffopts)
671 for chunk, label in patch.diffstatui(util.iterlines(chunks),
671 for chunk, label in patch.diffstatui(util.iterlines(chunks),
672 width=width,
672 width=width,
673 git=diffopts.git):
673 git=diffopts.git):
674 write(chunk, label=label)
674 write(chunk, label=label)
675 else:
675 else:
676 for chunk, label in patch.diffui(repo, node1, node2, match,
676 for chunk, label in patch.diffui(repo, node1, node2, match,
677 changes, diffopts):
677 changes, diffopts):
678 write(chunk, label=label)
678 write(chunk, label=label)
679
679
680 class changeset_printer(object):
680 class changeset_printer(object):
681 '''show changeset information when templating not requested.'''
681 '''show changeset information when templating not requested.'''
682
682
683 def __init__(self, ui, repo, patch, diffopts, buffered):
683 def __init__(self, ui, repo, patch, diffopts, buffered):
684 self.ui = ui
684 self.ui = ui
685 self.repo = repo
685 self.repo = repo
686 self.buffered = buffered
686 self.buffered = buffered
687 self.patch = patch
687 self.patch = patch
688 self.diffopts = diffopts
688 self.diffopts = diffopts
689 self.header = {}
689 self.header = {}
690 self.hunk = {}
690 self.hunk = {}
691 self.lastheader = None
691 self.lastheader = None
692 self.footer = None
692 self.footer = None
693
693
694 def flush(self, rev):
694 def flush(self, rev):
695 if rev in self.header:
695 if rev in self.header:
696 h = self.header[rev]
696 h = self.header[rev]
697 if h != self.lastheader:
697 if h != self.lastheader:
698 self.lastheader = h
698 self.lastheader = h
699 self.ui.write(h)
699 self.ui.write(h)
700 del self.header[rev]
700 del self.header[rev]
701 if rev in self.hunk:
701 if rev in self.hunk:
702 self.ui.write(self.hunk[rev])
702 self.ui.write(self.hunk[rev])
703 del self.hunk[rev]
703 del self.hunk[rev]
704 return 1
704 return 1
705 return 0
705 return 0
706
706
707 def close(self):
707 def close(self):
708 if self.footer:
708 if self.footer:
709 self.ui.write(self.footer)
709 self.ui.write(self.footer)
710
710
711 def show(self, ctx, copies=None, matchfn=None, **props):
711 def show(self, ctx, copies=None, matchfn=None, **props):
712 if self.buffered:
712 if self.buffered:
713 self.ui.pushbuffer()
713 self.ui.pushbuffer()
714 self._show(ctx, copies, matchfn, props)
714 self._show(ctx, copies, matchfn, props)
715 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
715 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
716 else:
716 else:
717 self._show(ctx, copies, matchfn, props)
717 self._show(ctx, copies, matchfn, props)
718
718
719 def _show(self, ctx, copies, matchfn, props):
719 def _show(self, ctx, copies, matchfn, props):
720 '''show a single changeset or file revision'''
720 '''show a single changeset or file revision'''
721 changenode = ctx.node()
721 changenode = ctx.node()
722 rev = ctx.rev()
722 rev = ctx.rev()
723
723
724 if self.ui.quiet:
724 if self.ui.quiet:
725 self.ui.write("%d:%s\n" % (rev, short(changenode)),
725 self.ui.write("%d:%s\n" % (rev, short(changenode)),
726 label='log.node')
726 label='log.node')
727 return
727 return
728
728
729 log = self.repo.changelog
729 log = self.repo.changelog
730 date = util.datestr(ctx.date())
730 date = util.datestr(ctx.date())
731
731
732 hexfunc = self.ui.debugflag and hex or short
732 hexfunc = self.ui.debugflag and hex or short
733
733
734 parents = [(p, hexfunc(log.node(p)))
734 parents = [(p, hexfunc(log.node(p)))
735 for p in self._meaningful_parentrevs(log, rev)]
735 for p in self._meaningful_parentrevs(log, rev)]
736
736
737 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
737 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
738 label='log.changeset')
738 label='log.changeset')
739
739
740 branch = ctx.branch()
740 branch = ctx.branch()
741 # don't show the default branch name
741 # don't show the default branch name
742 if branch != 'default':
742 if branch != 'default':
743 branch = encoding.tolocal(branch)
743 branch = encoding.tolocal(branch)
744 self.ui.write(_("branch: %s\n") % branch,
744 self.ui.write(_("branch: %s\n") % branch,
745 label='log.branch')
745 label='log.branch')
746 for tag in self.repo.nodetags(changenode):
746 for tag in self.repo.nodetags(changenode):
747 self.ui.write(_("tag: %s\n") % tag,
747 self.ui.write(_("tag: %s\n") % tag,
748 label='log.tag')
748 label='log.tag')
749 for parent in parents:
749 for parent in parents:
750 self.ui.write(_("parent: %d:%s\n") % parent,
750 self.ui.write(_("parent: %d:%s\n") % parent,
751 label='log.parent')
751 label='log.parent')
752
752
753 if self.ui.debugflag:
753 if self.ui.debugflag:
754 mnode = ctx.manifestnode()
754 mnode = ctx.manifestnode()
755 self.ui.write(_("manifest: %d:%s\n") %
755 self.ui.write(_("manifest: %d:%s\n") %
756 (self.repo.manifest.rev(mnode), hex(mnode)),
756 (self.repo.manifest.rev(mnode), hex(mnode)),
757 label='ui.debug log.manifest')
757 label='ui.debug log.manifest')
758 self.ui.write(_("user: %s\n") % ctx.user(),
758 self.ui.write(_("user: %s\n") % ctx.user(),
759 label='log.user')
759 label='log.user')
760 self.ui.write(_("date: %s\n") % date,
760 self.ui.write(_("date: %s\n") % date,
761 label='log.date')
761 label='log.date')
762
762
763 if self.ui.debugflag:
763 if self.ui.debugflag:
764 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
764 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
765 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
765 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
766 files):
766 files):
767 if value:
767 if value:
768 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
768 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
769 label='ui.debug log.files')
769 label='ui.debug log.files')
770 elif ctx.files() and self.ui.verbose:
770 elif ctx.files() and self.ui.verbose:
771 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
771 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
772 label='ui.note log.files')
772 label='ui.note log.files')
773 if copies and self.ui.verbose:
773 if copies and self.ui.verbose:
774 copies = ['%s (%s)' % c for c in copies]
774 copies = ['%s (%s)' % c for c in copies]
775 self.ui.write(_("copies: %s\n") % ' '.join(copies),
775 self.ui.write(_("copies: %s\n") % ' '.join(copies),
776 label='ui.note log.copies')
776 label='ui.note log.copies')
777
777
778 extra = ctx.extra()
778 extra = ctx.extra()
779 if extra and self.ui.debugflag:
779 if extra and self.ui.debugflag:
780 for key, value in sorted(extra.items()):
780 for key, value in sorted(extra.items()):
781 self.ui.write(_("extra: %s=%s\n")
781 self.ui.write(_("extra: %s=%s\n")
782 % (key, value.encode('string_escape')),
782 % (key, value.encode('string_escape')),
783 label='ui.debug log.extra')
783 label='ui.debug log.extra')
784
784
785 description = ctx.description().strip()
785 description = ctx.description().strip()
786 if description:
786 if description:
787 if self.ui.verbose:
787 if self.ui.verbose:
788 self.ui.write(_("description:\n"),
788 self.ui.write(_("description:\n"),
789 label='ui.note log.description')
789 label='ui.note log.description')
790 self.ui.write(description,
790 self.ui.write(description,
791 label='ui.note log.description')
791 label='ui.note log.description')
792 self.ui.write("\n\n")
792 self.ui.write("\n\n")
793 else:
793 else:
794 self.ui.write(_("summary: %s\n") %
794 self.ui.write(_("summary: %s\n") %
795 description.splitlines()[0],
795 description.splitlines()[0],
796 label='log.summary')
796 label='log.summary')
797 self.ui.write("\n")
797 self.ui.write("\n")
798
798
799 self.showpatch(changenode, matchfn)
799 self.showpatch(changenode, matchfn)
800
800
801 def showpatch(self, node, matchfn):
801 def showpatch(self, node, matchfn):
802 if not matchfn:
802 if not matchfn:
803 matchfn = self.patch
803 matchfn = self.patch
804 if matchfn:
804 if matchfn:
805 stat = self.diffopts.get('stat')
805 stat = self.diffopts.get('stat')
806 diff = self.diffopts.get('patch')
806 diff = self.diffopts.get('patch')
807 diffopts = patch.diffopts(self.ui, self.diffopts)
807 diffopts = patch.diffopts(self.ui, self.diffopts)
808 prev = self.repo.changelog.parents(node)[0]
808 prev = self.repo.changelog.parents(node)[0]
809 if stat:
809 if stat:
810 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
810 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
811 match=matchfn, stat=True)
811 match=matchfn, stat=True)
812 if diff:
812 if diff:
813 if stat:
813 if stat:
814 self.ui.write("\n")
814 self.ui.write("\n")
815 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
815 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
816 match=matchfn, stat=False)
816 match=matchfn, stat=False)
817 self.ui.write("\n")
817 self.ui.write("\n")
818
818
819 def _meaningful_parentrevs(self, log, rev):
819 def _meaningful_parentrevs(self, log, rev):
820 """Return list of meaningful (or all if debug) parentrevs for rev.
820 """Return list of meaningful (or all if debug) parentrevs for rev.
821
821
822 For merges (two non-nullrev revisions) both parents are meaningful.
822 For merges (two non-nullrev revisions) both parents are meaningful.
823 Otherwise the first parent revision is considered meaningful if it
823 Otherwise the first parent revision is considered meaningful if it
824 is not the preceding revision.
824 is not the preceding revision.
825 """
825 """
826 parents = log.parentrevs(rev)
826 parents = log.parentrevs(rev)
827 if not self.ui.debugflag and parents[1] == nullrev:
827 if not self.ui.debugflag and parents[1] == nullrev:
828 if parents[0] >= rev - 1:
828 if parents[0] >= rev - 1:
829 parents = []
829 parents = []
830 else:
830 else:
831 parents = [parents[0]]
831 parents = [parents[0]]
832 return parents
832 return parents
833
833
834
834
835 class changeset_templater(changeset_printer):
835 class changeset_templater(changeset_printer):
836 '''format changeset information.'''
836 '''format changeset information.'''
837
837
838 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
838 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
839 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
839 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
840 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
840 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
841 defaulttempl = {
841 defaulttempl = {
842 'parent': '{rev}:{node|formatnode} ',
842 'parent': '{rev}:{node|formatnode} ',
843 'manifest': '{rev}:{node|formatnode}',
843 'manifest': '{rev}:{node|formatnode}',
844 'file_copy': '{name} ({source})',
844 'file_copy': '{name} ({source})',
845 'extra': '{key}={value|stringescape}'
845 'extra': '{key}={value|stringescape}'
846 }
846 }
847 # filecopy is preserved for compatibility reasons
847 # filecopy is preserved for compatibility reasons
848 defaulttempl['filecopy'] = defaulttempl['file_copy']
848 defaulttempl['filecopy'] = defaulttempl['file_copy']
849 self.t = templater.templater(mapfile, {'formatnode': formatnode},
849 self.t = templater.templater(mapfile, {'formatnode': formatnode},
850 cache=defaulttempl)
850 cache=defaulttempl)
851 self.cache = {}
851 self.cache = {}
852
852
853 def use_template(self, t):
853 def use_template(self, t):
854 '''set template string to use'''
854 '''set template string to use'''
855 self.t.cache['changeset'] = t
855 self.t.cache['changeset'] = t
856
856
857 def _meaningful_parentrevs(self, ctx):
857 def _meaningful_parentrevs(self, ctx):
858 """Return list of meaningful (or all if debug) parentrevs for rev.
858 """Return list of meaningful (or all if debug) parentrevs for rev.
859 """
859 """
860 parents = ctx.parents()
860 parents = ctx.parents()
861 if len(parents) > 1:
861 if len(parents) > 1:
862 return parents
862 return parents
863 if self.ui.debugflag:
863 if self.ui.debugflag:
864 return [parents[0], self.repo['null']]
864 return [parents[0], self.repo['null']]
865 if parents[0].rev() >= ctx.rev() - 1:
865 if parents[0].rev() >= ctx.rev() - 1:
866 return []
866 return []
867 return parents
867 return parents
868
868
869 def _show(self, ctx, copies, matchfn, props):
869 def _show(self, ctx, copies, matchfn, props):
870 '''show a single changeset or file revision'''
870 '''show a single changeset or file revision'''
871
871
872 showlist = templatekw.showlist
872 showlist = templatekw.showlist
873
873
874 # showparents() behaviour depends on ui trace level which
874 # showparents() behaviour depends on ui trace level which
875 # causes unexpected behaviours at templating level and makes
875 # causes unexpected behaviours at templating level and makes
876 # it harder to extract it in a standalone function. Its
876 # it harder to extract it in a standalone function. Its
877 # behaviour cannot be changed so leave it here for now.
877 # behaviour cannot be changed so leave it here for now.
878 def showparents(**args):
878 def showparents(**args):
879 ctx = args['ctx']
879 ctx = args['ctx']
880 parents = [[('rev', p.rev()), ('node', p.hex())]
880 parents = [[('rev', p.rev()), ('node', p.hex())]
881 for p in self._meaningful_parentrevs(ctx)]
881 for p in self._meaningful_parentrevs(ctx)]
882 return showlist('parent', parents, **args)
882 return showlist('parent', parents, **args)
883
883
884 props = props.copy()
884 props = props.copy()
885 props.update(templatekw.keywords)
885 props.update(templatekw.keywords)
886 props['parents'] = showparents
886 props['parents'] = showparents
887 props['templ'] = self.t
887 props['templ'] = self.t
888 props['ctx'] = ctx
888 props['ctx'] = ctx
889 props['repo'] = self.repo
889 props['repo'] = self.repo
890 props['revcache'] = {'copies': copies}
890 props['revcache'] = {'copies': copies}
891 props['cache'] = self.cache
891 props['cache'] = self.cache
892
892
893 # find correct templates for current mode
893 # find correct templates for current mode
894
894
895 tmplmodes = [
895 tmplmodes = [
896 (True, None),
896 (True, None),
897 (self.ui.verbose, 'verbose'),
897 (self.ui.verbose, 'verbose'),
898 (self.ui.quiet, 'quiet'),
898 (self.ui.quiet, 'quiet'),
899 (self.ui.debugflag, 'debug'),
899 (self.ui.debugflag, 'debug'),
900 ]
900 ]
901
901
902 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
902 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
903 for mode, postfix in tmplmodes:
903 for mode, postfix in tmplmodes:
904 for type in types:
904 for type in types:
905 cur = postfix and ('%s_%s' % (type, postfix)) or type
905 cur = postfix and ('%s_%s' % (type, postfix)) or type
906 if mode and cur in self.t:
906 if mode and cur in self.t:
907 types[type] = cur
907 types[type] = cur
908
908
909 try:
909 try:
910
910
911 # write header
911 # write header
912 if types['header']:
912 if types['header']:
913 h = templater.stringify(self.t(types['header'], **props))
913 h = templater.stringify(self.t(types['header'], **props))
914 if self.buffered:
914 if self.buffered:
915 self.header[ctx.rev()] = h
915 self.header[ctx.rev()] = h
916 else:
916 else:
917 if self.lastheader != h:
917 if self.lastheader != h:
918 self.lastheader = h
918 self.lastheader = h
919 self.ui.write(h)
919 self.ui.write(h)
920
920
921 # write changeset metadata, then patch if requested
921 # write changeset metadata, then patch if requested
922 key = types['changeset']
922 key = types['changeset']
923 self.ui.write(templater.stringify(self.t(key, **props)))
923 self.ui.write(templater.stringify(self.t(key, **props)))
924 self.showpatch(ctx.node(), matchfn)
924 self.showpatch(ctx.node(), matchfn)
925
925
926 if types['footer']:
926 if types['footer']:
927 if not self.footer:
927 if not self.footer:
928 self.footer = templater.stringify(self.t(types['footer'],
928 self.footer = templater.stringify(self.t(types['footer'],
929 **props))
929 **props))
930
930
931 except KeyError, inst:
931 except KeyError, inst:
932 msg = _("%s: no key named '%s'")
932 msg = _("%s: no key named '%s'")
933 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
933 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
934 except SyntaxError, inst:
934 except SyntaxError, inst:
935 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
935 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
936
936
937 def show_changeset(ui, repo, opts, buffered=False):
937 def show_changeset(ui, repo, opts, buffered=False):
938 """show one changeset using template or regular display.
938 """show one changeset using template or regular display.
939
939
940 Display format will be the first non-empty hit of:
940 Display format will be the first non-empty hit of:
941 1. option 'template'
941 1. option 'template'
942 2. option 'style'
942 2. option 'style'
943 3. [ui] setting 'logtemplate'
943 3. [ui] setting 'logtemplate'
944 4. [ui] setting 'style'
944 4. [ui] setting 'style'
945 If all of these values are either the unset or the empty string,
945 If all of these values are either the unset or the empty string,
946 regular display via changeset_printer() is done.
946 regular display via changeset_printer() is done.
947 """
947 """
948 # options
948 # options
949 patch = False
949 patch = False
950 if opts.get('patch') or opts.get('stat'):
950 if opts.get('patch') or opts.get('stat'):
951 patch = matchall(repo)
951 patch = matchall(repo)
952
952
953 tmpl = opts.get('template')
953 tmpl = opts.get('template')
954 style = None
954 style = None
955 if tmpl:
955 if tmpl:
956 tmpl = templater.parsestring(tmpl, quoted=False)
956 tmpl = templater.parsestring(tmpl, quoted=False)
957 else:
957 else:
958 style = opts.get('style')
958 style = opts.get('style')
959
959
960 # ui settings
960 # ui settings
961 if not (tmpl or style):
961 if not (tmpl or style):
962 tmpl = ui.config('ui', 'logtemplate')
962 tmpl = ui.config('ui', 'logtemplate')
963 if tmpl:
963 if tmpl:
964 tmpl = templater.parsestring(tmpl)
964 tmpl = templater.parsestring(tmpl)
965 else:
965 else:
966 style = util.expandpath(ui.config('ui', 'style', ''))
966 style = util.expandpath(ui.config('ui', 'style', ''))
967
967
968 if not (tmpl or style):
968 if not (tmpl or style):
969 return changeset_printer(ui, repo, patch, opts, buffered)
969 return changeset_printer(ui, repo, patch, opts, buffered)
970
970
971 mapfile = None
971 mapfile = None
972 if style and not tmpl:
972 if style and not tmpl:
973 mapfile = style
973 mapfile = style
974 if not os.path.split(mapfile)[0]:
974 if not os.path.split(mapfile)[0]:
975 mapname = (templater.templatepath('map-cmdline.' + mapfile)
975 mapname = (templater.templatepath('map-cmdline.' + mapfile)
976 or templater.templatepath(mapfile))
976 or templater.templatepath(mapfile))
977 if mapname:
977 if mapname:
978 mapfile = mapname
978 mapfile = mapname
979
979
980 try:
980 try:
981 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
981 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
982 except SyntaxError, inst:
982 except SyntaxError, inst:
983 raise util.Abort(inst.args[0])
983 raise util.Abort(inst.args[0])
984 if tmpl:
984 if tmpl:
985 t.use_template(tmpl)
985 t.use_template(tmpl)
986 return t
986 return t
987
987
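A minimal sketch of the four-step fallback described in the docstring, with plain strings standing in for the option and config lookups (the helper name is illustrative, not Mercurial API):

def pick_display(template_opt, style_opt, cfg_logtemplate, cfg_style):
    tmpl, style = template_opt, None
    if not tmpl:
        style = style_opt
    if not (tmpl or style):
        tmpl = cfg_logtemplate
        if not tmpl:
            style = cfg_style
    if not (tmpl or style):
        return 'changeset_printer'     # regular display
    return 'changeset_templater'       # template- or style-driven display

print(pick_display('', '', '{rev}: {desc|firstline}\n', ''))
# 'changeset_templater', selected via the [ui] logtemplate setting (step 3)
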
988 def finddate(ui, repo, date):
988 def finddate(ui, repo, date):
989 """Find the tipmost changeset that matches the given date spec"""
989 """Find the tipmost changeset that matches the given date spec"""
990
990
991 df = util.matchdate(date)
991 df = util.matchdate(date)
992 m = matchall(repo)
992 m = matchall(repo)
993 results = {}
993 results = {}
994
994
995 def prep(ctx, fns):
995 def prep(ctx, fns):
996 d = ctx.date()
996 d = ctx.date()
997 if df(d[0]):
997 if df(d[0]):
998 results[ctx.rev()] = d
998 results[ctx.rev()] = d
999
999
1000 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1000 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1001 rev = ctx.rev()
1001 rev = ctx.rev()
1002 if rev in results:
1002 if rev in results:
1003 ui.status(_("Found revision %s from %s\n") %
1003 ui.status(_("Found revision %s from %s\n") %
1004 (rev, util.datestr(results[rev])))
1004 (rev, util.datestr(results[rev])))
1005 return str(rev)
1005 return str(rev)
1006
1006
1007 raise util.Abort(_("revision matching date not found"))
1007 raise util.Abort(_("revision matching date not found"))
1008
1008
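The same idea as a standalone sketch (not Mercurial code): scan revisions and keep the tipmost one whose timestamp satisfies the date predicate that util.matchdate builds:

def tipmost_matching(revisions, datefilter):
    # revisions: iterable of (rev, unix_timestamp) pairs, oldest first
    # datefilter: predicate on a timestamp
    found = None
    for rev, ts in revisions:
        if datefilter(ts):
            found = rev
    if found is None:
        raise ValueError("revision matching date not found")
    return found

print(tipmost_matching([(0, 100), (1, 200), (2, 300)], lambda ts: ts <= 200))  # 1
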
1009 def walkchangerevs(repo, match, opts, prepare):
1009 def walkchangerevs(repo, match, opts, prepare):
1010 '''Iterate over files and the revs in which they changed.
1010 '''Iterate over files and the revs in which they changed.
1011
1011
1012 Callers most commonly need to iterate backwards over the history
1012 Callers most commonly need to iterate backwards over the history
1013 in which they are interested. Doing so has awful (quadratic-looking)
1013 in which they are interested. Doing so has awful (quadratic-looking)
1014 performance, so we use iterators in a "windowed" way.
1014 performance, so we use iterators in a "windowed" way.
1015
1015
1016 We walk a window of revisions in the desired order. Within the
1016 We walk a window of revisions in the desired order. Within the
1017 window, we first walk forwards to gather data, then in the desired
1017 window, we first walk forwards to gather data, then in the desired
1018 order (usually backwards) to display it.
1018 order (usually backwards) to display it.
1019
1019
1020 This function returns an iterator yielding contexts. Before
1020 This function returns an iterator yielding contexts. Before
1021 yielding each context, the iterator will first call the prepare
1021 yielding each context, the iterator will first call the prepare
1022 function on each context in the window in forward order.'''
1022 function on each context in the window in forward order.'''
1023
1023
1024 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1024 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1025 if start < end:
1025 if start < end:
1026 while start < end:
1026 while start < end:
1027 yield start, min(windowsize, end - start)
1027 yield start, min(windowsize, end - start)
1028 start += windowsize
1028 start += windowsize
1029 if windowsize < sizelimit:
1029 if windowsize < sizelimit:
1030 windowsize *= 2
1030 windowsize *= 2
1031 else:
1031 else:
1032 while start > end:
1032 while start > end:
1033 yield start, min(windowsize, start - end - 1)
1033 yield start, min(windowsize, start - end - 1)
1034 start -= windowsize
1034 start -= windowsize
1035 if windowsize < sizelimit:
1035 if windowsize < sizelimit:
1036 windowsize *= 2
1036 windowsize *= 2
1037
1037
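For reference, the generator above copied out on its own so its behaviour is easy to see: windows start at 8 revisions and double until the 512 cap.

def increasing_windows(start, end, windowsize=8, sizelimit=512):
    if start < end:
        while start < end:
            yield start, min(windowsize, end - start)
            start += windowsize
            if windowsize < sizelimit:
                windowsize *= 2
    else:
        while start > end:
            yield start, min(windowsize, start - end - 1)
            start -= windowsize
            if windowsize < sizelimit:
                windowsize *= 2

print(list(increasing_windows(0, 20)))   # [(0, 8), (8, 12)]
print(list(increasing_windows(0, 100)))  # [(0, 8), (8, 16), (24, 32), (56, 44)]
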
1038 follow = opts.get('follow') or opts.get('follow_first')
1038 follow = opts.get('follow') or opts.get('follow_first')
1039
1039
1040 if not len(repo):
1040 if not len(repo):
1041 return []
1041 return []
1042
1042
1043 if follow:
1043 if follow:
1044 defrange = '%s:0' % repo['.'].rev()
1044 defrange = '%s:0' % repo['.'].rev()
1045 else:
1045 else:
1046 defrange = '-1:0'
1046 defrange = '-1:0'
1047 revs = revrange(repo, opts['rev'] or [defrange])
1047 revs = revrange(repo, opts['rev'] or [defrange])
1048 if not revs:
1048 if not revs:
1049 return []
1049 return []
1050 wanted = set()
1050 wanted = set()
1051 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1051 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1052 fncache = {}
1052 fncache = {}
1053 change = util.cachefunc(repo.changectx)
1053 change = util.cachefunc(repo.changectx)
1054
1054
1055 # First step is to fill wanted, the set of revisions that we want to yield.
1055 # First step is to fill wanted, the set of revisions that we want to yield.
1056 # When it does not induce extra cost, we also fill fncache for revisions in
1056 # When it does not induce extra cost, we also fill fncache for revisions in
1057 # wanted: a cache of filenames that were changed (ctx.files()) and that
1057 # wanted: a cache of filenames that were changed (ctx.files()) and that
1058 # match the file filtering conditions.
1058 # match the file filtering conditions.
1059
1059
1060 if not slowpath and not match.files():
1060 if not slowpath and not match.files():
1061 # No files, no patterns. Display all revs.
1061 # No files, no patterns. Display all revs.
1062 wanted = set(revs)
1062 wanted = set(revs)
1063 copies = []
1063 copies = []
1064
1064
1065 if not slowpath:
1065 if not slowpath:
1066 # We only have to read through the filelog to find wanted revisions
1066 # We only have to read through the filelog to find wanted revisions
1067
1067
1068 minrev, maxrev = min(revs), max(revs)
1068 minrev, maxrev = min(revs), max(revs)
1069 def filerevgen(filelog, last):
1069 def filerevgen(filelog, last):
1070 """
1070 """
1071 Only files, no patterns. Check the history of each file.
1071 Only files, no patterns. Check the history of each file.
1072
1072
1073 Examines filelog entries within minrev, maxrev linkrev range
1073 Examines filelog entries within minrev, maxrev linkrev range
1074 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1074 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1075 tuples in backwards order
1075 tuples in backwards order
1076 """
1076 """
1077 cl_count = len(repo)
1077 cl_count = len(repo)
1078 revs = []
1078 revs = []
1079 for j in xrange(0, last + 1):
1079 for j in xrange(0, last + 1):
1080 linkrev = filelog.linkrev(j)
1080 linkrev = filelog.linkrev(j)
1081 if linkrev < minrev:
1081 if linkrev < minrev:
1082 continue
1082 continue
1083 # only yield revs for which we have the changelog; missing ones can
1083 # only yield revs for which we have the changelog; missing ones can
1084 # happen while doing "hg log" during a pull or commit
1084 # happen while doing "hg log" during a pull or commit
1085 if linkrev > maxrev or linkrev >= cl_count:
1085 if linkrev > maxrev or linkrev >= cl_count:
1086 break
1086 break
1087
1087
1088 parentlinkrevs = []
1088 parentlinkrevs = []
1089 for p in filelog.parentrevs(j):
1089 for p in filelog.parentrevs(j):
1090 if p != nullrev:
1090 if p != nullrev:
1091 parentlinkrevs.append(filelog.linkrev(p))
1091 parentlinkrevs.append(filelog.linkrev(p))
1092 n = filelog.node(j)
1092 n = filelog.node(j)
1093 revs.append((linkrev, parentlinkrevs,
1093 revs.append((linkrev, parentlinkrevs,
1094 follow and filelog.renamed(n)))
1094 follow and filelog.renamed(n)))
1095
1095
1096 return reversed(revs)
1096 return reversed(revs)
1097 def iterfiles():
1097 def iterfiles():
1098 for filename in match.files():
1098 for filename in match.files():
1099 yield filename, None
1099 yield filename, None
1100 for filename_node in copies:
1100 for filename_node in copies:
1101 yield filename_node
1101 yield filename_node
1102 for file_, node in iterfiles():
1102 for file_, node in iterfiles():
1103 filelog = repo.file(file_)
1103 filelog = repo.file(file_)
1104 if not len(filelog):
1104 if not len(filelog):
1105 if node is None:
1105 if node is None:
1106 # A zero count may be a directory or deleted file, so
1106 # A zero count may be a directory or deleted file, so
1107 # try to find matching entries on the slow path.
1107 # try to find matching entries on the slow path.
1108 if follow:
1108 if follow:
1109 raise util.Abort(
1109 raise util.Abort(
1110 _('cannot follow nonexistent file: "%s"') % file_)
1110 _('cannot follow nonexistent file: "%s"') % file_)
1111 slowpath = True
1111 slowpath = True
1112 break
1112 break
1113 else:
1113 else:
1114 continue
1114 continue
1115
1115
1116 if node is None:
1116 if node is None:
1117 last = len(filelog) - 1
1117 last = len(filelog) - 1
1118 else:
1118 else:
1119 last = filelog.rev(node)
1119 last = filelog.rev(node)
1120
1120
1121
1121
1122 # keep track of all ancestors of the file
1122 # keep track of all ancestors of the file
1123 ancestors = set([filelog.linkrev(last)])
1123 ancestors = set([filelog.linkrev(last)])
1124
1124
1125 # iterate from latest to oldest revision
1125 # iterate from latest to oldest revision
1126 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1126 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1127 if rev not in ancestors:
1127 if rev not in ancestors:
1128 continue
1128 continue
1129 # XXX insert 1327 fix here
1129 # XXX insert 1327 fix here
1130 if flparentlinkrevs:
1130 if flparentlinkrevs:
1131 ancestors.update(flparentlinkrevs)
1131 ancestors.update(flparentlinkrevs)
1132
1132
1133 fncache.setdefault(rev, []).append(file_)
1133 fncache.setdefault(rev, []).append(file_)
1134 wanted.add(rev)
1134 wanted.add(rev)
1135 if copied:
1135 if copied:
1136 copies.append(copied)
1136 copies.append(copied)
1137 if slowpath:
1137 if slowpath:
1138 # We have to read the changelog to match filenames against
1138 # We have to read the changelog to match filenames against
1139 # changed files
1139 # changed files
1140
1140
1141 if follow:
1141 if follow:
1142 raise util.Abort(_('can only follow copies/renames for explicit '
1142 raise util.Abort(_('can only follow copies/renames for explicit '
1143 'filenames'))
1143 'filenames'))
1144
1144
1145 # The slow path checks files modified in every changeset.
1145 # The slow path checks files modified in every changeset.
1146 for i in sorted(revs):
1146 for i in sorted(revs):
1147 ctx = change(i)
1147 ctx = change(i)
1148 matches = filter(match, ctx.files())
1148 matches = filter(match, ctx.files())
1149 if matches:
1149 if matches:
1150 fncache[i] = matches
1150 fncache[i] = matches
1151 wanted.add(i)
1151 wanted.add(i)
1152
1152
1153 class followfilter(object):
1153 class followfilter(object):
1154 def __init__(self, onlyfirst=False):
1154 def __init__(self, onlyfirst=False):
1155 self.startrev = nullrev
1155 self.startrev = nullrev
1156 self.roots = set()
1156 self.roots = set()
1157 self.onlyfirst = onlyfirst
1157 self.onlyfirst = onlyfirst
1158
1158
1159 def match(self, rev):
1159 def match(self, rev):
1160 def realparents(rev):
1160 def realparents(rev):
1161 if self.onlyfirst:
1161 if self.onlyfirst:
1162 return repo.changelog.parentrevs(rev)[0:1]
1162 return repo.changelog.parentrevs(rev)[0:1]
1163 else:
1163 else:
1164 return filter(lambda x: x != nullrev,
1164 return filter(lambda x: x != nullrev,
1165 repo.changelog.parentrevs(rev))
1165 repo.changelog.parentrevs(rev))
1166
1166
1167 if self.startrev == nullrev:
1167 if self.startrev == nullrev:
1168 self.startrev = rev
1168 self.startrev = rev
1169 return True
1169 return True
1170
1170
1171 if rev > self.startrev:
1171 if rev > self.startrev:
1172 # forward: all descendants
1172 # forward: all descendants
1173 if not self.roots:
1173 if not self.roots:
1174 self.roots.add(self.startrev)
1174 self.roots.add(self.startrev)
1175 for parent in realparents(rev):
1175 for parent in realparents(rev):
1176 if parent in self.roots:
1176 if parent in self.roots:
1177 self.roots.add(rev)
1177 self.roots.add(rev)
1178 return True
1178 return True
1179 else:
1179 else:
1180 # backwards: all parents
1180 # backwards: all parents
1181 if not self.roots:
1181 if not self.roots:
1182 self.roots.update(realparents(self.startrev))
1182 self.roots.update(realparents(self.startrev))
1183 if rev in self.roots:
1183 if rev in self.roots:
1184 self.roots.remove(rev)
1184 self.roots.remove(rev)
1185 self.roots.update(realparents(rev))
1185 self.roots.update(realparents(rev))
1186 return True
1186 return True
1187
1187
1188 return False
1188 return False
1189
1189
1190 # it might be worthwhile to do this in the iterator if the rev range
1190 # it might be worthwhile to do this in the iterator if the rev range
1191 # is descending and the prune args are all within that range
1191 # is descending and the prune args are all within that range
1192 for rev in opts.get('prune', ()):
1192 for rev in opts.get('prune', ()):
1193 rev = repo.changelog.rev(repo.lookup(rev))
1193 rev = repo.changelog.rev(repo.lookup(rev))
1194 ff = followfilter()
1194 ff = followfilter()
1195 stop = min(revs[0], revs[-1])
1195 stop = min(revs[0], revs[-1])
1196 for x in xrange(rev, stop - 1, -1):
1196 for x in xrange(rev, stop - 1, -1):
1197 if ff.match(x):
1197 if ff.match(x):
1198 wanted.discard(x)
1198 wanted.discard(x)
1199
1199
1200 # Now that wanted is correctly initialized, we can iterate over the
1200 # Now that wanted is correctly initialized, we can iterate over the
1201 # revision range, yielding only revisions in wanted.
1201 # revision range, yielding only revisions in wanted.
1202 def iterate():
1202 def iterate():
1203 if follow and not match.files():
1203 if follow and not match.files():
1204 ff = followfilter(onlyfirst=opts.get('follow_first'))
1204 ff = followfilter(onlyfirst=opts.get('follow_first'))
1205 def want(rev):
1205 def want(rev):
1206 return ff.match(rev) and rev in wanted
1206 return ff.match(rev) and rev in wanted
1207 else:
1207 else:
1208 def want(rev):
1208 def want(rev):
1209 return rev in wanted
1209 return rev in wanted
1210
1210
1211 for i, window in increasing_windows(0, len(revs)):
1211 for i, window in increasing_windows(0, len(revs)):
1212 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1212 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1213 for rev in sorted(nrevs):
1213 for rev in sorted(nrevs):
1214 fns = fncache.get(rev)
1214 fns = fncache.get(rev)
1215 ctx = change(rev)
1215 ctx = change(rev)
1216 if not fns:
1216 if not fns:
1217 def fns_generator():
1217 def fns_generator():
1218 for f in ctx.files():
1218 for f in ctx.files():
1219 if match(f):
1219 if match(f):
1220 yield f
1220 yield f
1221 fns = fns_generator()
1221 fns = fns_generator()
1222 prepare(ctx, fns)
1222 prepare(ctx, fns)
1223 for rev in nrevs:
1223 for rev in nrevs:
1224 yield change(rev)
1224 yield change(rev)
1225 return iterate()
1225 return iterate()
1226
1226
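A hedged usage sketch of walkchangerevs(), modeled on finddate() above; it assumes a local repository in the current directory and that hg.repository(), cmdutil.matchall() and this walkchangerevs() keep the signatures shown in this changeset:

from mercurial import hg, ui as uimod, cmdutil

myui = uimod.ui()
repo = hg.repository(myui, '.')
m = cmdutil.matchall(repo)
touched = {}

def prep(ctx, fns):
    # called in forward order on every context in the current window
    touched[ctx.rev()] = list(fns)

for ctx in cmdutil.walkchangerevs(repo, m, {'rev': None}, prep):
    print("%d: %s" % (ctx.rev(), touched[ctx.rev()]))
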
1227 def commit(ui, repo, commitfunc, pats, opts):
1227 def commit(ui, repo, commitfunc, pats, opts):
1228 '''commit the specified files or all outstanding changes'''
1228 '''commit the specified files or all outstanding changes'''
1229 date = opts.get('date')
1229 date = opts.get('date')
1230 if date:
1230 if date:
1231 opts['date'] = util.parsedate(date)
1231 opts['date'] = util.parsedate(date)
1232 message = logmessage(opts)
1232 message = logmessage(opts)
1233
1233
1234 # extract addremove carefully -- this function can be called from a command
1234 # extract addremove carefully -- this function can be called from a command
1235 # that doesn't support addremove
1235 # that doesn't support addremove
1236 if opts.get('addremove'):
1236 if opts.get('addremove'):
1237 addremove(repo, pats, opts)
1237 addremove(repo, pats, opts)
1238
1238
1239 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1239 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1240
1240
1241 def commiteditor(repo, ctx, subs):
1241 def commiteditor(repo, ctx, subs):
1242 if ctx.description():
1242 if ctx.description():
1243 return ctx.description()
1243 return ctx.description()
1244 return commitforceeditor(repo, ctx, subs)
1244 return commitforceeditor(repo, ctx, subs)
1245
1245
1246 def commitforceeditor(repo, ctx, subs):
1246 def commitforceeditor(repo, ctx, subs):
1247 edittext = []
1247 edittext = []
1248 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1248 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1249 if ctx.description():
1249 if ctx.description():
1250 edittext.append(ctx.description())
1250 edittext.append(ctx.description())
1251 edittext.append("")
1251 edittext.append("")
1252 edittext.append("") # Empty line between message and comments.
1252 edittext.append("") # Empty line between message and comments.
1253 edittext.append(_("HG: Enter commit message."
1253 edittext.append(_("HG: Enter commit message."
1254 " Lines beginning with 'HG:' are removed."))
1254 " Lines beginning with 'HG:' are removed."))
1255 edittext.append(_("HG: Leave message empty to abort commit."))
1255 edittext.append(_("HG: Leave message empty to abort commit."))
1256 edittext.append("HG: --")
1256 edittext.append("HG: --")
1257 edittext.append(_("HG: user: %s") % ctx.user())
1257 edittext.append(_("HG: user: %s") % ctx.user())
1258 if ctx.p2():
1258 if ctx.p2():
1259 edittext.append(_("HG: branch merge"))
1259 edittext.append(_("HG: branch merge"))
1260 if ctx.branch():
1260 if ctx.branch():
1261 edittext.append(_("HG: branch '%s'")
1261 edittext.append(_("HG: branch '%s'")
1262 % encoding.tolocal(ctx.branch()))
1262 % encoding.tolocal(ctx.branch()))
1263 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1263 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1264 edittext.extend([_("HG: added %s") % f for f in added])
1264 edittext.extend([_("HG: added %s") % f for f in added])
1265 edittext.extend([_("HG: changed %s") % f for f in modified])
1265 edittext.extend([_("HG: changed %s") % f for f in modified])
1266 edittext.extend([_("HG: removed %s") % f for f in removed])
1266 edittext.extend([_("HG: removed %s") % f for f in removed])
1267 if not added and not modified and not removed:
1267 if not added and not modified and not removed:
1268 edittext.append(_("HG: no files changed"))
1268 edittext.append(_("HG: no files changed"))
1269 edittext.append("")
1269 edittext.append("")
1270 # run editor in the repository root
1270 # run editor in the repository root
1271 olddir = os.getcwd()
1271 olddir = os.getcwd()
1272 os.chdir(repo.root)
1272 os.chdir(repo.root)
1273 text = repo.ui.edit("\n".join(edittext), ctx.user())
1273 text = repo.ui.edit("\n".join(edittext), ctx.user())
1274 text = re.sub("(?m)^HG:.*\n", "", text)
1274 text = re.sub("(?m)^HG:.*\n", "", text)
1275 os.chdir(olddir)
1275 os.chdir(olddir)
1276
1276
1277 if not text.strip():
1277 if not text.strip():
1278 raise util.Abort(_("empty commit message"))
1278 raise util.Abort(_("empty commit message"))
1279
1279
1280 return text
1280 return text
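The editor round-trip above in isolation: every 'HG:' helper line is stripped with a multiline regex before the message is checked for emptiness:

import re

text = "Fix the frobnicator\n\nHG: Enter commit message.\nHG: user: alice\n"
print(re.sub("(?m)^HG:.*\n", "", text))
# prints the commit message followed by a blank line; both HG: lines are gone
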
@@ -1,588 +1,588
1 # revset.py - revision set queries for mercurial
1 # revset.py - revision set queries for mercurial
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import parser, util, error, discovery
9 import parser, util, error, discovery
10 import match as _match
10 import match as matchmod
11 from i18n import _
11 from i18n import _
12
12
13 elements = {
13 elements = {
14 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
14 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
15 "-": (19, ("negate", 19), ("minus", 19)),
15 "-": (19, ("negate", 19), ("minus", 19)),
16 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
16 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
17 ("dagrangepost", 17)),
17 ("dagrangepost", 17)),
18 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
18 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
19 ("dagrangepost", 17)),
19 ("dagrangepost", 17)),
20 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
20 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
21 "not": (10, ("not", 10)),
21 "not": (10, ("not", 10)),
22 "!": (10, ("not", 10)),
22 "!": (10, ("not", 10)),
23 "and": (5, None, ("and", 5)),
23 "and": (5, None, ("and", 5)),
24 "&": (5, None, ("and", 5)),
24 "&": (5, None, ("and", 5)),
25 "or": (4, None, ("or", 4)),
25 "or": (4, None, ("or", 4)),
26 "|": (4, None, ("or", 4)),
26 "|": (4, None, ("or", 4)),
27 "+": (4, None, ("or", 4)),
27 "+": (4, None, ("or", 4)),
28 ",": (2, None, ("list", 2)),
28 ",": (2, None, ("list", 2)),
29 ")": (0, None, None),
29 ")": (0, None, None),
30 "symbol": (0, ("symbol",), None),
30 "symbol": (0, ("symbol",), None),
31 "string": (0, ("string",), None),
31 "string": (0, ("string",), None),
32 "end": (0, None, None),
32 "end": (0, None, None),
33 }
33 }
34
34
35 keywords = set(['and', 'or', 'not'])
35 keywords = set(['and', 'or', 'not'])
36
36
37 def tokenize(program):
37 def tokenize(program):
38 pos, l = 0, len(program)
38 pos, l = 0, len(program)
39 while pos < l:
39 while pos < l:
40 c = program[pos]
40 c = program[pos]
41 if c.isspace(): # skip inter-token whitespace
41 if c.isspace(): # skip inter-token whitespace
42 pass
42 pass
43 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
43 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
44 yield ('::', None, pos)
44 yield ('::', None, pos)
45 pos += 1 # skip ahead
45 pos += 1 # skip ahead
46 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
46 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
47 yield ('..', None, pos)
47 yield ('..', None, pos)
48 pos += 1 # skip ahead
48 pos += 1 # skip ahead
49 elif c in "():,-|&+!": # handle simple operators
49 elif c in "():,-|&+!": # handle simple operators
50 yield (c, None, pos)
50 yield (c, None, pos)
51 elif c in '"\'': # handle quoted strings
51 elif c in '"\'': # handle quoted strings
52 pos += 1
52 pos += 1
53 s = pos
53 s = pos
54 while pos < l: # find closing quote
54 while pos < l: # find closing quote
55 d = program[pos]
55 d = program[pos]
56 if d == '\\': # skip over escaped characters
56 if d == '\\': # skip over escaped characters
57 pos += 2
57 pos += 2
58 continue
58 continue
59 if d == c:
59 if d == c:
60 yield ('string', program[s:pos].decode('string-escape'), s)
60 yield ('string', program[s:pos].decode('string-escape'), s)
61 break
61 break
62 pos += 1
62 pos += 1
63 else:
63 else:
64 raise error.ParseError(_("unterminated string"), s)
64 raise error.ParseError(_("unterminated string"), s)
65 elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
65 elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
66 s = pos
66 s = pos
67 pos += 1
67 pos += 1
68 while pos < l: # find end of symbol
68 while pos < l: # find end of symbol
69 d = program[pos]
69 d = program[pos]
70 if not (d.isalnum() or d in "._" or ord(d) > 127):
70 if not (d.isalnum() or d in "._" or ord(d) > 127):
71 break
71 break
72 if d == '.' and program[pos - 1] == '.': # special case for ..
72 if d == '.' and program[pos - 1] == '.': # special case for ..
73 pos -= 1
73 pos -= 1
74 break
74 break
75 pos += 1
75 pos += 1
76 sym = program[s:pos]
76 sym = program[s:pos]
77 if sym in keywords: # operator keywords
77 if sym in keywords: # operator keywords
78 yield (sym, None, s)
78 yield (sym, None, s)
79 else:
79 else:
80 yield ('symbol', sym, s)
80 yield ('symbol', sym, s)
81 pos -= 1
81 pos -= 1
82 else:
82 else:
83 raise error.ParseError(_("syntax error"), pos)
83 raise error.ParseError(_("syntax error"), pos)
84 pos += 1
84 pos += 1
85 yield ('end', None, pos)
85 yield ('end', None, pos)
86
86
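What the tokenizer yields for a small query, worked out from the code above; keyword operators get their own token type, everything else becomes a 'symbol' or 'string' token carrying its position:

for token in tokenize("not 4::10"):
    print(token)
# ('not', None, 0)
# ('symbol', '4', 4)
# ('::', None, 5)
# ('symbol', '10', 7)
# ('end', None, 9)
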
87 # helpers
87 # helpers
88
88
89 def getstring(x, err):
89 def getstring(x, err):
90 if x and (x[0] == 'string' or x[0] == 'symbol'):
90 if x and (x[0] == 'string' or x[0] == 'symbol'):
91 return x[1]
91 return x[1]
92 raise error.ParseError(err)
92 raise error.ParseError(err)
93
93
94 def getlist(x):
94 def getlist(x):
95 if not x:
95 if not x:
96 return []
96 return []
97 if x[0] == 'list':
97 if x[0] == 'list':
98 return getlist(x[1]) + [x[2]]
98 return getlist(x[1]) + [x[2]]
99 return [x]
99 return [x]
100
100
101 def getargs(x, min, max, err):
101 def getargs(x, min, max, err):
102 l = getlist(x)
102 l = getlist(x)
103 if len(l) < min or len(l) > max:
103 if len(l) < min or len(l) > max:
104 raise error.ParseError(err)
104 raise error.ParseError(err)
105 return l
105 return l
106
106
107 def getset(repo, subset, x):
107 def getset(repo, subset, x):
108 if not x:
108 if not x:
109 raise error.ParseError(_("missing argument"))
109 raise error.ParseError(_("missing argument"))
110 return methods[x[0]](repo, subset, *x[1:])
110 return methods[x[0]](repo, subset, *x[1:])
111
111
112 # operator methods
112 # operator methods
113
113
114 def stringset(repo, subset, x):
114 def stringset(repo, subset, x):
115 x = repo[x].rev()
115 x = repo[x].rev()
116 if x == -1 and len(subset) == len(repo):
116 if x == -1 and len(subset) == len(repo):
117 return [-1]
117 return [-1]
118 if x in subset:
118 if x in subset:
119 return [x]
119 return [x]
120 return []
120 return []
121
121
122 def symbolset(repo, subset, x):
122 def symbolset(repo, subset, x):
123 if x in symbols:
123 if x in symbols:
124 raise error.ParseError(_("can't use %s here") % x)
124 raise error.ParseError(_("can't use %s here") % x)
125 return stringset(repo, subset, x)
125 return stringset(repo, subset, x)
126
126
127 def rangeset(repo, subset, x, y):
127 def rangeset(repo, subset, x, y):
128 m = getset(repo, subset, x)
128 m = getset(repo, subset, x)
129 if not m:
129 if not m:
130 m = getset(repo, range(len(repo)), x)
130 m = getset(repo, range(len(repo)), x)
131
131
132 n = getset(repo, subset, y)
132 n = getset(repo, subset, y)
133 if not n:
133 if not n:
134 n = getset(repo, range(len(repo)), y)
134 n = getset(repo, range(len(repo)), y)
135
135
136 if not m or not n:
136 if not m or not n:
137 return []
137 return []
138 m, n = m[0], n[-1]
138 m, n = m[0], n[-1]
139
139
140 if m < n:
140 if m < n:
141 r = range(m, n + 1)
141 r = range(m, n + 1)
142 else:
142 else:
143 r = range(m, n - 1, -1)
143 r = range(m, n - 1, -1)
144 s = set(subset)
144 s = set(subset)
145 return [x for x in r if x in s]
145 return [x for x in r if x in s]
146
146
147 def andset(repo, subset, x, y):
147 def andset(repo, subset, x, y):
148 return getset(repo, getset(repo, subset, x), y)
148 return getset(repo, getset(repo, subset, x), y)
149
149
150 def orset(repo, subset, x, y):
150 def orset(repo, subset, x, y):
151 s = set(getset(repo, subset, x))
151 s = set(getset(repo, subset, x))
152 s |= set(getset(repo, [r for r in subset if r not in s], y))
152 s |= set(getset(repo, [r for r in subset if r not in s], y))
153 return [r for r in subset if r in s]
153 return [r for r in subset if r in s]
154
154
155 def notset(repo, subset, x):
155 def notset(repo, subset, x):
156 s = set(getset(repo, subset, x))
156 s = set(getset(repo, subset, x))
157 return [r for r in subset if r not in s]
157 return [r for r in subset if r not in s]
158
158
159 def listset(repo, subset, a, b):
159 def listset(repo, subset, a, b):
160 raise error.ParseError(_("can't use a list in this context"))
160 raise error.ParseError(_("can't use a list in this context"))
161
161
162 def func(repo, subset, a, b):
162 def func(repo, subset, a, b):
163 if a[0] == 'symbol' and a[1] in symbols:
163 if a[0] == 'symbol' and a[1] in symbols:
164 return symbols[a[1]](repo, subset, b)
164 return symbols[a[1]](repo, subset, b)
165 raise error.ParseError(_("not a function: %s") % a[1])
165 raise error.ParseError(_("not a function: %s") % a[1])
166
166
167 # functions
167 # functions
168
168
169 def p1(repo, subset, x):
169 def p1(repo, subset, x):
170 ps = set()
170 ps = set()
171 cl = repo.changelog
171 cl = repo.changelog
172 for r in getset(repo, subset, x):
172 for r in getset(repo, subset, x):
173 ps.add(cl.parentrevs(r)[0])
173 ps.add(cl.parentrevs(r)[0])
174 return [r for r in subset if r in ps]
174 return [r for r in subset if r in ps]
175
175
176 def p2(repo, subset, x):
176 def p2(repo, subset, x):
177 ps = set()
177 ps = set()
178 cl = repo.changelog
178 cl = repo.changelog
179 for r in getset(repo, subset, x):
179 for r in getset(repo, subset, x):
180 ps.add(cl.parentrevs(r)[1])
180 ps.add(cl.parentrevs(r)[1])
181 return [r for r in subset if r in ps]
181 return [r for r in subset if r in ps]
182
182
183 def parents(repo, subset, x):
183 def parents(repo, subset, x):
184 ps = set()
184 ps = set()
185 cl = repo.changelog
185 cl = repo.changelog
186 for r in getset(repo, subset, x):
186 for r in getset(repo, subset, x):
187 ps.update(cl.parentrevs(r))
187 ps.update(cl.parentrevs(r))
188 return [r for r in subset if r in ps]
188 return [r for r in subset if r in ps]
189
189
190 def maxrev(repo, subset, x):
190 def maxrev(repo, subset, x):
191 s = getset(repo, subset, x)
191 s = getset(repo, subset, x)
192 if s:
192 if s:
193 m = max(s)
193 m = max(s)
194 if m in subset:
194 if m in subset:
195 return [m]
195 return [m]
196 return []
196 return []
197
197
198 def minrev(repo, subset, x):
198 def minrev(repo, subset, x):
199 s = getset(repo, subset, x)
199 s = getset(repo, subset, x)
200 if s:
200 if s:
201 m = min(s)
201 m = min(s)
202 if m in subset:
202 if m in subset:
203 return [m]
203 return [m]
204 return []
204 return []
205
205
206 def limit(repo, subset, x):
206 def limit(repo, subset, x):
207 l = getargs(x, 2, 2, _("limit wants two arguments"))
207 l = getargs(x, 2, 2, _("limit wants two arguments"))
208 try:
208 try:
209 lim = int(getstring(l[1], _("limit wants a number")))
209 lim = int(getstring(l[1], _("limit wants a number")))
210 except ValueError:
210 except ValueError:
211 raise error.ParseError(_("limit expects a number"))
211 raise error.ParseError(_("limit expects a number"))
212 return getset(repo, subset, l[0])[:lim]
212 return getset(repo, subset, l[0])[:lim]
213
213
214 def children(repo, subset, x):
214 def children(repo, subset, x):
215 cs = set()
215 cs = set()
216 cl = repo.changelog
216 cl = repo.changelog
217 s = set(getset(repo, subset, x))
217 s = set(getset(repo, subset, x))
218 for r in xrange(0, len(repo)):
218 for r in xrange(0, len(repo)):
219 for p in cl.parentrevs(r):
219 for p in cl.parentrevs(r):
220 if p in s:
220 if p in s:
221 cs.add(r)
221 cs.add(r)
222 return [r for r in subset if r in cs]
222 return [r for r in subset if r in cs]
223
223
224 def branch(repo, subset, x):
224 def branch(repo, subset, x):
225 s = getset(repo, range(len(repo)), x)
225 s = getset(repo, range(len(repo)), x)
226 b = set()
226 b = set()
227 for r in s:
227 for r in s:
228 b.add(repo[r].branch())
228 b.add(repo[r].branch())
229 s = set(s)
229 s = set(s)
230 return [r for r in subset if r in s or repo[r].branch() in b]
230 return [r for r in subset if r in s or repo[r].branch() in b]
231
231
232 def ancestor(repo, subset, x):
232 def ancestor(repo, subset, x):
233 l = getargs(x, 2, 2, _("ancestor wants two arguments"))
233 l = getargs(x, 2, 2, _("ancestor wants two arguments"))
234 r = range(len(repo))
234 r = range(len(repo))
235 a = getset(repo, r, l[0])
235 a = getset(repo, r, l[0])
236 b = getset(repo, r, l[1])
236 b = getset(repo, r, l[1])
237 if len(a) != 1 or len(b) != 1:
237 if len(a) != 1 or len(b) != 1:
238 raise error.ParseError(_("ancestor arguments must be single revisions"))
238 raise error.ParseError(_("ancestor arguments must be single revisions"))
239 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
239 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
240
240
241 return [r for r in an if r in subset]
241 return [r for r in an if r in subset]
242
242
243 def ancestors(repo, subset, x):
243 def ancestors(repo, subset, x):
244 args = getset(repo, range(len(repo)), x)
244 args = getset(repo, range(len(repo)), x)
245 if not args:
245 if not args:
246 return []
246 return []
247 s = set(repo.changelog.ancestors(*args)) | set(args)
247 s = set(repo.changelog.ancestors(*args)) | set(args)
248 return [r for r in subset if r in s]
248 return [r for r in subset if r in s]
249
249
250 def descendants(repo, subset, x):
250 def descendants(repo, subset, x):
251 args = getset(repo, range(len(repo)), x)
251 args = getset(repo, range(len(repo)), x)
252 if not args:
252 if not args:
253 return []
253 return []
254 s = set(repo.changelog.descendants(*args)) | set(args)
254 s = set(repo.changelog.descendants(*args)) | set(args)
255 return [r for r in subset if r in s]
255 return [r for r in subset if r in s]
256
256
257 def follow(repo, subset, x):
257 def follow(repo, subset, x):
258 getargs(x, 0, 0, _("follow takes no arguments"))
258 getargs(x, 0, 0, _("follow takes no arguments"))
259 p = repo['.'].rev()
259 p = repo['.'].rev()
260 s = set(repo.changelog.ancestors(p)) | set([p])
260 s = set(repo.changelog.ancestors(p)) | set([p])
261 return [r for r in subset if r in s]
261 return [r for r in subset if r in s]
262
262
263 def date(repo, subset, x):
263 def date(repo, subset, x):
264 ds = getstring(x, _("date wants a string"))
264 ds = getstring(x, _("date wants a string"))
265 dm = util.matchdate(ds)
265 dm = util.matchdate(ds)
266 return [r for r in subset if dm(repo[r].date()[0])]
266 return [r for r in subset if dm(repo[r].date()[0])]
267
267
268 def keyword(repo, subset, x):
268 def keyword(repo, subset, x):
269 kw = getstring(x, _("keyword wants a string")).lower()
269 kw = getstring(x, _("keyword wants a string")).lower()
270 l = []
270 l = []
271 for r in subset:
271 for r in subset:
272 c = repo[r]
272 c = repo[r]
273 t = " ".join(c.files() + [c.user(), c.description()])
273 t = " ".join(c.files() + [c.user(), c.description()])
274 if kw in t.lower():
274 if kw in t.lower():
275 l.append(r)
275 l.append(r)
276 return l
276 return l
277
277
278 def grep(repo, subset, x):
278 def grep(repo, subset, x):
279 gr = re.compile(getstring(x, _("grep wants a string")))
279 gr = re.compile(getstring(x, _("grep wants a string")))
280 l = []
280 l = []
281 for r in subset:
281 for r in subset:
282 c = repo[r]
282 c = repo[r]
283 for e in c.files() + [c.user(), c.description()]:
283 for e in c.files() + [c.user(), c.description()]:
284 if gr.search(e):
284 if gr.search(e):
285 l.append(r)
285 l.append(r)
286 continue
286 continue
287 return l
287 return l
288
288
289 def author(repo, subset, x):
289 def author(repo, subset, x):
290 n = getstring(x, _("author wants a string")).lower()
290 n = getstring(x, _("author wants a string")).lower()
291 return [r for r in subset if n in repo[r].user().lower()]
291 return [r for r in subset if n in repo[r].user().lower()]
292
292
293 def hasfile(repo, subset, x):
293 def hasfile(repo, subset, x):
294 pat = getstring(x, _("file wants a pattern"))
294 pat = getstring(x, _("file wants a pattern"))
295 m = _match.match(repo.root, repo.getcwd(), [pat])
295 m = matchmod.match(repo.root, repo.getcwd(), [pat])
296 s = []
296 s = []
297 for r in subset:
297 for r in subset:
298 for f in repo[r].files():
298 for f in repo[r].files():
299 if m(f):
299 if m(f):
300 s.append(r)
300 s.append(r)
301 continue
301 continue
302 return s
302 return s
303
303
304 def contains(repo, subset, x):
304 def contains(repo, subset, x):
305 pat = getstring(x, _("contains wants a pattern"))
305 pat = getstring(x, _("contains wants a pattern"))
306 m = _match.match(repo.root, repo.getcwd(), [pat])
306 m = matchmod.match(repo.root, repo.getcwd(), [pat])
307 s = []
307 s = []
308 if m.files() == [pat]:
308 if m.files() == [pat]:
309 for r in subset:
309 for r in subset:
310 if pat in repo[r]:
310 if pat in repo[r]:
311 s.append(r)
311 s.append(r)
312 continue
312 continue
313 else:
313 else:
314 for r in subset:
314 for r in subset:
315 for f in repo[r].manifest():
315 for f in repo[r].manifest():
316 if m(f):
316 if m(f):
317 s.append(r)
317 s.append(r)
318 continue
318 continue
319 return s
319 return s
320
320
321 def checkstatus(repo, subset, pat, field):
321 def checkstatus(repo, subset, pat, field):
322 m = _match.match(repo.root, repo.getcwd(), [pat])
322 m = matchmod.match(repo.root, repo.getcwd(), [pat])
323 s = []
323 s = []
324 fast = (m.files() == [pat])
324 fast = (m.files() == [pat])
325 for r in subset:
325 for r in subset:
326 c = repo[r]
326 c = repo[r]
327 if fast:
327 if fast:
328 if pat not in c.files():
328 if pat not in c.files():
329 continue
329 continue
330 else:
330 else:
331 for f in c.files():
331 for f in c.files():
332 if m(f):
332 if m(f):
333 break
333 break
334 else:
334 else:
335 continue
335 continue
336 files = repo.status(c.p1().node(), c.node())[field]
336 files = repo.status(c.p1().node(), c.node())[field]
337 if fast:
337 if fast:
338 if pat in files:
338 if pat in files:
339 s.append(r)
339 s.append(r)
340 continue
340 continue
341 else:
341 else:
342 for f in files:
342 for f in files:
343 if m(f):
343 if m(f):
344 s.append(r)
344 s.append(r)
345 continue
345 continue
346 return s
346 return s
347
347
348 def modifies(repo, subset, x):
348 def modifies(repo, subset, x):
349 pat = getstring(x, _("modifies wants a pattern"))
349 pat = getstring(x, _("modifies wants a pattern"))
350 return checkstatus(repo, subset, pat, 0)
350 return checkstatus(repo, subset, pat, 0)
351
351
352 def adds(repo, subset, x):
352 def adds(repo, subset, x):
353 pat = getstring(x, _("adds wants a pattern"))
353 pat = getstring(x, _("adds wants a pattern"))
354 return checkstatus(repo, subset, pat, 1)
354 return checkstatus(repo, subset, pat, 1)
355
355
356 def removes(repo, subset, x):
356 def removes(repo, subset, x):
357 pat = getstring(x, _("removes wants a pattern"))
357 pat = getstring(x, _("removes wants a pattern"))
358 return checkstatus(repo, subset, pat, 2)
358 return checkstatus(repo, subset, pat, 2)
359
359
360 def merge(repo, subset, x):
360 def merge(repo, subset, x):
361 getargs(x, 0, 0, _("merge takes no arguments"))
361 getargs(x, 0, 0, _("merge takes no arguments"))
362 cl = repo.changelog
362 cl = repo.changelog
363 return [r for r in subset if cl.parentrevs(r)[1] != -1]
363 return [r for r in subset if cl.parentrevs(r)[1] != -1]
364
364
365 def closed(repo, subset, x):
365 def closed(repo, subset, x):
366 getargs(x, 0, 0, _("closed takes no arguments"))
366 getargs(x, 0, 0, _("closed takes no arguments"))
367 return [r for r in subset if repo[r].extra().get('close')]
367 return [r for r in subset if repo[r].extra().get('close')]
368
368
369 def head(repo, subset, x):
369 def head(repo, subset, x):
370 getargs(x, 0, 0, _("head takes no arguments"))
370 getargs(x, 0, 0, _("head takes no arguments"))
371 hs = set()
371 hs = set()
372 for b, ls in repo.branchmap().iteritems():
372 for b, ls in repo.branchmap().iteritems():
373 hs.update(repo[h].rev() for h in ls)
373 hs.update(repo[h].rev() for h in ls)
374 return [r for r in subset if r in hs]
374 return [r for r in subset if r in hs]
375
375
376 def reverse(repo, subset, x):
376 def reverse(repo, subset, x):
377 l = getset(repo, subset, x)
377 l = getset(repo, subset, x)
378 l.reverse()
378 l.reverse()
379 return l
379 return l
380
380
381 def present(repo, subset, x):
381 def present(repo, subset, x):
382 try:
382 try:
383 return getset(repo, subset, x)
383 return getset(repo, subset, x)
384 except error.RepoLookupError:
384 except error.RepoLookupError:
385 return []
385 return []
386
386
387 def sort(repo, subset, x):
387 def sort(repo, subset, x):
388 l = getargs(x, 1, 2, _("sort wants one or two arguments"))
388 l = getargs(x, 1, 2, _("sort wants one or two arguments"))
389 keys = "rev"
389 keys = "rev"
390 if len(l) == 2:
390 if len(l) == 2:
391 keys = getstring(l[1], _("sort spec must be a string"))
391 keys = getstring(l[1], _("sort spec must be a string"))
392
392
393 s = l[0]
393 s = l[0]
394 keys = keys.split()
394 keys = keys.split()
395 l = []
395 l = []
396 def invert(s):
396 def invert(s):
397 return "".join(chr(255 - ord(c)) for c in s)
397 return "".join(chr(255 - ord(c)) for c in s)
398 for r in getset(repo, subset, s):
398 for r in getset(repo, subset, s):
399 c = repo[r]
399 c = repo[r]
400 e = []
400 e = []
401 for k in keys:
401 for k in keys:
402 if k == 'rev':
402 if k == 'rev':
403 e.append(r)
403 e.append(r)
404 elif k == '-rev':
404 elif k == '-rev':
405 e.append(-r)
405 e.append(-r)
406 elif k == 'branch':
406 elif k == 'branch':
407 e.append(c.branch())
407 e.append(c.branch())
408 elif k == '-branch':
408 elif k == '-branch':
409 e.append(invert(c.branch()))
409 e.append(invert(c.branch()))
410 elif k == 'desc':
410 elif k == 'desc':
411 e.append(c.description())
411 e.append(c.description())
412 elif k == '-desc':
412 elif k == '-desc':
413 e.append(invert(c.description()))
413 e.append(invert(c.description()))
414 elif k in 'user author':
414 elif k in 'user author':
415 e.append(c.user())
415 e.append(c.user())
416 elif k in '-user -author':
416 elif k in '-user -author':
417 e.append(invert(c.user()))
417 e.append(invert(c.user()))
418 elif k == 'date':
418 elif k == 'date':
419 e.append(c.date()[0])
419 e.append(c.date()[0])
420 elif k == '-date':
420 elif k == '-date':
421 e.append(-c.date()[0])
421 e.append(-c.date()[0])
422 else:
422 else:
423 raise error.ParseError(_("unknown sort key %r") % k)
423 raise error.ParseError(_("unknown sort key %r") % k)
424 e.append(r)
424 e.append(r)
425 l.append(e)
425 l.append(e)
426 l.sort()
426 l.sort()
427 return [e[-1] for e in l]
427 return [e[-1] for e in l]
428
428
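The invert() trick above in isolation: complementing each byte turns an ascending string sort into a descending one, which is how the '-branch', '-desc' and '-user' keys are handled:

def invert(s):
    return "".join(chr(255 - ord(c)) for c in s)

names = ["default", "stable", "crew"]
print(sorted(names, key=invert))      # ['stable', 'default', 'crew']
print(sorted(names, reverse=True))    # same order for these inputs
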
429 def getall(repo, subset, x):
429 def getall(repo, subset, x):
430 getargs(x, 0, 0, _("all takes no arguments"))
430 getargs(x, 0, 0, _("all takes no arguments"))
431 return subset
431 return subset
432
432
433 def heads(repo, subset, x):
433 def heads(repo, subset, x):
434 s = getset(repo, subset, x)
434 s = getset(repo, subset, x)
435 ps = set(parents(repo, subset, x))
435 ps = set(parents(repo, subset, x))
436 return [r for r in s if r not in ps]
436 return [r for r in s if r not in ps]
437
437
438 def roots(repo, subset, x):
438 def roots(repo, subset, x):
439 s = getset(repo, subset, x)
439 s = getset(repo, subset, x)
440 cs = set(children(repo, subset, x))
440 cs = set(children(repo, subset, x))
441 return [r for r in s if r not in cs]
441 return [r for r in s if r not in cs]
442
442
443 def outgoing(repo, subset, x):
443 def outgoing(repo, subset, x):
444 import hg # avoid start-up nasties
444 import hg # avoid start-up nasties
445 l = getargs(x, 0, 1, _("outgoing wants a repository path"))
445 l = getargs(x, 0, 1, _("outgoing wants a repository path"))
446 dest = l and getstring(l[0], _("outgoing wants a repository path")) or ''
446 dest = l and getstring(l[0], _("outgoing wants a repository path")) or ''
447 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
447 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
448 dest, branches = hg.parseurl(dest)
448 dest, branches = hg.parseurl(dest)
449 other = hg.repository(hg.remoteui(repo, {}), dest)
449 other = hg.repository(hg.remoteui(repo, {}), dest)
450 repo.ui.pushbuffer()
450 repo.ui.pushbuffer()
451 o = discovery.findoutgoing(repo, other)
451 o = discovery.findoutgoing(repo, other)
452 repo.ui.popbuffer()
452 repo.ui.popbuffer()
453 cl = repo.changelog
453 cl = repo.changelog
454 o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, None)[0]])
454 o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, None)[0]])
455 return [r for r in subset if r in o]
455 return [r for r in subset if r in o]
456
456
457 def tagged(repo, subset, x):
457 def tagged(repo, subset, x):
458 getargs(x, 0, 0, _("tagged takes no arguments"))
458 getargs(x, 0, 0, _("tagged takes no arguments"))
459 cl = repo.changelog
459 cl = repo.changelog
460 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
460 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
461 return [r for r in subset if r in s]
461 return [r for r in subset if r in s]
462
462
463 symbols = {
463 symbols = {
464 "adds": adds,
464 "adds": adds,
465 "all": getall,
465 "all": getall,
466 "ancestor": ancestor,
466 "ancestor": ancestor,
467 "ancestors": ancestors,
467 "ancestors": ancestors,
468 "author": author,
468 "author": author,
469 "branch": branch,
469 "branch": branch,
470 "children": children,
470 "children": children,
471 "closed": closed,
471 "closed": closed,
472 "contains": contains,
472 "contains": contains,
473 "date": date,
473 "date": date,
474 "descendants": descendants,
474 "descendants": descendants,
475 "file": hasfile,
475 "file": hasfile,
476 "follow": follow,
476 "follow": follow,
477 "grep": grep,
477 "grep": grep,
478 "head": head,
478 "head": head,
479 "heads": heads,
479 "heads": heads,
480 "keyword": keyword,
480 "keyword": keyword,
481 "limit": limit,
481 "limit": limit,
482 "max": maxrev,
482 "max": maxrev,
483 "min": minrev,
483 "min": minrev,
484 "merge": merge,
484 "merge": merge,
485 "modifies": modifies,
485 "modifies": modifies,
486 "outgoing": outgoing,
486 "outgoing": outgoing,
487 "p1": p1,
487 "p1": p1,
488 "p2": p2,
488 "p2": p2,
489 "parents": parents,
489 "parents": parents,
490 "present": present,
490 "present": present,
491 "removes": removes,
491 "removes": removes,
492 "reverse": reverse,
492 "reverse": reverse,
493 "roots": roots,
493 "roots": roots,
494 "sort": sort,
494 "sort": sort,
495 "tagged": tagged,
495 "tagged": tagged,
496 "user": author,
496 "user": author,
497 }
497 }
498
498
499 methods = {
499 methods = {
500 "range": rangeset,
500 "range": rangeset,
501 "string": stringset,
501 "string": stringset,
502 "symbol": symbolset,
502 "symbol": symbolset,
503 "and": andset,
503 "and": andset,
504 "or": orset,
504 "or": orset,
505 "not": notset,
505 "not": notset,
506 "list": listset,
506 "list": listset,
507 "func": func,
507 "func": func,
508 }
508 }
509
509
510 def optimize(x, small):
510 def optimize(x, small):
511 if x == None:
511 if x == None:
512 return 0, x
512 return 0, x
513
513
514 smallbonus = 1
514 smallbonus = 1
515 if small:
515 if small:
516 smallbonus = .5
516 smallbonus = .5
517
517
518 op = x[0]
518 op = x[0]
519 if op == 'minus':
519 if op == 'minus':
520 return optimize(('and', x[1], ('not', x[2])), small)
520 return optimize(('and', x[1], ('not', x[2])), small)
521 elif op == 'dagrange':
521 elif op == 'dagrange':
522 return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
522 return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
523 ('func', ('symbol', 'ancestors'), x[2])), small)
523 ('func', ('symbol', 'ancestors'), x[2])), small)
524 elif op == 'dagrangepre':
524 elif op == 'dagrangepre':
525 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
525 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
526 elif op == 'dagrangepost':
526 elif op == 'dagrangepost':
527 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
527 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
528 elif op == 'rangepre':
528 elif op == 'rangepre':
529 return optimize(('range', ('string', '0'), x[1]), small)
529 return optimize(('range', ('string', '0'), x[1]), small)
530 elif op == 'rangepost':
530 elif op == 'rangepost':
531 return optimize(('range', x[1], ('string', 'tip')), small)
531 return optimize(('range', x[1], ('string', 'tip')), small)
532 elif op == 'negate':
532 elif op == 'negate':
533 return optimize(('string',
533 return optimize(('string',
534 '-' + getstring(x[1], _("can't negate that"))), small)
534 '-' + getstring(x[1], _("can't negate that"))), small)
535 elif op in 'string symbol negate':
535 elif op in 'string symbol negate':
536 return smallbonus, x # single revisions are small
536 return smallbonus, x # single revisions are small
537 elif op == 'and' or op == 'dagrange':
537 elif op == 'and' or op == 'dagrange':
538 wa, ta = optimize(x[1], True)
538 wa, ta = optimize(x[1], True)
539 wb, tb = optimize(x[2], True)
539 wb, tb = optimize(x[2], True)
540 w = min(wa, wb)
540 w = min(wa, wb)
541 if wa > wb:
541 if wa > wb:
542 return w, (op, tb, ta)
542 return w, (op, tb, ta)
543 return w, (op, ta, tb)
543 return w, (op, ta, tb)
544 elif op == 'or':
544 elif op == 'or':
545 wa, ta = optimize(x[1], False)
545 wa, ta = optimize(x[1], False)
546 wb, tb = optimize(x[2], False)
546 wb, tb = optimize(x[2], False)
547 if wb < wa:
547 if wb < wa:
548 wb, wa = wa, wb
548 wb, wa = wa, wb
549 return max(wa, wb), (op, ta, tb)
549 return max(wa, wb), (op, ta, tb)
550 elif op == 'not':
550 elif op == 'not':
551 o = optimize(x[1], not small)
551 o = optimize(x[1], not small)
552 return o[0], (op, o[1])
552 return o[0], (op, o[1])
553 elif op == 'group':
553 elif op == 'group':
554 return optimize(x[1], small)
554 return optimize(x[1], small)
555 elif op in 'range list':
555 elif op in 'range list':
556 wa, ta = optimize(x[1], small)
556 wa, ta = optimize(x[1], small)
557 wb, tb = optimize(x[2], small)
557 wb, tb = optimize(x[2], small)
558 return wa + wb, (op, ta, tb)
558 return wa + wb, (op, ta, tb)
559 elif op == 'func':
559 elif op == 'func':
560 f = getstring(x[1], _("not a symbol"))
560 f = getstring(x[1], _("not a symbol"))
561 wa, ta = optimize(x[2], small)
561 wa, ta = optimize(x[2], small)
562 if f in "grep date user author keyword branch file":
562 if f in "grep date user author keyword branch file":
563 w = 10 # slow
563 w = 10 # slow
564 elif f in "modifies adds removes outgoing":
564 elif f in "modifies adds removes outgoing":
565 w = 30 # slower
565 w = 30 # slower
566 elif f == "contains":
566 elif f == "contains":
567 w = 100 # very slow
567 w = 100 # very slow
568 elif f == "ancestor":
568 elif f == "ancestor":
569 w = 1 * smallbonus
569 w = 1 * smallbonus
570 elif f == "reverse limit":
570 elif f == "reverse limit":
571 w = 0
571 w = 0
572 elif f in "sort":
572 elif f in "sort":
573 w = 10 # assume most sorts look at changelog
573 w = 10 # assume most sorts look at changelog
574 else:
574 else:
575 w = 1
575 w = 1
576 return w + wa, (op, x[1], ta)
576 return w + wa, (op, x[1], ta)
577 return 1, x
577 return 1, x
578
578
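A worked example of the rewriting above: 'A - B' becomes 'A and not B', and an 'and' keeps its cheaper operand first so it narrows the subset before the expensive side runs. The result below follows from the code as written:

tree = ('minus', ('symbol', 'tip'), ('symbol', '0'))
print(optimize(tree, True))
# (0.5, ('and', ('symbol', 'tip'), ('not', ('symbol', '0'))))
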
579 parse = parser.parser(tokenize, elements).parse
579 parse = parser.parser(tokenize, elements).parse
580
580
581 def match(spec):
581 def match(spec):
582 if not spec:
582 if not spec:
583 raise error.ParseError(_("empty query"))
583 raise error.ParseError(_("empty query"))
584 tree = parse(spec)
584 tree = parse(spec)
585 weight, tree = optimize(tree, True)
585 weight, tree = optimize(tree, True)
586 def mfunc(repo, subset):
586 def mfunc(repo, subset):
587 return getset(repo, subset, tree)
587 return getset(repo, subset, tree)
588 return mfunc
588 return mfunc
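A hedged end-to-end sketch, assuming a local repository and this era's Mercurial API: match() compiles a query string into a function that filters a subset of revision numbers against a repo (the query below is only illustrative):

from mercurial import hg, ui as uimod, revset

repo = hg.repository(uimod.ui(), '.')
mfunc = revset.match("head() and not closed()")
print(mfunc(repo, range(len(repo))))   # the matching revision numbers
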
@@ -1,326 +1,326
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import repo, error, encoding, util, store
12 import repo, error, encoding, util, store
13 import pushkey as pushkey_
13 import pushkey as pushkeymod
14
14
15 # list of nodes encoding / decoding
15 # list of nodes encoding / decoding
16
16
17 def decodelist(l, sep=' '):
17 def decodelist(l, sep=' '):
18 return map(bin, l.split(sep))
18 return map(bin, l.split(sep))
19
19
20 def encodelist(l, sep=' '):
20 def encodelist(l, sep=' '):
21 return sep.join(map(hex, l))
21 return sep.join(map(hex, l))
22
22
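A round-trip sketch for the two helpers above: node ids travel on the wire as space-separated 40-character hex strings and come back as 20-byte binary ids (bin() is the standard Mercurial helper; the node ids below are fake):

from mercurial.node import bin

hexnodes = ["11" * 20, "aa" * 20]
binnodes = [bin(n) for n in hexnodes]
wire = encodelist(binnodes)          # '1111...1 aaaa...a'
assert decodelist(wire) == binnodes
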
23 # client side
23 # client side
24
24
25 class wirerepository(repo.repository):
25 class wirerepository(repo.repository):
26 def lookup(self, key):
26 def lookup(self, key):
27 self.requirecap('lookup', _('look up remote revision'))
27 self.requirecap('lookup', _('look up remote revision'))
28 d = self._call("lookup", key=key)
28 d = self._call("lookup", key=key)
29 success, data = d[:-1].split(" ", 1)
29 success, data = d[:-1].split(" ", 1)
30 if int(success):
30 if int(success):
31 return bin(data)
31 return bin(data)
32 self._abort(error.RepoError(data))
32 self._abort(error.RepoError(data))
33
33
34 def heads(self):
34 def heads(self):
35 d = self._call("heads")
35 d = self._call("heads")
36 try:
36 try:
37 return decodelist(d[:-1])
37 return decodelist(d[:-1])
38 except:
38 except:
39 self._abort(error.ResponseError(_("unexpected response:"), d))
39 self._abort(error.ResponseError(_("unexpected response:"), d))
40
40
41 def branchmap(self):
41 def branchmap(self):
42 d = self._call("branchmap")
42 d = self._call("branchmap")
43 try:
43 try:
44 branchmap = {}
44 branchmap = {}
45 for branchpart in d.splitlines():
45 for branchpart in d.splitlines():
46 branchname, branchheads = branchpart.split(' ', 1)
46 branchname, branchheads = branchpart.split(' ', 1)
47 branchname = urllib.unquote(branchname)
47 branchname = urllib.unquote(branchname)
48 # Earlier servers (1.3.x) send branch names in (their) local
48 # Earlier servers (1.3.x) send branch names in (their) local
49 # charset. The best we can do is assume it's identical to our
49 # charset. The best we can do is assume it's identical to our
50 # own local charset, in case it's not utf-8.
50 # own local charset, in case it's not utf-8.
51 try:
51 try:
52 branchname.decode('utf-8')
52 branchname.decode('utf-8')
53 except UnicodeDecodeError:
53 except UnicodeDecodeError:
54 branchname = encoding.fromlocal(branchname)
54 branchname = encoding.fromlocal(branchname)
55 branchheads = decodelist(branchheads)
55 branchheads = decodelist(branchheads)
56 branchmap[branchname] = branchheads
56 branchmap[branchname] = branchheads
57 return branchmap
57 return branchmap
58 except TypeError:
58 except TypeError:
59 self._abort(error.ResponseError(_("unexpected response:"), d))
59 self._abort(error.ResponseError(_("unexpected response:"), d))

    def branches(self, nodes):
        n = encodelist(nodes)
        d = self._call("branches", nodes=n)
        try:
            br = [tuple(decodelist(b)) for b in d.splitlines()]
            return br
        except:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def between(self, pairs):
        batch = 8 # avoid giant requests
        r = []
        for i in xrange(0, len(pairs), batch):
            n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
            d = self._call("between", pairs=n)
            try:
                r.extend(l and decodelist(l) or [] for l in d.splitlines())
            except:
                self._abort(error.ResponseError(_("unexpected response:"), d))
        return r
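
    # Each pair is encoded as "<hex base>-<hex head>" (encodelist with the
    # '-' separator) and pairs are space-joined, eight per request, to keep
    # individual calls small. The reply carries one line per pair listing
    # the nodes that lie between the two endpoints.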

    def pushkey(self, namespace, key, old, new):
        if not self.capable('pushkey'):
            return False
        d = self._call("pushkey",
                       namespace=namespace, key=key, old=old, new=new)
        return bool(int(d))

    def listkeys(self, namespace):
        if not self.capable('pushkey'):
            return {}
        d = self._call("listkeys", namespace=namespace)
        r = {}
        for l in d.splitlines():
            k, v = l.split('\t')
            r[k.decode('string-escape')] = v.decode('string-escape')
        return r
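
    # Wire format sketch: the "listkeys" reply has one "key\tvalue" line per
    # entry, both fields string-escape encoded (see the server-side
    # listkeys() handler below). For a hypothetical namespace a reply might
    # look like (illustrative):
    #
    #   somekey\tsomevalue\n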

    def stream_out(self):
        return self._callstream('stream_out')

    def changegroup(self, nodes, kind):
        n = encodelist(nodes)
        f = self._callstream("changegroup", roots=n)
        return self._decompress(f)

    def changegroupsubset(self, bases, heads, kind):
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = encodelist(bases)
        heads = encodelist(heads)
        return self._decompress(self._callstream("changegroupsubset",
                                                 bases=bases, heads=heads))

    def unbundle(self, cg, heads, source):
        '''Send cg (a readable file-like object representing the
        changegroup to push, typically a chunkbuffer object) to the
        remote server as a bundle. Return an integer indicating the
        result of the push (see localrepository.addchangegroup()).'''

        ret, output = self._callpush("unbundle", cg, heads=encodelist(heads))
        if ret == "":
            raise error.ResponseError(
                _('push failed:'), output)
        try:
            ret = int(ret)
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), ret)

        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
        return ret

# server side

class streamres(object):
    def __init__(self, gen):
        self.gen = gen

class pushres(object):
    def __init__(self, res):
        self.res = res

def dispatch(repo, proto, command):
    func, spec = commands[command]
    args = proto.getargs(spec)
    return func(repo, proto, *args)
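
# dispatch() is driven by the `commands` table defined at the bottom of this
# module: each wire command name maps to (handler, argument spec), the
# protocol object pulls the named arguments with getargs(spec), and the
# handler returns either a plain string or a streamres/pushres wrapper.
# For example, dispatch(repo, proto, 'listkeys') fetches the single
# 'namespace' argument from proto and calls listkeys(repo, proto, namespace).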

def between(repo, proto, pairs):
    pairs = [decodelist(p, '-') for p in pairs.split(" ")]
    r = []
    for b in repo.between(pairs):
        r.append(encodelist(b) + "\n")
    return "".join(r)

def branchmap(repo, proto):
    branchmap = repo.branchmap()
    heads = []
    for branch, nodes in branchmap.iteritems():
        branchname = urllib.quote(branch)
        branchnodes = encodelist(nodes)
        heads.append('%s %s' % (branchname, branchnodes))
    return '\n'.join(heads)

def branches(repo, proto, nodes):
    nodes = decodelist(nodes)
    r = []
    for b in repo.branches(nodes):
        r.append(encodelist(b) + "\n")
    return "".join(r)

def capabilities(repo, proto):
    caps = 'lookup changegroupsubset branchmap pushkey'.split()
    if _allowstream(repo.ui):
        caps.append('stream=%d' % repo.changelog.version)
    caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
    return ' '.join(caps)
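
# Illustrative capabilities string for a server that permits uncompressed
# streaming; the unbundle priorities come from changegroupmod.bundlepriority
# and the exact values may differ:
#
#   lookup changegroupsubset branchmap pushkey stream=1 unbundle=HG10GZ,HG10BZ,HG10UN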

def changegroup(repo, proto, roots):
    nodes = decodelist(roots)
    cg = repo.changegroup(nodes, 'serve')
    return streamres(proto.groupchunks(cg))

def changegroupsubset(repo, proto, bases, heads):
    bases = decodelist(bases)
    heads = decodelist(heads)
    cg = repo.changegroupsubset(bases, heads, 'serve')
    return streamres(proto.groupchunks(cg))

def heads(repo, proto):
    h = repo.heads()
    return encodelist(h) + "\n"

def hello(repo, proto):
    '''the hello command returns a set of lines describing various
    interesting things about the server, in an RFC822-like format.
    Currently the only one defined is "capabilities", which
    consists of a line in the form:

    capabilities: space separated list of tokens
    '''
    return "capabilities: %s\n" % (capabilities(repo, proto))
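
# An illustrative hello reply is therefore a single RFC822-style line whose
# token list is whatever capabilities() above produced:
#
#   capabilities: lookup changegroupsubset branchmap pushkey\n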

def listkeys(repo, proto, namespace):
    d = pushkeymod.list(repo, namespace).items()
    t = '\n'.join(['%s\t%s' % (k.encode('string-escape'),
                               v.encode('string-escape')) for k, v in d])
    return t

def lookup(repo, proto, key):
    try:
        r = hex(repo.lookup(key))
        success = 1
    except Exception, inst:
        r = str(inst)
        success = 0
    return "%s %s\n" % (success, r)

def pushkey(repo, proto, namespace, key, old, new):
    r = pushkeymod.push(repo, namespace, key, old, new)
    return '%s\n' % int(r)

def _allowstream(ui):
    return ui.configbool('server', 'uncompressed', True, untrusted=True)

def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. The client checks to see if it understands the format.

    The format is simple: the server writes out a line with the number
    of files, then the total number of bytes to be transferred (separated
    by a space). Then, for each file, the server first writes the filename
    and filesize (separated by the null character), then the file contents.
    '''

    if not _allowstream(repo.ui):
        return '1\n'

    entries = []
    total_bytes = 0
    try:
        # get consistent snapshot of repo, lock during scan
        lock = repo.lock()
        try:
            repo.ui.debug('scanning\n')
            for name, ename, size in repo.store.walk():
                entries.append((name, size))
                total_bytes += size
        finally:
            lock.release()
    except error.LockError:
        return '2\n' # error: 2

    def streamer(repo, entries, total):
        '''stream out all metadata files in repository.'''
        yield '0\n' # success
        repo.ui.debug('%d files, %d bytes to transfer\n' %
                      (len(entries), total_bytes))
        yield '%d %d\n' % (len(entries), total_bytes)
        for name, size in entries:
            repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
            # partially encode name over the wire for backwards compat
            yield '%s\0%d\n' % (store.encodedir(name), size)
            for chunk in util.filechunkiter(repo.sopener(name), limit=size):
                yield chunk

    return streamres(streamer(repo, entries, total_bytes))
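
# Sketch of the resulting stream, following streamer() above (the file names
# and sizes are illustrative, <NUL> stands for the null byte):
#
#   0\n                          <- success marker
#   2 8192\n                     <- number of files, total bytes
#   data/foo.i<NUL>4096\n        <- encoded store path, file size
#   ...4096 bytes of raw file data...
#   00changelog.i<NUL>4096\n
#   ...4096 bytes of raw file data...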

def unbundle(repo, proto, heads):
    their_heads = decodelist(heads)

    def check_heads():
        heads = repo.heads()
        return their_heads == ['force'] or their_heads == heads

    # fail early if possible
    if not check_heads():
        return 'unsynced changes'

    # write bundle data to temporary file because it can be big
    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    r = 0
    proto.redirect()
    try:
        proto.getfile(fp)
        lock = repo.lock()
        try:
            if not check_heads():
                # someone else committed/pushed/unbundled while we
                # were transferring data
                return 'unsynced changes'

            # push can proceed
            fp.seek(0)
            gen = changegroupmod.readbundle(fp, None)

            try:
                r = repo.addchangegroup(gen, 'serve', proto._client(),
                                        lock=lock)
            except util.Abort, inst:
                sys.stderr.write("abort: %s\n" % inst)
        finally:
            lock.release()
        return pushres(r)

    finally:
        fp.close()
        os.unlink(tempname)

commands = {
    'between': (between, 'pairs'),
    'branchmap': (branchmap, ''),
    'branches': (branches, 'nodes'),
    'capabilities': (capabilities, ''),
    'changegroup': (changegroup, 'roots'),
    'changegroupsubset': (changegroupsubset, 'bases heads'),
    'heads': (heads, ''),
    'hello': (hello, ''),
    'listkeys': (listkeys, 'namespace'),
    'lookup': (lookup, 'key'),
    'pushkey': (pushkey, 'namespace key old new'),
    'stream_out': (stream, ''),
    'unbundle': (unbundle, 'heads'),
}
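
# A transport (HTTP or ssh) serves these commands by reading a command name
# and its arguments off the wire and handing them to dispatch() above. A
# hypothetical sequence for serving "heads" is roughly:
#
#   func, spec = commands['heads']   # (heads, '') - no arguments declared
#   args = proto.getargs(spec)       # []
#   resp = func(repo, proto)         # "<space-separated hex heads>\n"
#
# The protocol layer then ships plain string results, streamres generators,
# or pushres values back to the client in its own framing.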