merge with stable
Matt Mackall
r20829:9a09a625 merge default
@@ -1,897 +1,901 @@ mercurial/dispatch.py
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
10 10 import util, commands, hg, fancyopts, extensions, hook, error
11 11 import cmdutil, encoding
12 12 import ui as uimod
13 13
14 14 class request(object):
15 15 def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
16 16 ferr=None):
17 17 self.args = args
18 18 self.ui = ui
19 19 self.repo = repo
20 20
21 21 # input/output/error streams
22 22 self.fin = fin
23 23 self.fout = fout
24 24 self.ferr = ferr
25 25
26 26 def run():
27 27 "run the command in sys.argv"
28 28 sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255)
29 29
30 30 def dispatch(req):
31 31 "run the command specified in req.args"
32 32 if req.ferr:
33 33 ferr = req.ferr
34 34 elif req.ui:
35 35 ferr = req.ui.ferr
36 36 else:
37 37 ferr = sys.stderr
38 38
39 39 try:
40 40 if not req.ui:
41 41 req.ui = uimod.ui()
42 42 if '--traceback' in req.args:
43 43 req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
44 44
45 45 # set ui streams from the request
46 46 if req.fin:
47 47 req.ui.fin = req.fin
48 48 if req.fout:
49 49 req.ui.fout = req.fout
50 50 if req.ferr:
51 51 req.ui.ferr = req.ferr
52 52 except util.Abort, inst:
53 53 ferr.write(_("abort: %s\n") % inst)
54 54 if inst.hint:
55 55 ferr.write(_("(%s)\n") % inst.hint)
56 56 return -1
57 57 except error.ParseError, inst:
58 58 if len(inst.args) > 1:
59 59 ferr.write(_("hg: parse error at %s: %s\n") %
60 60 (inst.args[1], inst.args[0]))
61 61 else:
62 62 ferr.write(_("hg: parse error: %s\n") % inst.args[0])
63 63 return -1
64 64
65 65 msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
66 66 starttime = time.time()
67 67 ret = None
68 68 try:
69 69 ret = _runcatch(req)
70 70 return ret
71 71 finally:
72 72 duration = time.time() - starttime
73 73 req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
74 74 msg, ret or 0, duration)
75 75
76 76 def _runcatch(req):
77 77 def catchterm(*args):
78 78 raise error.SignalInterrupt
79 79
80 80 ui = req.ui
81 81 try:
82 82 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
83 83 num = getattr(signal, name, None)
84 84 if num:
85 85 signal.signal(num, catchterm)
86 86 except ValueError:
87 87 pass # happens if called in a thread
88 88
89 89 try:
90 90 try:
91 91 debugger = 'pdb'
92 92 debugtrace = {
93 93 'pdb' : pdb.set_trace
94 94 }
95 95 debugmortem = {
96 96 'pdb' : pdb.post_mortem
97 97 }
98 98
99 99 # read --config before doing anything else
100 100 # (e.g. to change trust settings for reading .hg/hgrc)
101 101 cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))
102 102
103 103 if req.repo:
104 104 # copy configs that were passed on the cmdline (--config) to
105 105 # the repo ui
106 106 for sec, name, val in cfgs:
107 107 req.repo.ui.setconfig(sec, name, val, source='--config')
108 108
109 109 # if we are in HGPLAIN mode, then disable custom debugging
110 110 debugger = ui.config("ui", "debugger")
111 debugmod = pdb
111 112 if not debugger or ui.plain():
112 113 debugger = 'pdb'
113
114 try:
115 debugmod = __import__(debugger)
116 except ImportError:
117 debugmod = pdb
114 elif '--debugger' in req.args:
115 # This import can be slow for fancy debuggers, so only
116 # do it when absolutely necessary, i.e. when actual
117 # debugging has been requested
118 try:
119 debugmod = __import__(debugger)
120 except ImportError:
121 pass # Leave debugmod = pdb
118 122
119 123 debugtrace[debugger] = debugmod.set_trace
120 124 debugmortem[debugger] = debugmod.post_mortem
121 125
122 126 # enter the debugger before command execution
123 127 if '--debugger' in req.args:
124 128 ui.warn(_("entering debugger - "
125 129 "type c to continue starting hg or h for help\n"))
126 130
127 131 if (debugger != 'pdb' and
128 132 debugtrace[debugger] == debugtrace['pdb']):
129 133 ui.warn(_("%s debugger specified "
130 134 "but its module was not found\n") % debugger)
131 135
132 136 debugtrace[debugger]()
133 137 try:
134 138 return _dispatch(req)
135 139 finally:
136 140 ui.flush()
137 141 except: # re-raises
138 142 # enter the debugger when we hit an exception
139 143 if '--debugger' in req.args:
140 144 traceback.print_exc()
141 145 debugmortem[debugger](sys.exc_info()[2])
142 146 ui.traceback()
143 147 raise
144 148
145 149 # Global exception handling, alphabetically
146 150 # Mercurial-specific first, followed by built-in and library exceptions
147 151 except error.AmbiguousCommand, inst:
148 152 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
149 153 (inst.args[0], " ".join(inst.args[1])))
150 154 except error.ParseError, inst:
151 155 if len(inst.args) > 1:
152 156 ui.warn(_("hg: parse error at %s: %s\n") %
153 157 (inst.args[1], inst.args[0]))
154 158 else:
155 159 ui.warn(_("hg: parse error: %s\n") % inst.args[0])
156 160 return -1
157 161 except error.LockHeld, inst:
158 162 if inst.errno == errno.ETIMEDOUT:
159 163 reason = _('timed out waiting for lock held by %s') % inst.locker
160 164 else:
161 165 reason = _('lock held by %s') % inst.locker
162 166 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
163 167 except error.LockUnavailable, inst:
164 168 ui.warn(_("abort: could not lock %s: %s\n") %
165 169 (inst.desc or inst.filename, inst.strerror))
166 170 except error.CommandError, inst:
167 171 if inst.args[0]:
168 172 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
169 173 commands.help_(ui, inst.args[0], full=False, command=True)
170 174 else:
171 175 ui.warn(_("hg: %s\n") % inst.args[1])
172 176 commands.help_(ui, 'shortlist')
173 177 except error.OutOfBandError, inst:
174 178 ui.warn(_("abort: remote error:\n"))
175 179 ui.warn(''.join(inst.args))
176 180 except error.RepoError, inst:
177 181 ui.warn(_("abort: %s!\n") % inst)
178 182 if inst.hint:
179 183 ui.warn(_("(%s)\n") % inst.hint)
180 184 except error.ResponseError, inst:
181 185 ui.warn(_("abort: %s") % inst.args[0])
182 186 if not isinstance(inst.args[1], basestring):
183 187 ui.warn(" %r\n" % (inst.args[1],))
184 188 elif not inst.args[1]:
185 189 ui.warn(_(" empty string\n"))
186 190 else:
187 191 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
188 192 except error.RevlogError, inst:
189 193 ui.warn(_("abort: %s!\n") % inst)
190 194 except error.SignalInterrupt:
191 195 ui.warn(_("killed!\n"))
192 196 except error.UnknownCommand, inst:
193 197 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
194 198 try:
195 199 # check if the command is in a disabled extension
196 200 # (but don't check for extensions themselves)
197 201 commands.help_(ui, inst.args[0], unknowncmd=True)
198 202 except error.UnknownCommand:
199 203 commands.help_(ui, 'shortlist')
200 204 except error.InterventionRequired, inst:
201 205 ui.warn("%s\n" % inst)
202 206 return 1
203 207 except util.Abort, inst:
204 208 ui.warn(_("abort: %s\n") % inst)
205 209 if inst.hint:
206 210 ui.warn(_("(%s)\n") % inst.hint)
207 211 except ImportError, inst:
208 212 ui.warn(_("abort: %s!\n") % inst)
209 213 m = str(inst).split()[-1]
210 214 if m in "mpatch bdiff".split():
211 215 ui.warn(_("(did you forget to compile extensions?)\n"))
212 216 elif m in "zlib".split():
213 217 ui.warn(_("(is your Python install correct?)\n"))
214 218 except IOError, inst:
215 219 if util.safehasattr(inst, "code"):
216 220 ui.warn(_("abort: %s\n") % inst)
217 221 elif util.safehasattr(inst, "reason"):
218 222 try: # usually it is in the form (errno, strerror)
219 223 reason = inst.reason.args[1]
220 224 except (AttributeError, IndexError):
221 225 # it might be anything, for example a string
222 226 reason = inst.reason
223 227 ui.warn(_("abort: error: %s\n") % reason)
224 228 elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
225 229 if ui.debugflag:
226 230 ui.warn(_("broken pipe\n"))
227 231 elif getattr(inst, "strerror", None):
228 232 if getattr(inst, "filename", None):
229 233 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
230 234 else:
231 235 ui.warn(_("abort: %s\n") % inst.strerror)
232 236 else:
233 237 raise
234 238 except OSError, inst:
235 239 if getattr(inst, "filename", None) is not None:
236 240 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
237 241 else:
238 242 ui.warn(_("abort: %s\n") % inst.strerror)
239 243 except KeyboardInterrupt:
240 244 try:
241 245 ui.warn(_("interrupted!\n"))
242 246 except IOError, inst:
243 247 if inst.errno == errno.EPIPE:
244 248 if ui.debugflag:
245 249 ui.warn(_("\nbroken pipe\n"))
246 250 else:
247 251 raise
248 252 except MemoryError:
249 253 ui.warn(_("abort: out of memory\n"))
250 254 except SystemExit, inst:
251 255 # Commands shouldn't sys.exit directly, but give a return code.
252 256 # Just in case, catch this and pass the exit code to the caller.
253 257 return inst.code
254 258 except socket.error, inst:
255 259 ui.warn(_("abort: %s\n") % inst.args[-1])
256 260 except: # re-raises
257 261 myver = util.version()
258 262 # For compatibility checking, we discard the portion of the hg
259 263 # version after the + on the assumption that if a "normal
260 264 # user" is running a build with a + in it the packager
261 265 # probably built from fairly close to a tag and anyone with a
262 266 # 'make local' copy of hg (where the version number can be out
263 267 # of date) will be clueful enough to notice the implausible
264 268 # version number and try updating.
265 269 compare = myver.split('+')[0]
266 270 ct = tuplever(compare)
267 271 worst = None, ct, ''
268 272 for name, mod in extensions.extensions():
269 273 testedwith = getattr(mod, 'testedwith', '')
270 274 report = getattr(mod, 'buglink', _('the extension author.'))
271 275 if not testedwith.strip():
272 276 # We found an untested extension. It's likely the culprit.
273 277 worst = name, 'unknown', report
274 278 break
275 279 if compare not in testedwith.split() and testedwith != 'internal':
276 280 tested = [tuplever(v) for v in testedwith.split()]
277 281 lower = [t for t in tested if t < ct]
278 282 nearest = max(lower or tested)
279 283 if worst[0] is None or nearest < worst[1]:
280 284 worst = name, nearest, report
281 285 if worst[0] is not None:
282 286 name, testedwith, report = worst
283 287 if not isinstance(testedwith, str):
284 288 testedwith = '.'.join([str(c) for c in testedwith])
285 289 warning = (_('** Unknown exception encountered with '
286 290 'possibly-broken third-party extension %s\n'
287 291 '** which supports versions %s of Mercurial.\n'
288 292 '** Please disable %s and try your action again.\n'
289 293 '** If that fixes the bug please report it to %s\n')
290 294 % (name, testedwith, name, report))
291 295 else:
292 296 warning = (_("** unknown exception encountered, "
293 297 "please report by visiting\n") +
294 298 _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
295 299 warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
296 300 (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
297 301 (_("** Extensions loaded: %s\n") %
298 302 ", ".join([x[0] for x in extensions.extensions()])))
299 303 ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
300 304 ui.warn(warning)
301 305 raise
302 306
303 307 return -1
304 308
305 309 def tuplever(v):
306 310 try:
307 311 return tuple([int(i) for i in v.split('.')])
308 312 except ValueError:
309 313 return tuple()
310 314
311 315 def aliasargs(fn, givenargs):
312 316 args = getattr(fn, 'args', [])
313 317 if args:
314 318 cmd = ' '.join(map(util.shellquote, args))
315 319
316 320 nums = []
317 321 def replacer(m):
318 322 num = int(m.group(1)) - 1
319 323 nums.append(num)
320 324 if num < len(givenargs):
321 325 return givenargs[num]
322 326 raise util.Abort(_('too few arguments for command alias'))
323 327 cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
324 328 givenargs = [x for i, x in enumerate(givenargs)
325 329 if i not in nums]
326 330 args = shlex.split(cmd)
327 331 return args + givenargs
328 332
329 333 class cmdalias(object):
330 334 def __init__(self, name, definition, cmdtable):
331 335 self.name = self.cmd = name
332 336 self.cmdname = ''
333 337 self.definition = definition
334 338 self.args = []
335 339 self.opts = []
336 340 self.help = ''
337 341 self.norepo = True
338 342 self.optionalrepo = False
339 343 self.badalias = False
340 344
341 345 try:
342 346 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
343 347 for alias, e in cmdtable.iteritems():
344 348 if e is entry:
345 349 self.cmd = alias
346 350 break
347 351 self.shadows = True
348 352 except error.UnknownCommand:
349 353 self.shadows = False
350 354
351 355 if not self.definition:
352 356 def fn(ui, *args):
353 357 ui.warn(_("no definition for alias '%s'\n") % self.name)
354 358 return 1
355 359 self.fn = fn
356 360 self.badalias = True
357 361 return
358 362
359 363 if self.definition.startswith('!'):
360 364 self.shell = True
361 365 def fn(ui, *args):
362 366 env = {'HG_ARGS': ' '.join((self.name,) + args)}
363 367 def _checkvar(m):
364 368 if m.groups()[0] == '$':
365 369 return m.group()
366 370 elif int(m.groups()[0]) <= len(args):
367 371 return m.group()
368 372 else:
369 373 ui.debug("No argument found for substitution "
370 374 "of %i variable in alias '%s' definition."
371 375 % (int(m.groups()[0]), self.name))
372 376 return ''
373 377 cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
374 378 replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
375 379 replace['0'] = self.name
376 380 replace['@'] = ' '.join(args)
377 381 cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True)
378 382 return util.system(cmd, environ=env, out=ui.fout)
379 383 self.fn = fn
380 384 return
381 385
382 386 args = shlex.split(self.definition)
383 387 self.cmdname = cmd = args.pop(0)
384 388 args = map(util.expandpath, args)
385 389
386 390 for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
387 391 if _earlygetopt([invalidarg], args):
388 392 def fn(ui, *args):
389 393 ui.warn(_("error in definition for alias '%s': %s may only "
390 394 "be given on the command line\n")
391 395 % (self.name, invalidarg))
392 396 return 1
393 397
394 398 self.fn = fn
395 399 self.badalias = True
396 400 return
397 401
398 402 try:
399 403 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
400 404 if len(tableentry) > 2:
401 405 self.fn, self.opts, self.help = tableentry
402 406 else:
403 407 self.fn, self.opts = tableentry
404 408
405 409 self.args = aliasargs(self.fn, args)
406 410 if cmd not in commands.norepo.split(' '):
407 411 self.norepo = False
408 412 if cmd in commands.optionalrepo.split(' '):
409 413 self.optionalrepo = True
410 414 if self.help.startswith("hg " + cmd):
411 415 # drop prefix in old-style help lines so hg shows the alias
412 416 self.help = self.help[4 + len(cmd):]
413 417 self.__doc__ = self.fn.__doc__
414 418
415 419 except error.UnknownCommand:
416 420 def fn(ui, *args):
417 421 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
418 422 % (self.name, cmd))
419 423 try:
420 424 # check if the command is in a disabled extension
421 425 commands.help_(ui, cmd, unknowncmd=True)
422 426 except error.UnknownCommand:
423 427 pass
424 428 return 1
425 429 self.fn = fn
426 430 self.badalias = True
427 431 except error.AmbiguousCommand:
428 432 def fn(ui, *args):
429 433 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
430 434 % (self.name, cmd))
431 435 return 1
432 436 self.fn = fn
433 437 self.badalias = True
434 438
435 439 def __call__(self, ui, *args, **opts):
436 440 if self.shadows:
437 441 ui.debug("alias '%s' shadows command '%s'\n" %
438 442 (self.name, self.cmdname))
439 443
440 444 if util.safehasattr(self, 'shell'):
441 445 return self.fn(ui, *args, **opts)
442 446 else:
443 447 try:
444 448 util.checksignature(self.fn)(ui, *args, **opts)
445 449 except error.SignatureError:
446 450 args = ' '.join([self.cmdname] + self.args)
447 451 ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
448 452 raise
449 453
450 454 def addaliases(ui, cmdtable):
451 455 # aliases are processed after extensions have been loaded, so they
452 456 # may use extension commands. Aliases can also use other alias definitions,
453 457 # but only if they have been defined prior to the current definition.
454 458 for alias, definition in ui.configitems('alias'):
455 459 aliasdef = cmdalias(alias, definition, cmdtable)
456 460
457 461 try:
458 462 olddef = cmdtable[aliasdef.cmd][0]
459 463 if olddef.definition == aliasdef.definition:
460 464 continue
461 465 except (KeyError, AttributeError):
462 466 # definition might not exist or it might not be a cmdalias
463 467 pass
464 468
465 469 cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
466 470 if aliasdef.norepo:
467 471 commands.norepo += ' %s' % alias
468 472 if aliasdef.optionalrepo:
469 473 commands.optionalrepo += ' %s' % alias
470 474
471 475 def _parse(ui, args):
472 476 options = {}
473 477 cmdoptions = {}
474 478
475 479 try:
476 480 args = fancyopts.fancyopts(args, commands.globalopts, options)
477 481 except fancyopts.getopt.GetoptError, inst:
478 482 raise error.CommandError(None, inst)
479 483
480 484 if args:
481 485 cmd, args = args[0], args[1:]
482 486 aliases, entry = cmdutil.findcmd(cmd, commands.table,
483 487 ui.configbool("ui", "strict"))
484 488 cmd = aliases[0]
485 489 args = aliasargs(entry[0], args)
486 490 defaults = ui.config("defaults", cmd)
487 491 if defaults:
488 492 args = map(util.expandpath, shlex.split(defaults)) + args
489 493 c = list(entry[1])
490 494 else:
491 495 cmd = None
492 496 c = []
493 497
494 498 # combine global options into local
495 499 for o in commands.globalopts:
496 500 c.append((o[0], o[1], options[o[1]], o[3]))
497 501
498 502 try:
499 503 args = fancyopts.fancyopts(args, c, cmdoptions, True)
500 504 except fancyopts.getopt.GetoptError, inst:
501 505 raise error.CommandError(cmd, inst)
502 506
503 507 # separate global options back out
504 508 for o in commands.globalopts:
505 509 n = o[1]
506 510 options[n] = cmdoptions[n]
507 511 del cmdoptions[n]
508 512
509 513 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
510 514
511 515 def _parseconfig(ui, config):
512 516 """parse the --config options from the command line"""
513 517 configs = []
514 518
515 519 for cfg in config:
516 520 try:
517 521 name, value = cfg.split('=', 1)
518 522 section, name = name.split('.', 1)
519 523 if not section or not name:
520 524 raise IndexError
521 525 ui.setconfig(section, name, value, '--config')
522 526 configs.append((section, name, value))
523 527 except (IndexError, ValueError):
524 528 raise util.Abort(_('malformed --config option: %r '
525 529 '(use --config section.name=value)') % cfg)
526 530
527 531 return configs
528 532
529 533 def _earlygetopt(aliases, args):
530 534 """Return list of values for an option (or aliases).
531 535
532 536 The values are listed in the order they appear in args.
533 537 The options and values are removed from args.
534 538
535 539 >>> args = ['x', '--cwd', 'foo', 'y']
536 540 >>> _earlygetopt(['--cwd'], args), args
537 541 (['foo'], ['x', 'y'])
538 542
539 543 >>> args = ['x', '--cwd=bar', 'y']
540 544 >>> _earlygetopt(['--cwd'], args), args
541 545 (['bar'], ['x', 'y'])
542 546
543 547 >>> args = ['x', '-R', 'foo', 'y']
544 548 >>> _earlygetopt(['-R'], args), args
545 549 (['foo'], ['x', 'y'])
546 550
547 551 >>> args = ['x', '-Rbar', 'y']
548 552 >>> _earlygetopt(['-R'], args), args
549 553 (['bar'], ['x', 'y'])
550 554 """
551 555 try:
552 556 argcount = args.index("--")
553 557 except ValueError:
554 558 argcount = len(args)
555 559 shortopts = [opt for opt in aliases if len(opt) == 2]
556 560 values = []
557 561 pos = 0
558 562 while pos < argcount:
559 563 fullarg = arg = args[pos]
560 564 equals = arg.find('=')
561 565 if equals > -1:
562 566 arg = arg[:equals]
563 567 if arg in aliases:
564 568 del args[pos]
565 569 if equals > -1:
566 570 values.append(fullarg[equals + 1:])
567 571 argcount -= 1
568 572 else:
569 573 if pos + 1 >= argcount:
570 574 # ignore and let getopt report an error if there is no value
571 575 break
572 576 values.append(args.pop(pos))
573 577 argcount -= 2
574 578 elif arg[:2] in shortopts:
575 579 # short option can have no following space, e.g. hg log -Rfoo
576 580 values.append(args.pop(pos)[2:])
577 581 argcount -= 1
578 582 else:
579 583 pos += 1
580 584 return values
581 585
582 586 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
583 587 # run pre-hook, and abort if it fails
584 588 hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
585 589 pats=cmdpats, opts=cmdoptions)
586 590 ret = _runcommand(ui, options, cmd, d)
587 591 # run post-hook, passing command result
588 592 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
589 593 result=ret, pats=cmdpats, opts=cmdoptions)
590 594 return ret
591 595
592 596 def _getlocal(ui, rpath):
593 597 """Return (path, local ui object) for the given target path.
594 598
595 599 Takes paths in [cwd]/.hg/hgrc into account.
596 600 """
597 601 try:
598 602 wd = os.getcwd()
599 603 except OSError, e:
600 604 raise util.Abort(_("error getting current working directory: %s") %
601 605 e.strerror)
602 606 path = cmdutil.findrepo(wd) or ""
603 607 if not path:
604 608 lui = ui
605 609 else:
606 610 lui = ui.copy()
607 611 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
608 612
609 613 if rpath and rpath[-1]:
610 614 path = lui.expandpath(rpath[-1])
611 615 lui = ui.copy()
612 616 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
613 617
614 618 return path, lui
615 619
616 620 def _checkshellalias(lui, ui, args):
617 621 options = {}
618 622
619 623 try:
620 624 args = fancyopts.fancyopts(args, commands.globalopts, options)
621 625 except fancyopts.getopt.GetoptError:
622 626 return
623 627
624 628 if not args:
625 629 return
626 630
627 631 norepo = commands.norepo
628 632 optionalrepo = commands.optionalrepo
629 633 def restorecommands():
630 634 commands.norepo = norepo
631 635 commands.optionalrepo = optionalrepo
632 636
633 637 cmdtable = commands.table.copy()
634 638 addaliases(lui, cmdtable)
635 639
636 640 cmd = args[0]
637 641 try:
638 642 aliases, entry = cmdutil.findcmd(cmd, cmdtable)
639 643 except (error.AmbiguousCommand, error.UnknownCommand):
640 644 restorecommands()
641 645 return
642 646
643 647 cmd = aliases[0]
644 648 fn = entry[0]
645 649
646 650 if cmd and util.safehasattr(fn, 'shell'):
647 651 d = lambda: fn(ui, *args[1:])
648 652 return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
649 653 [], {})
650 654
651 655 restorecommands()
652 656
653 657 _loaded = set()
654 658 def _dispatch(req):
655 659 args = req.args
656 660 ui = req.ui
657 661
658 662 # check for cwd
659 663 cwd = _earlygetopt(['--cwd'], args)
660 664 if cwd:
661 665 os.chdir(cwd[-1])
662 666
663 667 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
664 668 path, lui = _getlocal(ui, rpath)
665 669
666 670 # Now that we're operating in the right directory/repository with
667 671 # the right config settings, check for shell aliases
668 672 shellaliasfn = _checkshellalias(lui, ui, args)
669 673 if shellaliasfn:
670 674 return shellaliasfn()
671 675
672 676 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
673 677 # reposetup. Programs like TortoiseHg will call _dispatch several
674 678 # times so we keep track of configured extensions in _loaded.
675 679 extensions.loadall(lui)
676 680 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
677 681 # Propagate any changes to lui.__class__ by extensions
678 682 ui.__class__ = lui.__class__
679 683
680 684 # (uisetup and extsetup are handled in extensions.loadall)
681 685
682 686 for name, module in exts:
683 687 cmdtable = getattr(module, 'cmdtable', {})
684 688 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
685 689 if overrides:
686 690 ui.warn(_("extension '%s' overrides commands: %s\n")
687 691 % (name, " ".join(overrides)))
688 692 commands.table.update(cmdtable)
689 693 _loaded.add(name)
690 694
691 695 # (reposetup is handled in hg.repository)
692 696
693 697 addaliases(lui, commands.table)
694 698
695 699 # check for fallback encoding
696 700 fallback = lui.config('ui', 'fallbackencoding')
697 701 if fallback:
698 702 encoding.fallbackencoding = fallback
699 703
700 704 fullargs = args
701 705 cmd, func, args, options, cmdoptions = _parse(lui, args)
702 706
703 707 if options["config"]:
704 708 raise util.Abort(_("option --config may not be abbreviated!"))
705 709 if options["cwd"]:
706 710 raise util.Abort(_("option --cwd may not be abbreviated!"))
707 711 if options["repository"]:
708 712 raise util.Abort(_(
709 713 "option -R has to be separated from other options (e.g. not -qR) "
710 714 "and --repository may only be abbreviated as --repo!"))
711 715
712 716 if options["encoding"]:
713 717 encoding.encoding = options["encoding"]
714 718 if options["encodingmode"]:
715 719 encoding.encodingmode = options["encodingmode"]
716 720 if options["time"]:
717 721 def get_times():
718 722 t = os.times()
719 723 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
720 724 t = (t[0], t[1], t[2], t[3], time.clock())
721 725 return t
722 726 s = get_times()
723 727 def print_time():
724 728 t = get_times()
725 729 ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
726 730 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
727 731 atexit.register(print_time)
728 732
729 733 uis = set([ui, lui])
730 734
731 735 if req.repo:
732 736 uis.add(req.repo.ui)
733 737
734 738 if options['verbose'] or options['debug'] or options['quiet']:
735 739 for opt in ('verbose', 'debug', 'quiet'):
736 740 val = str(bool(options[opt]))
737 741 for ui_ in uis:
738 742 ui_.setconfig('ui', opt, val, '--' + opt)
739 743
740 744 if options['traceback']:
741 745 for ui_ in uis:
742 746 ui_.setconfig('ui', 'traceback', 'on', '--traceback')
743 747
744 748 if options['noninteractive']:
745 749 for ui_ in uis:
746 750 ui_.setconfig('ui', 'interactive', 'off', '-y')
747 751
748 752 if cmdoptions.get('insecure', False):
749 753 for ui_ in uis:
750 754 ui_.setconfig('web', 'cacerts', '', '--insecure')
751 755
752 756 if options['version']:
753 757 return commands.version_(ui)
754 758 if options['help']:
755 759 return commands.help_(ui, cmd)
756 760 elif not cmd:
757 761 return commands.help_(ui, 'shortlist')
758 762
759 763 repo = None
760 764 cmdpats = args[:]
761 765 if cmd not in commands.norepo.split():
762 766 # use the repo from the request only if we don't have -R
763 767 if not rpath and not cwd:
764 768 repo = req.repo
765 769
766 770 if repo:
767 771 # set the descriptors of the repo ui to those of ui
768 772 repo.ui.fin = ui.fin
769 773 repo.ui.fout = ui.fout
770 774 repo.ui.ferr = ui.ferr
771 775 else:
772 776 try:
773 777 repo = hg.repository(ui, path=path)
774 778 if not repo.local():
775 779 raise util.Abort(_("repository '%s' is not local") % path)
776 780 repo.ui.setconfig("bundle", "mainreporoot", repo.root, 'repo')
777 781 except error.RequirementError:
778 782 raise
779 783 except error.RepoError:
780 784 if cmd not in commands.optionalrepo.split():
781 785 if (cmd in commands.inferrepo.split() and
782 786 args and not path): # try to infer -R from command args
783 787 repos = map(cmdutil.findrepo, args)
784 788 guess = repos[0]
785 789 if guess and repos.count(guess) == len(repos):
786 790 req.args = ['--repository', guess] + fullargs
787 791 return _dispatch(req)
788 792 if not path:
789 793 raise error.RepoError(_("no repository found in '%s'"
790 794 " (.hg not found)")
791 795 % os.getcwd())
792 796 raise
793 797 if repo:
794 798 ui = repo.ui
795 799 if options['hidden']:
796 800 repo = repo.unfiltered()
797 801 args.insert(0, repo)
798 802 elif rpath:
799 803 ui.warn(_("warning: --repository ignored\n"))
800 804
801 805 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
802 806 ui.log("command", '%s\n', msg)
803 807 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
804 808 try:
805 809 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
806 810 cmdpats, cmdoptions)
807 811 finally:
808 812 if repo and repo != req.repo:
809 813 repo.close()
810 814
811 815 def lsprofile(ui, func, fp):
812 816 format = ui.config('profiling', 'format', default='text')
813 817 field = ui.config('profiling', 'sort', default='inlinetime')
814 818 limit = ui.configint('profiling', 'limit', default=30)
815 819 climit = ui.configint('profiling', 'nested', default=5)
816 820
817 821 if format not in ['text', 'kcachegrind']:
818 822 ui.warn(_("unrecognized profiling format '%s'"
819 823 " - Ignored\n") % format)
820 824 format = 'text'
821 825
822 826 try:
823 827 from mercurial import lsprof
824 828 except ImportError:
825 829 raise util.Abort(_(
826 830 'lsprof not available - install from '
827 831 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
828 832 p = lsprof.Profiler()
829 833 p.enable(subcalls=True)
830 834 try:
831 835 return func()
832 836 finally:
833 837 p.disable()
834 838
835 839 if format == 'kcachegrind':
836 840 import lsprofcalltree
837 841 calltree = lsprofcalltree.KCacheGrind(p)
838 842 calltree.output(fp)
839 843 else:
840 844 # format == 'text'
841 845 stats = lsprof.Stats(p.getstats())
842 846 stats.sort(field)
843 847 stats.pprint(limit=limit, file=fp, climit=climit)
844 848
845 849 def statprofile(ui, func, fp):
846 850 try:
847 851 import statprof
848 852 except ImportError:
849 853 raise util.Abort(_(
850 854 'statprof not available - install using "easy_install statprof"'))
851 855
852 856 freq = ui.configint('profiling', 'freq', default=1000)
853 857 if freq > 0:
854 858 statprof.reset(freq)
855 859 else:
856 860 ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
857 861
858 862 statprof.start()
859 863 try:
860 864 return func()
861 865 finally:
862 866 statprof.stop()
863 867 statprof.display(fp)
864 868
865 869 def _runcommand(ui, options, cmd, cmdfunc):
866 870 def checkargs():
867 871 try:
868 872 return cmdfunc()
869 873 except error.SignatureError:
870 874 raise error.CommandError(cmd, _("invalid arguments"))
871 875
872 876 if options['profile']:
873 877 profiler = os.getenv('HGPROF')
874 878 if profiler is None:
875 879 profiler = ui.config('profiling', 'type', default='ls')
876 880 if profiler not in ('ls', 'stat'):
877 881 ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
878 882 profiler = 'ls'
879 883
880 884 output = ui.config('profiling', 'output')
881 885
882 886 if output:
883 887 path = ui.expandpath(output)
884 888 fp = open(path, 'wb')
885 889 else:
886 890 fp = sys.stderr
887 891
888 892 try:
889 893 if profiler == 'ls':
890 894 return lsprofile(ui, checkargs, fp)
891 895 else:
892 896 return statprofile(ui, checkargs, fp)
893 897 finally:
894 898 if output:
895 899 fp.close()
896 900 else:
897 901 return checkargs()
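
A minimal sketch of driving the request/dispatch API defined in the dispatch.py hunk above from Python 2. Only dispatch.request and dispatch.dispatch are taken from the code shown; the command words and the explicit stream overrides are illustrative assumptions.

import sys
from mercurial import dispatch

# Build a request the way run() above builds one from sys.argv[1:].
req = dispatch.request(['log', '-l', '3'],
                       fin=sys.stdin, fout=sys.stdout, ferr=sys.stderr)
ret = dispatch.dispatch(req)      # runs the command, returns its exit status
sys.exit((ret or 0) & 255)        # same masking as run() applies
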
@@ -1,649 +1,651 @@ mercurial/hg.py
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 from node import hex, nullid
12 12 import localrepo, bundlerepo, unionrepo, httppeer, sshpeer, statichttprepo
13 13 import bookmarks, lock, util, extensions, error, node, scmutil, phases, url
14 14 import cmdutil, discovery
15 15 import merge as mergemod
16 16 import verify as verifymod
17 17 import errno, os, shutil
18 18
19 19 def _local(path):
20 20 path = util.expandpath(util.urllocalpath(path))
21 21 return (os.path.isfile(path) and bundlerepo or localrepo)
22 22
23 23 def addbranchrevs(lrepo, other, branches, revs):
24 24 peer = other.peer() # a courtesy to callers using a localrepo for other
25 25 hashbranch, branches = branches
26 26 if not hashbranch and not branches:
27 27 return revs or None, revs and revs[0] or None
28 28 revs = revs and list(revs) or []
29 29 if not peer.capable('branchmap'):
30 30 if branches:
31 31 raise util.Abort(_("remote branch lookup not supported"))
32 32 revs.append(hashbranch)
33 33 return revs, revs[0]
34 34 branchmap = peer.branchmap()
35 35
36 36 def primary(branch):
37 37 if branch == '.':
38 38 if not lrepo:
39 39 raise util.Abort(_("dirstate branch not accessible"))
40 40 branch = lrepo.dirstate.branch()
41 41 if branch in branchmap:
42 42 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
43 43 return True
44 44 else:
45 45 return False
46 46
47 47 for branch in branches:
48 48 if not primary(branch):
49 49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
50 50 if hashbranch:
51 51 if not primary(hashbranch):
52 52 revs.append(hashbranch)
53 53 return revs, revs[0]
54 54
55 55 def parseurl(path, branches=None):
56 56 '''parse url#branch, returning (url, (branch, branches))'''
57 57
58 58 u = util.url(path)
59 59 branch = None
60 60 if u.fragment:
61 61 branch = u.fragment
62 62 u.fragment = None
63 63 return str(u), (branch, branches or [])
64 64
65 65 schemes = {
66 66 'bundle': bundlerepo,
67 67 'union': unionrepo,
68 68 'file': _local,
69 69 'http': httppeer,
70 70 'https': httppeer,
71 71 'ssh': sshpeer,
72 72 'static-http': statichttprepo,
73 73 }
74 74
75 75 def _peerlookup(path):
76 76 u = util.url(path)
77 77 scheme = u.scheme or 'file'
78 78 thing = schemes.get(scheme) or schemes['file']
79 79 try:
80 80 return thing(path)
81 81 except TypeError:
82 82 return thing
83 83
84 84 def islocal(repo):
85 85 '''return true if repo (or path pointing to repo) is local'''
86 86 if isinstance(repo, str):
87 87 try:
88 88 return _peerlookup(repo).islocal(repo)
89 89 except AttributeError:
90 90 return False
91 91 return repo.local()
92 92
93 93 def openpath(ui, path):
94 94 '''open path with open if local, url.open if remote'''
95 95 pathurl = util.url(path, parsequery=False, parsefragment=False)
96 96 if pathurl.islocal():
97 97 return util.posixfile(pathurl.localpath(), 'rb')
98 98 else:
99 99 return url.open(ui, path)
100 100
101 101 def _peerorrepo(ui, path, create=False):
102 102 """return a repository object for the specified path"""
103 103 obj = _peerlookup(path).instance(ui, path, create)
104 104 ui = getattr(obj, "ui", ui)
105 105 for name, module in extensions.extensions(ui):
106 106 hook = getattr(module, 'reposetup', None)
107 107 if hook:
108 108 hook(ui, obj)
109 109 return obj
110 110
111 111 def repository(ui, path='', create=False):
112 112 """return a repository object for the specified path"""
113 113 peer = _peerorrepo(ui, path, create)
114 114 repo = peer.local()
115 115 if not repo:
116 116 raise util.Abort(_("repository '%s' is not local") %
117 117 (path or peer.url()))
118 118 return repo.filtered('visible')
119 119
120 120 def peer(uiorrepo, opts, path, create=False):
121 121 '''return a repository peer for the specified path'''
122 122 rui = remoteui(uiorrepo, opts)
123 123 return _peerorrepo(rui, path, create).peer()
124 124
125 125 def defaultdest(source):
126 126 '''return default destination of clone if none is given
127 127
128 128 >>> defaultdest('foo')
129 129 'foo'
130 130 >>> defaultdest('/foo/bar')
131 131 'bar'
132 132 >>> defaultdest('/')
133 133 ''
134 134 >>> defaultdest('')
135 135 ''
136 136 >>> defaultdest('http://example.org/')
137 137 ''
138 138 >>> defaultdest('http://example.org/foo/')
139 139 'foo'
140 140 '''
141 141 path = util.url(source).path
142 142 if not path:
143 143 return ''
144 144 return os.path.basename(os.path.normpath(path))
145 145
146 146 def share(ui, source, dest=None, update=True):
147 147 '''create a shared repository'''
148 148
149 149 if not islocal(source):
150 150 raise util.Abort(_('can only share local repositories'))
151 151
152 152 if not dest:
153 153 dest = defaultdest(source)
154 154 else:
155 155 dest = ui.expandpath(dest)
156 156
157 157 if isinstance(source, str):
158 158 origsource = ui.expandpath(source)
159 159 source, branches = parseurl(origsource)
160 160 srcrepo = repository(ui, source)
161 161 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
162 162 else:
163 163 srcrepo = source.local()
164 164 origsource = source = srcrepo.url()
165 165 checkout = None
166 166
167 167 sharedpath = srcrepo.sharedpath # if our source is already sharing
168 168
169 169 root = os.path.realpath(dest)
170 170 roothg = os.path.join(root, '.hg')
171 171
172 172 if os.path.exists(roothg):
173 173 raise util.Abort(_('destination already exists'))
174 174
175 175 if not os.path.isdir(root):
176 176 os.mkdir(root)
177 177 util.makedir(roothg, notindexed=True)
178 178
179 179 requirements = ''
180 180 try:
181 181 requirements = srcrepo.opener.read('requires')
182 182 except IOError, inst:
183 183 if inst.errno != errno.ENOENT:
184 184 raise
185 185
186 186 requirements += 'shared\n'
187 187 util.writefile(os.path.join(roothg, 'requires'), requirements)
188 188 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
189 189
190 190 r = repository(ui, root)
191 191
192 192 default = srcrepo.ui.config('paths', 'default')
193 193 if default:
194 194 fp = r.opener("hgrc", "w", text=True)
195 195 fp.write("[paths]\n")
196 196 fp.write("default = %s\n" % default)
197 197 fp.close()
198 198
199 199 if update:
200 200 r.ui.status(_("updating working directory\n"))
201 201 if update is not True:
202 202 checkout = update
203 203 for test in (checkout, 'default', 'tip'):
204 204 if test is None:
205 205 continue
206 206 try:
207 207 uprev = r.lookup(test)
208 208 break
209 209 except error.RepoLookupError:
210 210 continue
211 211 _update(r, uprev)
212 212
213 213 def copystore(ui, srcrepo, destpath):
214 214 '''copy files from store of srcrepo in destpath
215 215
216 216 returns destlock
217 217 '''
218 218 destlock = None
219 219 try:
220 220 hardlink = None
221 221 num = 0
222 222 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
223 223 srcvfs = scmutil.vfs(srcrepo.sharedpath)
224 224 dstvfs = scmutil.vfs(destpath)
225 225 for f in srcrepo.store.copylist():
226 226 if srcpublishing and f.endswith('phaseroots'):
227 227 continue
228 228 dstbase = os.path.dirname(f)
229 229 if dstbase and not dstvfs.exists(dstbase):
230 230 dstvfs.mkdir(dstbase)
231 231 if srcvfs.exists(f):
232 232 if f.endswith('data'):
233 # 'dstbase' may be empty (e.g. revlog format 0)
234 lockfile = os.path.join(dstbase, "lock")
233 235 # lock to avoid premature writing to the target
234 destlock = lock.lock(dstvfs, dstbase + "/lock")
236 destlock = lock.lock(dstvfs, lockfile)
235 237 hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
236 238 hardlink)
237 239 num += n
238 240 if hardlink:
239 241 ui.debug("linked %d files\n" % num)
240 242 else:
241 243 ui.debug("copied %d files\n" % num)
242 244 return destlock
243 245 except: # re-raises
244 246 release(destlock)
245 247 raise
246 248
247 249 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
248 250 update=True, stream=False, branch=None):
249 251 """Make a copy of an existing repository.
250 252
251 253 Create a copy of an existing repository in a new directory. The
252 254 source and destination are URLs, as passed to the repository
253 255 function. Returns a pair of repository peers, the source and
254 256 newly created destination.
255 257
256 258 The location of the source is added to the new repository's
257 259 .hg/hgrc file, as the default to be used for future pulls and
258 260 pushes.
259 261
260 262 If an exception is raised, the partly cloned/updated destination
261 263 repository will be deleted.
262 264
263 265 Arguments:
264 266
265 267 source: repository object or URL
266 268
267 269 dest: URL of destination repository to create (defaults to base
268 270 name of source repository)
269 271
270 272 pull: always pull from source repository, even in local case
271 273
272 274 stream: stream raw data uncompressed from repository (fast over
273 275 LAN, slow over WAN)
274 276
275 277 rev: revision to clone up to (implies pull=True)
276 278
277 279 update: update working directory after clone completes, if
278 280 destination is local repository (True means update to default rev,
279 281 anything else is treated as a revision)
280 282
281 283 branch: branches to clone
282 284 """
283 285
284 286 if isinstance(source, str):
285 287 origsource = ui.expandpath(source)
286 288 source, branch = parseurl(origsource, branch)
287 289 srcpeer = peer(ui, peeropts, source)
288 290 else:
289 291 srcpeer = source.peer() # in case we were called with a localrepo
290 292 branch = (None, branch or [])
291 293 origsource = source = srcpeer.url()
292 294 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
293 295
294 296 if dest is None:
295 297 dest = defaultdest(source)
296 298 if dest:
297 299 ui.status(_("destination directory: %s\n") % dest)
298 300 else:
299 301 dest = ui.expandpath(dest)
300 302
301 303 dest = util.urllocalpath(dest)
302 304 source = util.urllocalpath(source)
303 305
304 306 if not dest:
305 307 raise util.Abort(_("empty destination path is not valid"))
306 308 if os.path.exists(dest):
307 309 if not os.path.isdir(dest):
308 310 raise util.Abort(_("destination '%s' already exists") % dest)
309 311 elif os.listdir(dest):
310 312 raise util.Abort(_("destination '%s' is not empty") % dest)
311 313
312 314 srclock = destlock = cleandir = None
313 315 srcrepo = srcpeer.local()
314 316 try:
315 317 abspath = origsource
316 318 if islocal(origsource):
317 319 abspath = os.path.abspath(util.urllocalpath(origsource))
318 320
319 321 if islocal(dest):
320 322 cleandir = dest
321 323
322 324 copy = False
323 325 if (srcrepo and srcrepo.cancopy() and islocal(dest)
324 326 and not phases.hassecret(srcrepo)):
325 327 copy = not pull and not rev
326 328
327 329 if copy:
328 330 try:
329 331 # we use a lock here because if we race with commit, we
330 332 # can end up with extra data in the cloned revlogs that's
331 333 # not pointed to by changesets, thus causing verify to
332 334 # fail
333 335 srclock = srcrepo.lock(wait=False)
334 336 except error.LockError:
335 337 copy = False
336 338
337 339 if copy:
338 340 srcrepo.hook('preoutgoing', throw=True, source='clone')
339 341 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
340 342 if not os.path.exists(dest):
341 343 os.mkdir(dest)
342 344 else:
343 345 # only clean up directories we create ourselves
344 346 cleandir = hgdir
345 347 try:
346 348 destpath = hgdir
347 349 util.makedir(destpath, notindexed=True)
348 350 except OSError, inst:
349 351 if inst.errno == errno.EEXIST:
350 352 cleandir = None
351 353 raise util.Abort(_("destination '%s' already exists")
352 354 % dest)
353 355 raise
354 356
355 357 destlock = copystore(ui, srcrepo, destpath)
356 358
357 359 # Recomputing branch cache might be slow on big repos,
358 360 # so just copy it
359 361 dstcachedir = os.path.join(destpath, 'cache')
360 362 srcbranchcache = srcrepo.sjoin('cache/branch2')
361 363 dstbranchcache = os.path.join(dstcachedir, 'branch2')
362 364 if os.path.exists(srcbranchcache):
363 365 if not os.path.exists(dstcachedir):
364 366 os.mkdir(dstcachedir)
365 367 util.copyfile(srcbranchcache, dstbranchcache)
366 368
367 369 # we need to re-init the repo after manually copying the data
368 370 # into it
369 371 destpeer = peer(srcrepo, peeropts, dest)
370 372 srcrepo.hook('outgoing', source='clone',
371 373 node=node.hex(node.nullid))
372 374 else:
373 375 try:
374 376 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
375 377 # only pass ui when no srcrepo
376 378 except OSError, inst:
377 379 if inst.errno == errno.EEXIST:
378 380 cleandir = None
379 381 raise util.Abort(_("destination '%s' already exists")
380 382 % dest)
381 383 raise
382 384
383 385 revs = None
384 386 if rev:
385 387 if not srcpeer.capable('lookup'):
386 388 raise util.Abort(_("src repository does not support "
387 389 "revision lookup and so doesn't "
388 390 "support clone by revision"))
389 391 revs = [srcpeer.lookup(r) for r in rev]
390 392 checkout = revs[0]
391 393 if destpeer.local():
392 394 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
393 395 elif srcrepo:
394 396 srcrepo.push(destpeer, revs=revs)
395 397 else:
396 398 raise util.Abort(_("clone from remote to remote not supported"))
397 399
398 400 cleandir = None
399 401
400 402 # clone all bookmarks except divergent ones
401 403 destrepo = destpeer.local()
402 404 if destrepo and srcpeer.capable("pushkey"):
403 405 rb = srcpeer.listkeys('bookmarks')
404 406 marks = destrepo._bookmarks
405 407 for k, n in rb.iteritems():
406 408 try:
407 409 m = destrepo.lookup(n)
408 410 marks[k] = m
409 411 except error.RepoLookupError:
410 412 pass
411 413 if rb:
412 414 marks.write()
413 415 elif srcrepo and destpeer.capable("pushkey"):
414 416 for k, n in srcrepo._bookmarks.iteritems():
415 417 destpeer.pushkey('bookmarks', k, '', hex(n))
416 418
417 419 if destrepo:
418 420 fp = destrepo.opener("hgrc", "w", text=True)
419 421 fp.write("[paths]\n")
420 422 u = util.url(abspath)
421 423 u.passwd = None
422 424 defaulturl = str(u)
423 425 fp.write("default = %s\n" % defaulturl)
424 426 fp.close()
425 427
426 428 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
427 429
428 430 if update:
429 431 if update is not True:
430 432 checkout = srcpeer.lookup(update)
431 433 uprev = None
432 434 status = None
433 435 if checkout is not None:
434 436 try:
435 437 uprev = destrepo.lookup(checkout)
436 438 except error.RepoLookupError:
437 439 pass
438 440 if uprev is None:
439 441 try:
440 442 uprev = destrepo._bookmarks['@']
441 443 update = '@'
442 444 bn = destrepo[uprev].branch()
443 445 if bn == 'default':
444 446 status = _("updating to bookmark @\n")
445 447 else:
446 448 status = _("updating to bookmark @ on branch %s\n"
447 449 % bn)
448 450 except KeyError:
449 451 try:
450 452 uprev = destrepo.branchtip('default')
451 453 except error.RepoLookupError:
452 454 uprev = destrepo.lookup('tip')
453 455 if not status:
454 456 bn = destrepo[uprev].branch()
455 457 status = _("updating to branch %s\n") % bn
456 458 destrepo.ui.status(status)
457 459 _update(destrepo, uprev)
458 460 if update in destrepo._bookmarks:
459 461 bookmarks.setcurrent(destrepo, update)
460 462 finally:
461 463 release(srclock, destlock)
462 464 if cleandir is not None:
463 465 shutil.rmtree(cleandir, True)
464 466 if srcpeer is not None:
465 467 srcpeer.close()
466 468 return srcpeer, destpeer
467 469
468 470 def _showstats(repo, stats):
469 471 repo.ui.status(_("%d files updated, %d files merged, "
470 472 "%d files removed, %d files unresolved\n") % stats)
471 473
472 474 def updaterepo(repo, node, overwrite):
473 475 """Update the working directory to node.
474 476
475 477 When overwrite is set, changes are clobbered; otherwise they are merged.
476 478
477 479 returns stats (see pydoc mercurial.merge.applyupdates)"""
478 480 return mergemod.update(repo, node, False, overwrite, None)
479 481
480 482 def update(repo, node):
481 483 """update the working directory to node, merging linear changes"""
482 484 stats = updaterepo(repo, node, False)
483 485 _showstats(repo, stats)
484 486 if stats[3]:
485 487 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
486 488 return stats[3] > 0
487 489
488 490 # naming conflict in clone()
489 491 _update = update
490 492
491 493 def clean(repo, node, show_stats=True):
492 494 """forcibly switch the working directory to node, clobbering changes"""
493 495 stats = updaterepo(repo, node, True)
494 496 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
495 497 if show_stats:
496 498 _showstats(repo, stats)
497 499 return stats[3] > 0
498 500
499 501 def merge(repo, node, force=None, remind=True):
500 502 """Branch merge with node, resolving changes. Return true if any
501 503 unresolved conflicts."""
502 504 stats = mergemod.update(repo, node, True, force, False)
503 505 _showstats(repo, stats)
504 506 if stats[3]:
505 507 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
506 508 "or 'hg update -C .' to abandon\n"))
507 509 elif remind:
508 510 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
509 511 return stats[3] > 0
510 512
511 513 def _incoming(displaychlist, subreporecurse, ui, repo, source,
512 514 opts, buffered=False):
513 515 """
514 516 Helper for incoming / gincoming.
515 517 displaychlist gets called with
516 518 (remoterepo, incomingchangesetlist, displayer) parameters,
517 519 and is supposed to contain only code that can't be unified.
518 520 """
519 521 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
520 522 other = peer(repo, opts, source)
521 523 ui.status(_('comparing with %s\n') % util.hidepassword(source))
522 524 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
523 525
524 526 if revs:
525 527 revs = [other.lookup(rev) for rev in revs]
526 528 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
527 529 revs, opts["bundle"], opts["force"])
528 530 try:
529 531 if not chlist:
530 532 ui.status(_("no changes found\n"))
531 533 return subreporecurse()
532 534
533 535 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
534 536 displaychlist(other, chlist, displayer)
535 537 displayer.close()
536 538 finally:
537 539 cleanupfn()
538 540 subreporecurse()
539 541 return 0 # exit code is zero since we found incoming changes
540 542
541 543 def incoming(ui, repo, source, opts):
542 544 def subreporecurse():
543 545 ret = 1
544 546 if opts.get('subrepos'):
545 547 ctx = repo[None]
546 548 for subpath in sorted(ctx.substate):
547 549 sub = ctx.sub(subpath)
548 550 ret = min(ret, sub.incoming(ui, source, opts))
549 551 return ret
550 552
551 553 def display(other, chlist, displayer):
552 554 limit = cmdutil.loglimit(opts)
553 555 if opts.get('newest_first'):
554 556 chlist.reverse()
555 557 count = 0
556 558 for n in chlist:
557 559 if limit is not None and count >= limit:
558 560 break
559 561 parents = [p for p in other.changelog.parents(n) if p != nullid]
560 562 if opts.get('no_merges') and len(parents) == 2:
561 563 continue
562 564 count += 1
563 565 displayer.show(other[n])
564 566 return _incoming(display, subreporecurse, ui, repo, source, opts)
565 567
566 568 def _outgoing(ui, repo, dest, opts):
567 569 dest = ui.expandpath(dest or 'default-push', dest or 'default')
568 570 dest, branches = parseurl(dest, opts.get('branch'))
569 571 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
570 572 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
571 573 if revs:
572 574 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
573 575
574 576 other = peer(repo, opts, dest)
575 577 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
576 578 force=opts.get('force'))
577 579 o = outgoing.missing
578 580 if not o:
579 581 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
580 582 return None
581 583 return o
582 584
583 585 def outgoing(ui, repo, dest, opts):
584 586 def recurse():
585 587 ret = 1
586 588 if opts.get('subrepos'):
587 589 ctx = repo[None]
588 590 for subpath in sorted(ctx.substate):
589 591 sub = ctx.sub(subpath)
590 592 ret = min(ret, sub.outgoing(ui, dest, opts))
591 593 return ret
592 594
593 595 limit = cmdutil.loglimit(opts)
594 596 o = _outgoing(ui, repo, dest, opts)
595 597 if o is None:
596 598 return recurse()
597 599
598 600 if opts.get('newest_first'):
599 601 o.reverse()
600 602 displayer = cmdutil.show_changeset(ui, repo, opts)
601 603 count = 0
602 604 for n in o:
603 605 if limit is not None and count >= limit:
604 606 break
605 607 parents = [p for p in repo.changelog.parents(n) if p != nullid]
606 608 if opts.get('no_merges') and len(parents) == 2:
607 609 continue
608 610 count += 1
609 611 displayer.show(repo[n])
610 612 displayer.close()
611 613 recurse()
612 614 return 0 # exit code is zero since we found outgoing changes
613 615
614 616 def revert(repo, node, choose):
615 617 """revert changes to revision in node without updating dirstate"""
616 618 return mergemod.update(repo, node, False, True, choose)[3] > 0
617 619
618 620 def verify(repo):
619 621 """verify the consistency of a repository"""
620 622 return verifymod.verify(repo)
621 623
622 624 def remoteui(src, opts):
623 625 'build a remote ui from ui or repo and opts'
624 626 if util.safehasattr(src, 'baseui'): # looks like a repository
625 627 dst = src.baseui.copy() # drop repo-specific config
626 628 src = src.ui # copy target options from repo
627 629 else: # assume it's a global ui object
628 630 dst = src.copy() # keep all global options
629 631
630 632 # copy ssh-specific options
631 633 for o in 'ssh', 'remotecmd':
632 634 v = opts.get(o) or src.config('ui', o)
633 635 if v:
634 636 dst.setconfig("ui", o, v, 'copied')
635 637
636 638 # copy bundle-specific options
637 639 r = src.config('bundle', 'mainreporoot')
638 640 if r:
639 641 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
640 642
641 643 # copy selected local settings to the remote ui
642 644 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
643 645 for key, val in src.configitems(sect):
644 646 dst.setconfig(sect, key, val, 'copied')
645 647 v = src.config('web', 'cacerts')
646 648 if v:
647 649 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
648 650
649 651 return dst
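
A rough sketch, under assumed paths, of the hg.py helpers shown above: hg.repository opens a local repository and hg.clone copies it, with call signatures taken from the definitions in this hunk. The empty peer-options dict and both paths are hypothetical.

from mercurial import ui as uimod, hg

u = uimod.ui()
repo = hg.repository(u, path='/path/to/repo')        # aborts if the repo is not local
srcpeer, destpeer = hg.clone(u, {},                   # peeropts left empty for the sketch
                             repo.root, dest='/tmp/repo-copy',
                             update=True)             # returns (source peer, destination peer)
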
@@ -1,2845 +1,2849 @@ mercurial/revset.py
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 9 import parser, util, error, discovery, hbisect, phases
10 10 import node
11 11 import heapq
12 12 import match as matchmod
13 13 import ancestor as ancestormod
14 14 from i18n import _
15 15 import encoding
16 16 import obsolete as obsmod
17 17 import pathutil
18 18 import repoview
19 19
20 20 def _revancestors(repo, revs, followfirst):
21 21 """Like revlog.ancestors(), but supports followfirst."""
22 22 cut = followfirst and 1 or None
23 23 cl = repo.changelog
24 24
25 25 def iterate():
26 26 revqueue, revsnode = None, None
27 27 h = []
28 28
29 29 revs.descending()
30 30 revqueue = util.deque(revs)
31 31 if revqueue:
32 32 revsnode = revqueue.popleft()
33 33 heapq.heappush(h, -revsnode)
34 34
35 35 seen = set([node.nullrev])
36 36 while h:
37 37 current = -heapq.heappop(h)
38 38 if current not in seen:
39 39 if revsnode and current == revsnode:
40 40 if revqueue:
41 41 revsnode = revqueue.popleft()
42 42 heapq.heappush(h, -revsnode)
43 43 seen.add(current)
44 44 yield current
45 45 for parent in cl.parentrevs(current)[:cut]:
46 46 if parent != node.nullrev:
47 47 heapq.heappush(h, -parent)
48 48
49 49 return _descgeneratorset(iterate())
50 50
51 51 def _revdescendants(repo, revs, followfirst):
52 52 """Like revlog.descendants() but supports followfirst."""
53 53 cut = followfirst and 1 or None
54 54
55 55 def iterate():
56 56 cl = repo.changelog
57 57 first = min(revs)
58 58 nullrev = node.nullrev
59 59 if first == nullrev:
60 60 # Are there nodes with a null first parent and a non-null
61 61 # second one? Maybe. Do we care? Probably not.
62 62 for i in cl:
63 63 yield i
64 64 else:
65 65 seen = set(revs)
66 66 for i in cl.revs(first + 1):
67 67 for x in cl.parentrevs(i)[:cut]:
68 68 if x != nullrev and x in seen:
69 69 seen.add(i)
70 70 yield i
71 71 break
72 72
73 73 return _ascgeneratorset(iterate())
74 74
75 75 def _revsbetween(repo, roots, heads):
76 76 """Return all paths between roots and heads, inclusive of both endpoint
77 77 sets."""
78 78 if not roots:
79 79 return baseset([])
80 80 parentrevs = repo.changelog.parentrevs
81 81 visit = baseset(heads)
82 82 reachable = set()
83 83 seen = {}
84 84 minroot = min(roots)
85 85 roots = set(roots)
86 86 # open-code the post-order traversal due to the tiny size of
87 87 # sys.getrecursionlimit()
88 88 while visit:
89 89 rev = visit.pop()
90 90 if rev in roots:
91 91 reachable.add(rev)
92 92 parents = parentrevs(rev)
93 93 seen[rev] = parents
94 94 for parent in parents:
95 95 if parent >= minroot and parent not in seen:
96 96 visit.append(parent)
97 97 if not reachable:
98 98 return baseset([])
99 99 for rev in sorted(seen):
100 100 for parent in seen[rev]:
101 101 if parent in reachable:
102 102 reachable.add(rev)
103 103 return baseset(sorted(reachable))
104 104
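A sketch of the same two-pass idea on a toy DAG: walk back from the heads recording parents, then sweep the recorded revs in ascending order and mark every rev whose parent is already reachable from a root (illustrative only):

def toy_revsbetween(parents, roots, heads):
    roots = set(roots)
    minroot = min(roots)
    seen, reachable, visit = {}, set(), list(heads)
    while visit:                       # backward pass, iterative on purpose
        rev = visit.pop()
        if rev in roots:
            reachable.add(rev)
        ps = parents.get(rev, ())
        seen[rev] = ps
        for p in ps:
            if p >= minroot and p not in seen:
                visit.append(p)
    for rev in sorted(seen):           # forward pass
        if any(p in reachable for p in seen[rev]):
            reachable.add(rev)
    return sorted(reachable)

parents = {1: (0,), 2: (1,), 3: (1,), 4: (2, 3)}
assert toy_revsbetween(parents, roots=[1], heads=[4]) == [1, 2, 3, 4]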
105 105 elements = {
106 106 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
107 107 "~": (18, None, ("ancestor", 18)),
108 108 "^": (18, None, ("parent", 18), ("parentpost", 18)),
109 109 "-": (5, ("negate", 19), ("minus", 5)),
110 110 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
111 111 ("dagrangepost", 17)),
112 112 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
113 113 ("dagrangepost", 17)),
114 114 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
115 115 "not": (10, ("not", 10)),
116 116 "!": (10, ("not", 10)),
117 117 "and": (5, None, ("and", 5)),
118 118 "&": (5, None, ("and", 5)),
119 119 "or": (4, None, ("or", 4)),
120 120 "|": (4, None, ("or", 4)),
121 121 "+": (4, None, ("or", 4)),
122 122 ",": (2, None, ("list", 2)),
123 123 ")": (0, None, None),
124 124 "symbol": (0, ("symbol",), None),
125 125 "string": (0, ("string",), None),
126 126 "end": (0, None, None),
127 127 }
128 128
129 129 keywords = set(['and', 'or', 'not'])
130 130
131 131 def tokenize(program, lookup=None):
132 132 '''
133 133 Parse a revset statement into a stream of tokens
134 134
135 135 Check that @ is a valid unquoted token character (issue3686):
136 136 >>> list(tokenize("@::"))
137 137 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
138 138
139 139 '''
140 140
141 141 pos, l = 0, len(program)
142 142 while pos < l:
143 143 c = program[pos]
144 144 if c.isspace(): # skip inter-token whitespace
145 145 pass
146 146 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
147 147 yield ('::', None, pos)
148 148 pos += 1 # skip ahead
149 149 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
150 150 yield ('..', None, pos)
151 151 pos += 1 # skip ahead
152 152 elif c in "():,-|&+!~^": # handle simple operators
153 153 yield (c, None, pos)
154 154 elif (c in '"\'' or c == 'r' and
155 155 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
156 156 if c == 'r':
157 157 pos += 1
158 158 c = program[pos]
159 159 decode = lambda x: x
160 160 else:
161 161 decode = lambda x: x.decode('string-escape')
162 162 pos += 1
163 163 s = pos
164 164 while pos < l: # find closing quote
165 165 d = program[pos]
166 166 if d == '\\': # skip over escaped characters
167 167 pos += 2
168 168 continue
169 169 if d == c:
170 170 yield ('string', decode(program[s:pos]), s)
171 171 break
172 172 pos += 1
173 173 else:
174 174 raise error.ParseError(_("unterminated string"), s)
175 175 # gather up a symbol/keyword
176 176 elif c.isalnum() or c in '._@' or ord(c) > 127:
177 177 s = pos
178 178 pos += 1
179 179 while pos < l: # find end of symbol
180 180 d = program[pos]
181 181 if not (d.isalnum() or d in "-._/@" or ord(d) > 127):
182 182 break
183 183 if d == '.' and program[pos - 1] == '.': # special case for ..
184 184 pos -= 1
185 185 break
186 186 pos += 1
187 187 sym = program[s:pos]
188 188 if sym in keywords: # operator keywords
189 189 yield (sym, None, s)
190 190 elif '-' in sym:
191 191 # some jerk gave us foo-bar-baz, try to check if it's a symbol
192 192 if lookup and lookup(sym):
193 193 # looks like a real symbol
194 194 yield ('symbol', sym, s)
195 195 else:
196 196 # looks like an expression
197 197 parts = sym.split('-')
198 198 for p in parts[:-1]:
199 199 if p: # possible consecutive -
200 200 yield ('symbol', p, s)
201 201 s += len(p)
202 202 yield ('-', None, pos)
203 203 s += 1
204 204 if parts[-1]: # possible trailing -
205 205 yield ('symbol', parts[-1], s)
206 206 else:
207 207 yield ('symbol', sym, s)
208 208 pos -= 1
209 209 else:
210 210 raise error.ParseError(_("syntax error"), pos)
211 211 pos += 1
212 212 yield ('end', None, pos)
213 213
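The scanner above follows a plain "inspect one character, yield (type, value, position), advance" pattern. A stripped-down sketch for a toy grammar with only symbols, ':' and '::' (illustrative, not the real tokenizer):

def toy_tokenize(program):
    pos, l = 0, len(program)
    while pos < l:
        c = program[pos]
        if c.isspace():                          # skip whitespace
            pass
        elif c == ':' and program[pos:pos + 2] == '::':
            yield ('::', None, pos)
            pos += 1                             # consumed one extra char
        elif c == ':':
            yield (':', None, pos)
        elif c.isalnum() or c in '._@':
            s = pos
            while pos + 1 < l and (program[pos + 1].isalnum()
                                   or program[pos + 1] in '._@'):
                pos += 1
            yield ('symbol', program[s:pos + 1], s)
        else:
            raise SyntaxError('unexpected %r at %d' % (c, pos))
        pos += 1
    yield ('end', None, pos)

assert list(toy_tokenize('a::b')) == [
    ('symbol', 'a', 0), ('::', None, 1), ('symbol', 'b', 3), ('end', None, 4)]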
214 214 # helpers
215 215
216 216 def getstring(x, err):
217 217 if x and (x[0] == 'string' or x[0] == 'symbol'):
218 218 return x[1]
219 219 raise error.ParseError(err)
220 220
221 221 def getlist(x):
222 222 if not x:
223 223 return []
224 224 if x[0] == 'list':
225 225 return getlist(x[1]) + [x[2]]
226 226 return [x]
227 227
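Because ``list`` nodes nest to the left, getlist() flattens ('list', ('list', x, y), z) into [x, y, z]. A toy equivalent:

def toy_getlist(x):
    if not x:
        return []
    if x[0] == 'list':
        return toy_getlist(x[1]) + [x[2]]
    return [x]

tree = ('list', ('list', ('symbol', 'a'), ('symbol', 'b')), ('symbol', 'c'))
assert toy_getlist(tree) == [('symbol', 'a'), ('symbol', 'b'), ('symbol', 'c')]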
228 228 def getargs(x, min, max, err):
229 229 l = getlist(x)
230 230 if len(l) < min or (max >= 0 and len(l) > max):
231 231 raise error.ParseError(err)
232 232 return l
233 233
234 234 def getset(repo, subset, x):
235 235 if not x:
236 236 raise error.ParseError(_("missing argument"))
237 237 s = methods[x[0]](repo, subset, *x[1:])
238 238 if util.safehasattr(s, 'set'):
239 239 return s
240 240 return baseset(s)
241 241
242 242 def _getrevsource(repo, r):
243 243 extra = repo[r].extra()
244 244 for label in ('source', 'transplant_source', 'rebase_source'):
245 245 if label in extra:
246 246 try:
247 247 return repo[extra[label]].rev()
248 248 except error.RepoLookupError:
249 249 pass
250 250 return None
251 251
252 252 # operator methods
253 253
254 254 def stringset(repo, subset, x):
255 255 x = repo[x].rev()
256 256 if x == -1 and len(subset) == len(repo):
257 257 return baseset([-1])
258 258 if len(subset) == len(repo) or x in subset:
259 259 return baseset([x])
260 260 return baseset([])
261 261
262 262 def symbolset(repo, subset, x):
263 263 if x in symbols:
264 264 raise error.ParseError(_("can't use %s here") % x)
265 265 return stringset(repo, subset, x)
266 266
267 267 def rangeset(repo, subset, x, y):
268 268 cl = baseset(repo.changelog)
269 269 m = getset(repo, cl, x)
270 270 n = getset(repo, cl, y)
271 271
272 272 if not m or not n:
273 273 return baseset([])
274 274 m, n = m[0], n[-1]
275 275
276 276 if m < n:
277 277 r = spanset(repo, m, n + 1)
278 278 else:
279 279 r = spanset(repo, m, n - 1)
280 280 return r & subset
281 281
282 282 def dagrange(repo, subset, x, y):
283 283 r = spanset(repo)
284 284 xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
285 285 s = subset.set()
286 286 return xs.filter(lambda r: r in s)
287 287
288 288 def andset(repo, subset, x, y):
289 289 return getset(repo, getset(repo, subset, x), y)
290 290
291 291 def orset(repo, subset, x, y):
292 292 xl = getset(repo, subset, x)
293 293 yl = getset(repo, subset - xl, y)
294 294 return xl + yl
295 295
296 296 def notset(repo, subset, x):
297 297 return subset - getset(repo, subset, x)
298 298
299 299 def listset(repo, subset, a, b):
300 300 raise error.ParseError(_("can't use a list in this context"))
301 301
302 302 def func(repo, subset, a, b):
303 303 if a[0] == 'symbol' and a[1] in symbols:
304 304 return symbols[a[1]](repo, subset, b)
305 305 raise error.ParseError(_("not a function: %s") % a[1])
306 306
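In plain-set terms the operator methods implement ordinary set algebra; the real code stays lazy via baseset/lazyset wrappers, but the intended semantics are roughly these (illustrative sketch):

full = set(range(8))        # stands in for the current subset
x = {1, 2, 3, 5}
y = {2, 3, 7}

assert (x & y) == {2, 3}                    # andset: y evaluated inside x
assert (x | y) == {1, 2, 3, 5, 7}           # orset: x plus whatever y adds
assert (full - x) == {0, 4, 6, 7}           # notset: subset minus x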
307 307 # functions
308 308
309 309 def adds(repo, subset, x):
310 310 """``adds(pattern)``
311 311 Changesets that add a file matching pattern.
312 312
313 313 The pattern without explicit kind like ``glob:`` is expected to be
314 314 relative to the current directory and match against a file or a
315 315 directory.
316 316 """
317 317 # i18n: "adds" is a keyword
318 318 pat = getstring(x, _("adds requires a pattern"))
319 319 return checkstatus(repo, subset, pat, 1)
320 320
321 321 def ancestor(repo, subset, x):
322 322 """``ancestor(*changeset)``
323 323 Greatest common ancestor of the changesets.
324 324
325 325 Accepts 0 or more changesets.
326 326 Will return an empty list when passed no arguments.
327 327 The greatest common ancestor of a single changeset is that changeset.
328 328 """
329 329 # i18n: "ancestor" is a keyword
330 330 l = getlist(x)
331 331 rl = spanset(repo)
332 332 anc = None
333 333
334 334 # (getset(repo, rl, i) for i in l) generates a list of lists
335 335 rev = repo.changelog.rev
336 336 ancestor = repo.changelog.ancestor
337 337 node = repo.changelog.node
338 338 for revs in (getset(repo, rl, i) for i in l):
339 339 for r in revs:
340 340 if anc is None:
341 341 anc = r
342 342 else:
343 343 anc = rev(ancestor(node(anc), node(r)))
344 344
345 345 if anc is not None and anc in subset:
346 346 return baseset([anc])
347 347 return baseset([])
348 348
349 349 def _ancestors(repo, subset, x, followfirst=False):
350 350 args = getset(repo, spanset(repo), x)
351 351 if not args:
352 352 return baseset([])
353 353 s = _revancestors(repo, args, followfirst)
354 354 return subset.filter(lambda r: r in s)
355 355
356 356 def ancestors(repo, subset, x):
357 357 """``ancestors(set)``
358 358 Changesets that are ancestors of a changeset in set.
359 359 """
360 360 return _ancestors(repo, subset, x)
361 361
362 362 def _firstancestors(repo, subset, x):
363 363 # ``_firstancestors(set)``
364 364 # Like ``ancestors(set)`` but follows only the first parents.
365 365 return _ancestors(repo, subset, x, followfirst=True)
366 366
367 367 def ancestorspec(repo, subset, x, n):
368 368 """``set~n``
369 369 Changesets that are the Nth ancestor (first parents only) of a changeset
370 370 in set.
371 371 """
372 372 try:
373 373 n = int(n[1])
374 374 except (TypeError, ValueError):
375 375 raise error.ParseError(_("~ expects a number"))
376 376 ps = set()
377 377 cl = repo.changelog
378 378 for r in getset(repo, baseset(cl), x):
379 379 for i in range(n):
380 380 r = cl.parentrevs(r)[0]
381 381 ps.add(r)
382 382 return subset.filter(lambda r: r in ps)
383 383
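``set~n`` simply follows first parents n steps from each member. On a hypothetical first-parent table:

def toy_nth_first_parent(first_parent, rev, n):
    for _ in range(n):
        rev = first_parent[rev]
    return rev

first_parent = {4: 3, 3: 1, 2: 1, 1: 0, 0: -1}
assert toy_nth_first_parent(first_parent, 4, 2) == 1   # 4~2 is rev 1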
384 384 def author(repo, subset, x):
385 385 """``author(string)``
386 386 Alias for ``user(string)``.
387 387 """
388 388 # i18n: "author" is a keyword
389 389 n = encoding.lower(getstring(x, _("author requires a string")))
390 390 kind, pattern, matcher = _substringmatcher(n)
391 391 return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))
392 392
393 393 def only(repo, subset, x):
394 394 """``only(set, [set])``
395 395 Changesets that are ancestors of the first set that are not ancestors
396 396 of any other head in the repo. If a second set is specified, the result
397 397 is ancestors of the first set that are not ancestors of the second set
398 398 (i.e. ::<set1> - ::<set2>).
399 399 """
400 400 cl = repo.changelog
401 401 args = getargs(x, 1, 2, _('only takes one or two arguments'))
402 402 include = getset(repo, spanset(repo), args[0]).set()
403 403 if len(args) == 1:
404 404 descendants = set(_revdescendants(repo, include, False))
405 405 exclude = [rev for rev in cl.headrevs()
406 406 if not rev in descendants and not rev in include]
407 407 else:
408 408 exclude = getset(repo, spanset(repo), args[1])
409 409
410 410 results = set(ancestormod.missingancestors(include, exclude, cl.parentrevs))
411 411 return lazyset(subset, lambda x: x in results)
412 412
413 413 def bisect(repo, subset, x):
414 414 """``bisect(string)``
415 415 Changesets marked in the specified bisect status:
416 416
417 417 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
418 418 - ``goods``, ``bads`` : csets topologically good/bad
419 419 - ``range`` : csets taking part in the bisection
420 420 - ``pruned`` : csets that are goods, bads or skipped
421 421 - ``untested`` : csets whose fate is yet unknown
422 422 - ``ignored`` : csets ignored due to DAG topology
423 423 - ``current`` : the cset currently being bisected
424 424 """
425 425 # i18n: "bisect" is a keyword
426 426 status = getstring(x, _("bisect requires a string")).lower()
427 427 state = set(hbisect.get(repo, status))
428 428 return subset.filter(lambda r: r in state)
429 429
430 430 # Backward-compatibility
431 431 # - no help entry so that we do not advertise it any more
432 432 def bisected(repo, subset, x):
433 433 return bisect(repo, subset, x)
434 434
435 435 def bookmark(repo, subset, x):
436 436 """``bookmark([name])``
437 437 The named bookmark or all bookmarks.
438 438
439 439 If `name` starts with `re:`, the remainder of the name is treated as
440 440 a regular expression. To match a bookmark that actually starts with `re:`,
441 441 use the prefix `literal:`.
442 442 """
443 443 # i18n: "bookmark" is a keyword
444 444 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
445 445 if args:
446 446 bm = getstring(args[0],
447 447 # i18n: "bookmark" is a keyword
448 448 _('the argument to bookmark must be a string'))
449 449 kind, pattern, matcher = _stringmatcher(bm)
450 450 if kind == 'literal':
451 451 bmrev = repo._bookmarks.get(bm, None)
452 452 if not bmrev:
453 453 raise util.Abort(_("bookmark '%s' does not exist") % bm)
454 454 bmrev = repo[bmrev].rev()
455 455 return subset.filter(lambda r: r == bmrev)
456 456 else:
457 457 matchrevs = set()
458 458 for name, bmrev in repo._bookmarks.iteritems():
459 459 if matcher(name):
460 460 matchrevs.add(bmrev)
461 461 if not matchrevs:
462 462 raise util.Abort(_("no bookmarks exist that match '%s'")
463 463 % pattern)
464 464 bmrevs = set()
465 465 for bmrev in matchrevs:
466 466 bmrevs.add(repo[bmrev].rev())
467 467 return subset & bmrevs
468 468
469 469 bms = set([repo[r].rev()
470 470 for r in repo._bookmarks.values()])
471 471 return subset.filter(lambda r: r in bms)
472 472
473 473 def branch(repo, subset, x):
474 474 """``branch(string or set)``
475 475 All changesets belonging to the given branch or the branches of the given
476 476 changesets.
477 477
478 478 If `string` starts with `re:`, the remainder of the name is treated as
479 479 a regular expression. To match a branch that actually starts with `re:`,
480 480 use the prefix `literal:`.
481 481 """
482 482 try:
483 483 b = getstring(x, '')
484 484 except error.ParseError:
485 485 # not a string, but another revspec, e.g. tip()
486 486 pass
487 487 else:
488 488 kind, pattern, matcher = _stringmatcher(b)
489 489 if kind == 'literal':
490 490 # note: falls through to the revspec case if no branch with
491 491 # this name exists
492 492 if pattern in repo.branchmap():
493 493 return subset.filter(lambda r: matcher(repo[r].branch()))
494 494 else:
495 495 return subset.filter(lambda r: matcher(repo[r].branch()))
496 496
497 497 s = getset(repo, spanset(repo), x)
498 498 b = set()
499 499 for r in s:
500 500 b.add(repo[r].branch())
501 501 s = s.set()
502 502 return subset.filter(lambda r: r in s or repo[r].branch() in b)
503 503
504 504 def bumped(repo, subset, x):
505 505 """``bumped()``
506 506 Mutable changesets marked as successors of public changesets.
507 507
508 508 Only non-public and non-obsolete changesets can be `bumped`.
509 509 """
510 510 # i18n: "bumped" is a keyword
511 511 getargs(x, 0, 0, _("bumped takes no arguments"))
512 512 bumped = obsmod.getrevs(repo, 'bumped')
513 513 return subset & bumped
514 514
515 515 def bundle(repo, subset, x):
516 516 """``bundle()``
517 517 Changesets in the bundle.
518 518
519 519 Bundle must be specified by the -R option."""
520 520
521 521 try:
522 522 bundlerevs = repo.changelog.bundlerevs
523 523 except AttributeError:
524 524 raise util.Abort(_("no bundle provided - specify with -R"))
525 525 return subset & bundlerevs
526 526
527 527 def checkstatus(repo, subset, pat, field):
528 528 hasset = matchmod.patkind(pat) == 'set'
529 529
530 530 def matches(x):
531 531 m = None
532 532 fname = None
533 533 c = repo[x]
534 534 if not m or hasset:
535 535 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
536 536 if not m.anypats() and len(m.files()) == 1:
537 537 fname = m.files()[0]
538 538 if fname is not None:
539 539 if fname not in c.files():
540 540 return False
541 541 else:
542 542 for f in c.files():
543 543 if m(f):
544 544 break
545 545 else:
546 546 return False
547 547 files = repo.status(c.p1().node(), c.node())[field]
548 548 if fname is not None:
549 549 if fname in files:
550 550 return True
551 551 else:
552 552 for f in files:
553 553 if m(f):
554 554 return True
555 555
556 556 return subset.filter(matches)
557 557
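checkstatus() has a fast path: a pattern naming exactly one literal file is tested by membership in the changed-file list, while anything with wildcards or multiple candidates falls back to scanning every file through the matcher (the ``field`` index picks the repo.status() bucket: modifies() passes 0, adds() 1, removes() 2). A rough sketch of that decision, using fnmatch in place of the real matcher:

import fnmatch

def toy_checkstatus(changed_files, pattern):
    if not any(ch in pattern for ch in '*?['):   # "not m.anypats()" stand-in
        return pattern in changed_files          # single literal file
    return any(fnmatch.fnmatch(f, pattern) for f in changed_files)

assert toy_checkstatus(['a.py', 'b.txt'], 'a.py')
assert toy_checkstatus(['a.py', 'b.txt'], '*.txt')
assert not toy_checkstatus(['a.py', 'b.txt'], '*.c')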
558 558 def _children(repo, narrow, parentset):
559 559 cs = set()
560 560 if not parentset:
561 561 return baseset(cs)
562 562 pr = repo.changelog.parentrevs
563 563 minrev = min(parentset)
564 564 for r in narrow:
565 565 if r <= minrev:
566 566 continue
567 567 for p in pr(r):
568 568 if p in parentset:
569 569 cs.add(r)
570 570 return baseset(cs)
571 571
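_children() is a single pass over the candidate revs, keeping any rev that has a parent in the parent set and skipping everything at or below the smallest parent. A toy equivalent:

def toy_children(parents, narrow, parentset):
    minrev = min(parentset)
    return sorted(r for r in narrow if r > minrev
                  and any(p in parentset for p in parents.get(r, ())))

parents = {1: (0,), 2: (1,), 3: (1,), 4: (2,)}
assert toy_children(parents, narrow=range(5), parentset={1}) == [2, 3]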
572 572 def children(repo, subset, x):
573 573 """``children(set)``
574 574 Child changesets of changesets in set.
575 575 """
576 576 s = getset(repo, baseset(repo), x).set()
577 577 cs = _children(repo, subset, s)
578 578 return subset & cs
579 579
580 580 def closed(repo, subset, x):
581 581 """``closed()``
582 582 Changeset is closed.
583 583 """
584 584 # i18n: "closed" is a keyword
585 585 getargs(x, 0, 0, _("closed takes no arguments"))
586 586 return subset.filter(lambda r: repo[r].closesbranch())
587 587
588 588 def contains(repo, subset, x):
589 589 """``contains(pattern)``
590 590 Revision contains a file matching pattern. See :hg:`help patterns`
591 591 for information about file patterns.
592 592
593 593 The pattern without explicit kind like ``glob:`` is expected to be
594 594 relative to the current directory and match against a file exactly
595 595 for efficiency.
596 596 """
597 597 # i18n: "contains" is a keyword
598 598 pat = getstring(x, _("contains requires a pattern"))
599 599
600 600 def matches(x):
601 601 if not matchmod.patkind(pat):
602 602 pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
603 603 if pats in repo[x]:
604 604 return True
605 605 else:
606 606 c = repo[x]
607 607 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
608 608 for f in c.manifest():
609 609 if m(f):
610 610 return True
611 611 return False
612 612
613 613 return subset.filter(matches)
614 614
615 615 def converted(repo, subset, x):
616 616 """``converted([id])``
617 617 Changesets converted from the given identifier in the old repository if
618 618 present, or all converted changesets if no identifier is specified.
619 619 """
620 620
621 621 # There is exactly no chance of resolving the revision, so do a simple
622 622 # string compare and hope for the best
623 623
624 624 rev = None
625 625 # i18n: "converted" is a keyword
626 626 l = getargs(x, 0, 1, _('converted takes one or no arguments'))
627 627 if l:
628 628 # i18n: "converted" is a keyword
629 629 rev = getstring(l[0], _('converted requires a revision'))
630 630
631 631 def _matchvalue(r):
632 632 source = repo[r].extra().get('convert_revision', None)
633 633 return source is not None and (rev is None or source.startswith(rev))
634 634
635 635 return subset.filter(lambda r: _matchvalue(r))
636 636
637 637 def date(repo, subset, x):
638 638 """``date(interval)``
639 639 Changesets within the interval, see :hg:`help dates`.
640 640 """
641 641 # i18n: "date" is a keyword
642 642 ds = getstring(x, _("date requires a string"))
643 643 dm = util.matchdate(ds)
644 644 return subset.filter(lambda x: dm(repo[x].date()[0]))
645 645
646 646 def desc(repo, subset, x):
647 647 """``desc(string)``
648 648 Search commit message for string. The match is case-insensitive.
649 649 """
650 650 # i18n: "desc" is a keyword
651 651 ds = encoding.lower(getstring(x, _("desc requires a string")))
652 652
653 653 def matches(x):
654 654 c = repo[x]
655 655 return ds in encoding.lower(c.description())
656 656
657 657 return subset.filter(matches)
658 658
659 659 def _descendants(repo, subset, x, followfirst=False):
660 660 args = getset(repo, spanset(repo), x)
661 661 if not args:
662 662 return baseset([])
663 663 s = _revdescendants(repo, args, followfirst)
664 664 a = set(args)
665 665 return subset.filter(lambda r: r in s or r in a)
666 666
667 667 def descendants(repo, subset, x):
668 668 """``descendants(set)``
669 669 Changesets which are descendants of changesets in set.
670 670 """
671 671 return _descendants(repo, subset, x)
672 672
673 673 def _firstdescendants(repo, subset, x):
674 674 # ``_firstdescendants(set)``
675 675 # Like ``descendants(set)`` but follows only the first parents.
676 676 return _descendants(repo, subset, x, followfirst=True)
677 677
678 678 def destination(repo, subset, x):
679 679 """``destination([set])``
680 680 Changesets that were created by a graft, transplant or rebase operation,
681 681 with the given revisions specified as the source. Omitting the optional set
682 682 is the same as passing all().
683 683 """
684 684 if x is not None:
685 685 args = getset(repo, spanset(repo), x).set()
686 686 else:
687 687 args = getall(repo, spanset(repo), x).set()
688 688
689 689 dests = set()
690 690
691 691 # subset contains all of the possible destinations that can be returned, so
692 692 # iterate over them and see if their source(s) were provided in the args.
693 693 # Even if the immediate src of r is not in the args, src's source (or
694 694 # further back) may be. Scanning back further than the immediate src allows
695 695 # transitive transplants and rebases to yield the same results as transitive
696 696 # grafts.
697 697 for r in subset:
698 698 src = _getrevsource(repo, r)
699 699 lineage = None
700 700
701 701 while src is not None:
702 702 if lineage is None:
703 703 lineage = list()
704 704
705 705 lineage.append(r)
706 706
707 707 # The visited lineage is a match if the current source is in the arg
708 708 # set. Since every candidate dest is visited by way of iterating
709 709 # subset, any dests further back in the lineage will be tested by a
710 710 # different iteration over subset. Likewise, if the src was already
711 711 # selected, the current lineage can be selected without going back
712 712 # further.
713 713 if src in args or src in dests:
714 714 dests.update(lineage)
715 715 break
716 716
717 717 r = src
718 718 src = _getrevsource(repo, r)
719 719
720 720 return subset.filter(lambda r: r in dests)
721 721
722 722 def divergent(repo, subset, x):
723 723 """``divergent()``
724 724 Final successors of changesets with an alternative set of final successors.
725 725 """
726 726 # i18n: "divergent" is a keyword
727 727 getargs(x, 0, 0, _("divergent takes no arguments"))
728 728 divergent = obsmod.getrevs(repo, 'divergent')
729 729 return subset.filter(lambda r: r in divergent)
730 730
731 731 def draft(repo, subset, x):
732 732 """``draft()``
733 733 Changeset in draft phase."""
734 734 # i18n: "draft" is a keyword
735 735 getargs(x, 0, 0, _("draft takes no arguments"))
736 736 pc = repo._phasecache
737 737 return subset.filter(lambda r: pc.phase(repo, r) == phases.draft)
738 738
739 739 def extinct(repo, subset, x):
740 740 """``extinct()``
741 741 Obsolete changesets with obsolete descendants only.
742 742 """
743 743 # i18n: "extinct" is a keyword
744 744 getargs(x, 0, 0, _("extinct takes no arguments"))
745 745 extincts = obsmod.getrevs(repo, 'extinct')
746 746 return subset & extincts
747 747
748 748 def extra(repo, subset, x):
749 749 """``extra(label, [value])``
750 750 Changesets with the given label in the extra metadata, with the given
751 751 optional value.
752 752
753 753 If `value` starts with `re:`, the remainder of the value is treated as
754 754 a regular expression. To match a value that actually starts with `re:`,
755 755 use the prefix `literal:`.
756 756 """
757 757
758 758 # i18n: "extra" is a keyword
759 759 l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
760 760 # i18n: "extra" is a keyword
761 761 label = getstring(l[0], _('first argument to extra must be a string'))
762 762 value = None
763 763
764 764 if len(l) > 1:
765 765 # i18n: "extra" is a keyword
766 766 value = getstring(l[1], _('second argument to extra must be a string'))
767 767 kind, value, matcher = _stringmatcher(value)
768 768
769 769 def _matchvalue(r):
770 770 extra = repo[r].extra()
771 771 return label in extra and (value is None or matcher(extra[label]))
772 772
773 773 return subset.filter(lambda r: _matchvalue(r))
774 774
775 775 def filelog(repo, subset, x):
776 776 """``filelog(pattern)``
777 777 Changesets connected to the specified filelog.
778 778
779 779 For performance reasons, ``filelog()`` does not show every changeset
780 780 that affects the requested file(s). See :hg:`help log` for details. For
781 781 a slower, more accurate result, use ``file()``.
782 782
783 783 The pattern without explicit kind like ``glob:`` is expected to be
784 784 relative to the current directory and match against a file exactly
785 785 for efficiency.
786 786 """
787 787
788 788 # i18n: "filelog" is a keyword
789 789 pat = getstring(x, _("filelog requires a pattern"))
790 790 s = set()
791 791
792 792 if not matchmod.patkind(pat):
793 793 f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
794 794 fl = repo.file(f)
795 795 for fr in fl:
796 796 s.add(fl.linkrev(fr))
797 797 else:
798 798 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
799 799 for f in repo[None]:
800 800 if m(f):
801 801 fl = repo.file(f)
802 802 for fr in fl:
803 803 s.add(fl.linkrev(fr))
804 804
805 805 return subset.filter(lambda r: r in s)
806 806
807 807 def first(repo, subset, x):
808 808 """``first(set, [n])``
809 809 An alias for limit().
810 810 """
811 811 return limit(repo, subset, x)
812 812
813 813 def _follow(repo, subset, x, name, followfirst=False):
814 814 l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
815 815 c = repo['.']
816 816 if l:
817 817 x = getstring(l[0], _("%s expected a filename") % name)
818 818 if x in c:
819 819 cx = c[x]
820 820 s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
821 821 # include the revision responsible for the most recent version
822 822 s.add(cx.linkrev())
823 823 else:
824 824 return baseset([])
825 825 else:
826 826 s = _revancestors(repo, baseset([c.rev()]), followfirst)
827 827
828 828 return subset.filter(lambda r: r in s)
829 829
830 830 def follow(repo, subset, x):
831 831 """``follow([file])``
832 832 An alias for ``::.`` (ancestors of the working copy's first parent).
833 833 If a filename is specified, the history of the given file is followed,
834 834 including copies.
835 835 """
836 836 return _follow(repo, subset, x, 'follow')
837 837
838 838 def _followfirst(repo, subset, x):
839 839 # ``followfirst([file])``
840 840 # Like ``follow([file])`` but follows only the first parent of
841 841 # every revision or file revision.
842 842 return _follow(repo, subset, x, '_followfirst', followfirst=True)
843 843
844 844 def getall(repo, subset, x):
845 845 """``all()``
846 846 All changesets, the same as ``0:tip``.
847 847 """
848 848 # i18n: "all" is a keyword
849 849 getargs(x, 0, 0, _("all takes no arguments"))
850 850 return subset
851 851
852 852 def grep(repo, subset, x):
853 853 """``grep(regex)``
854 854 Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
855 855 to ensure special escape characters are handled correctly. Unlike
856 856 ``keyword(string)``, the match is case-sensitive.
857 857 """
858 858 try:
859 859 # i18n: "grep" is a keyword
860 860 gr = re.compile(getstring(x, _("grep requires a string")))
861 861 except re.error, e:
862 862 raise error.ParseError(_('invalid match pattern: %s') % e)
863 863
864 864 def matches(x):
865 865 c = repo[x]
866 866 for e in c.files() + [c.user(), c.description()]:
867 867 if gr.search(e):
868 868 return True
869 869 return False
870 870
871 871 return subset.filter(matches)
872 872
873 873 def _matchfiles(repo, subset, x):
874 874 # _matchfiles takes a revset list of prefixed arguments:
875 875 #
876 876 # [p:foo, i:bar, x:baz]
877 877 #
878 878 # builds a match object from them and filters subset. Allowed
879 879 # prefixes are 'p:' for regular patterns, 'i:' for include
880 880 # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
881 881 # a revision identifier, or the empty string to reference the
882 882 # working directory, from which the match object is
883 883 # initialized. Use 'd:' to set the default matching mode, default
884 884 # to 'glob'. At most one 'r:' and 'd:' argument can be passed.
885 885
886 886 # i18n: "_matchfiles" is a keyword
887 887 l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
888 888 pats, inc, exc = [], [], []
889 889 hasset = False
890 890 rev, default = None, None
891 891 for arg in l:
892 892 # i18n: "_matchfiles" is a keyword
893 893 s = getstring(arg, _("_matchfiles requires string arguments"))
894 894 prefix, value = s[:2], s[2:]
895 895 if prefix == 'p:':
896 896 pats.append(value)
897 897 elif prefix == 'i:':
898 898 inc.append(value)
899 899 elif prefix == 'x:':
900 900 exc.append(value)
901 901 elif prefix == 'r:':
902 902 if rev is not None:
903 903 # i18n: "_matchfiles" is a keyword
904 904 raise error.ParseError(_('_matchfiles expected at most one '
905 905 'revision'))
906 906 rev = value
907 907 elif prefix == 'd:':
908 908 if default is not None:
909 909 # i18n: "_matchfiles" is a keyword
910 910 raise error.ParseError(_('_matchfiles expected at most one '
911 911 'default mode'))
912 912 default = value
913 913 else:
914 914 # i18n: "_matchfiles" is a keyword
915 915 raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
916 916 if not hasset and matchmod.patkind(value) == 'set':
917 917 hasset = True
918 918 if not default:
919 919 default = 'glob'
920 920
921 921 def matches(x):
922 922 m = None
923 923 c = repo[x]
924 924 if not m or (hasset and rev is None):
925 925 ctx = c
926 926 if rev is not None:
927 927 ctx = repo[rev or None]
928 928 m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
929 929 exclude=exc, ctx=ctx, default=default)
930 930 for f in c.files():
931 931 if m(f):
932 932 return True
933 933 return False
934 934
935 935 return subset.filter(matches)
936 936
937 937 def hasfile(repo, subset, x):
938 938 """``file(pattern)``
939 939 Changesets affecting files matched by pattern.
940 940
941 941 For a faster but less accurate result, consider using ``filelog()``
942 942 instead.
943 943
944 944 This predicate uses ``glob:`` as the default kind of pattern.
945 945 """
946 946 # i18n: "file" is a keyword
947 947 pat = getstring(x, _("file requires a pattern"))
948 948 return _matchfiles(repo, subset, ('string', 'p:' + pat))
949 949
950 950 def head(repo, subset, x):
951 951 """``head()``
952 952 Changeset is a named branch head.
953 953 """
954 954 # i18n: "head" is a keyword
955 955 getargs(x, 0, 0, _("head takes no arguments"))
956 956 hs = set()
957 957 for b, ls in repo.branchmap().iteritems():
958 958 hs.update(repo[h].rev() for h in ls)
959 959 return baseset(hs).filter(subset.__contains__)
960 960
961 961 def heads(repo, subset, x):
962 962 """``heads(set)``
963 963 Members of set with no children in set.
964 964 """
965 965 s = getset(repo, subset, x)
966 966 ps = parents(repo, subset, x)
967 967 return s - ps
968 968
969 969 def hidden(repo, subset, x):
970 970 """``hidden()``
971 971 Hidden changesets.
972 972 """
973 973 # i18n: "hidden" is a keyword
974 974 getargs(x, 0, 0, _("hidden takes no arguments"))
975 975 hiddenrevs = repoview.filterrevs(repo, 'visible')
976 976 return subset & hiddenrevs
977 977
978 978 def keyword(repo, subset, x):
979 979 """``keyword(string)``
980 980 Search commit message, user name, and names of changed files for
981 981 string. The match is case-insensitive.
982 982 """
983 983 # i18n: "keyword" is a keyword
984 984 kw = encoding.lower(getstring(x, _("keyword requires a string")))
985 985
986 986 def matches(r):
987 987 c = repo[r]
988 988 return util.any(kw in encoding.lower(t) for t in c.files() + [c.user(),
989 989 c.description()])
990 990
991 991 return subset.filter(matches)
992 992
993 993 def limit(repo, subset, x):
994 994 """``limit(set, [n])``
995 995 First n members of set, defaulting to 1.
996 996 """
997 997 # i18n: "limit" is a keyword
998 998 l = getargs(x, 1, 2, _("limit requires one or two arguments"))
999 999 try:
1000 1000 lim = 1
1001 1001 if len(l) == 2:
1002 1002 # i18n: "limit" is a keyword
1003 1003 lim = int(getstring(l[1], _("limit requires a number")))
1004 1004 except (TypeError, ValueError):
1005 1005 # i18n: "limit" is a keyword
1006 1006 raise error.ParseError(_("limit expects a number"))
1007 1007 ss = subset.set()
1008 1008 os = getset(repo, spanset(repo), l[0])
1009 1009 bs = baseset([])
1010 1010 it = iter(os)
1011 1011 for x in xrange(lim):
1012 1012 try:
1013 1013 y = it.next()
1014 1014 if y in ss:
1015 1015 bs.append(y)
1016 1016 except (StopIteration):
1017 1017 break
1018 1018 return bs
1019 1019
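Note that limit() (and last(), which reverses first) takes the first n elements of the argument set and then keeps only those that are also in the current subset; it does not keep scanning until it has n survivors. In plain-list terms (illustrative):

def toy_limit(ordered, subset, n):
    return [r for r in ordered[:n] if r in subset]

assert toy_limit([5, 4, 3, 2, 1], subset={1, 3, 5}, n=2) == [5]
assert toy_limit(list(reversed([5, 4, 3, 2, 1])), subset={1, 3, 5}, n=2) == [1]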
1020 1020 def last(repo, subset, x):
1021 1021 """``last(set, [n])``
1022 1022 Last n members of set, defaulting to 1.
1023 1023 """
1024 1024 # i18n: "last" is a keyword
1025 1025 l = getargs(x, 1, 2, _("last requires one or two arguments"))
1026 1026 try:
1027 1027 lim = 1
1028 1028 if len(l) == 2:
1029 1029 # i18n: "last" is a keyword
1030 1030 lim = int(getstring(l[1], _("last requires a number")))
1031 1031 except (TypeError, ValueError):
1032 1032 # i18n: "last" is a keyword
1033 1033 raise error.ParseError(_("last expects a number"))
1034 1034 ss = subset.set()
1035 1035 os = getset(repo, spanset(repo), l[0])
1036 1036 os.reverse()
1037 1037 bs = baseset([])
1038 1038 it = iter(os)
1039 1039 for x in xrange(lim):
1040 1040 try:
1041 1041 y = it.next()
1042 1042 if y in ss:
1043 1043 bs.append(y)
1044 1044 except (StopIteration):
1045 1045 break
1046 1046 return bs
1047 1047
1048 1048 def maxrev(repo, subset, x):
1049 1049 """``max(set)``
1050 1050 Changeset with highest revision number in set.
1051 1051 """
1052 1052 os = getset(repo, spanset(repo), x)
1053 1053 if os:
1054 1054 m = os.max()
1055 1055 if m in subset:
1056 1056 return baseset([m])
1057 1057 return baseset([])
1058 1058
1059 1059 def merge(repo, subset, x):
1060 1060 """``merge()``
1061 1061 Changeset is a merge changeset.
1062 1062 """
1063 1063 # i18n: "merge" is a keyword
1064 1064 getargs(x, 0, 0, _("merge takes no arguments"))
1065 1065 cl = repo.changelog
1066 1066 return subset.filter(lambda r: cl.parentrevs(r)[1] != -1)
1067 1067
1068 1068 def branchpoint(repo, subset, x):
1069 1069 """``branchpoint()``
1070 1070 Changesets with more than one child.
1071 1071 """
1072 1072 # i18n: "branchpoint" is a keyword
1073 1073 getargs(x, 0, 0, _("branchpoint takes no arguments"))
1074 1074 cl = repo.changelog
1075 1075 if not subset:
1076 1076 return baseset([])
1077 1077 baserev = min(subset)
1078 1078 parentscount = [0]*(len(repo) - baserev)
1079 1079 for r in cl.revs(start=baserev + 1):
1080 1080 for p in cl.parentrevs(r):
1081 1081 if p >= baserev:
1082 1082 parentscount[p - baserev] += 1
1083 1083 return subset.filter(lambda r: parentscount[r - baserev] > 1)
1084 1084
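branchpoint() counts children with one forward sweep over parent pointers and keeps revs that have more than one. A toy version on a dict-based DAG:

def toy_branchpoints(parents, revs):
    children = dict.fromkeys(revs, 0)
    for r in revs:
        for p in parents.get(r, ()):
            if p in children:
                children[p] += 1
    return sorted(r for r, n in children.items() if n > 1)

parents = {1: (0,), 2: (1,), 3: (1,), 4: (3,)}
assert toy_branchpoints(parents, range(5)) == [1]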
1085 1085 def minrev(repo, subset, x):
1086 1086 """``min(set)``
1087 1087 Changeset with lowest revision number in set.
1088 1088 """
1089 1089 os = getset(repo, spanset(repo), x)
1090 1090 if os:
1091 1091 m = os.min()
1092 1092 if m in subset:
1093 1093 return baseset([m])
1094 1094 return baseset([])
1095 1095
1096 1096 def _missingancestors(repo, subset, x):
1097 1097 # i18n: "_missingancestors" is a keyword
1098 1098 revs, bases = getargs(x, 2, 2,
1099 1099 _("_missingancestors requires two arguments"))
1100 1100 rs = baseset(repo)
1101 1101 revs = getset(repo, rs, revs)
1102 1102 bases = getset(repo, rs, bases)
1103 1103 missing = set(repo.changelog.findmissingrevs(bases, revs))
1104 1104 return baseset([r for r in subset if r in missing])
1105 1105
1106 1106 def modifies(repo, subset, x):
1107 1107 """``modifies(pattern)``
1108 1108 Changesets modifying files matched by pattern.
1109 1109
1110 1110 The pattern without explicit kind like ``glob:`` is expected to be
1111 1111 relative to the current directory and match against a file or a
1112 1112 directory.
1113 1113 """
1114 1114 # i18n: "modifies" is a keyword
1115 1115 pat = getstring(x, _("modifies requires a pattern"))
1116 1116 return checkstatus(repo, subset, pat, 0)
1117 1117
1118 1118 def node_(repo, subset, x):
1119 1119 """``id(string)``
1120 1120 Revision non-ambiguously specified by the given hex string prefix.
1121 1121 """
1122 1122 # i18n: "id" is a keyword
1123 1123 l = getargs(x, 1, 1, _("id requires one argument"))
1124 1124 # i18n: "id" is a keyword
1125 1125 n = getstring(l[0], _("id requires a string"))
1126 1126 if len(n) == 40:
1127 1127 rn = repo[n].rev()
1128 1128 else:
1129 1129 rn = None
1130 1130 pm = repo.changelog._partialmatch(n)
1131 1131 if pm is not None:
1132 1132 rn = repo.changelog.rev(pm)
1133 1133
1134 1134 return subset.filter(lambda r: r == rn)
1135 1135
1136 1136 def obsolete(repo, subset, x):
1137 1137 """``obsolete()``
1138 1138 Mutable changeset with a newer version."""
1139 1139 # i18n: "obsolete" is a keyword
1140 1140 getargs(x, 0, 0, _("obsolete takes no arguments"))
1141 1141 obsoletes = obsmod.getrevs(repo, 'obsolete')
1142 1142 return subset & obsoletes
1143 1143
1144 1144 def origin(repo, subset, x):
1145 1145 """``origin([set])``
1146 1146 Changesets that were specified as a source for the grafts, transplants or
1147 1147 rebases that created the given revisions. Omitting the optional set is the
1148 1148 same as passing all(). If a changeset created by these operations is itself
1149 1149 specified as a source for one of these operations, only the source changeset
1150 1150 for the first operation is selected.
1151 1151 """
1152 1152 if x is not None:
1153 1153 args = getset(repo, spanset(repo), x).set()
1154 1154 else:
1155 1155 args = getall(repo, spanset(repo), x).set()
1156 1156
1157 1157 def _firstsrc(rev):
1158 1158 src = _getrevsource(repo, rev)
1159 1159 if src is None:
1160 1160 return None
1161 1161
1162 1162 while True:
1163 1163 prev = _getrevsource(repo, src)
1164 1164
1165 1165 if prev is None:
1166 1166 return src
1167 1167 src = prev
1168 1168
1169 1169 o = set([_firstsrc(r) for r in args])
1170 1170 return subset.filter(lambda r: r in o)
1171 1171
1172 1172 def outgoing(repo, subset, x):
1173 1173 """``outgoing([path])``
1174 1174 Changesets not found in the specified destination repository, or the
1175 1175 default push location.
1176 1176 """
1177 1177 import hg # avoid start-up nasties
1178 1178 # i18n: "outgoing" is a keyword
1179 1179 l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
1180 1180 # i18n: "outgoing" is a keyword
1181 1181 dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
1182 1182 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
1183 1183 dest, branches = hg.parseurl(dest)
1184 1184 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1185 1185 if revs:
1186 1186 revs = [repo.lookup(rev) for rev in revs]
1187 1187 other = hg.peer(repo, {}, dest)
1188 1188 repo.ui.pushbuffer()
1189 1189 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
1190 1190 repo.ui.popbuffer()
1191 1191 cl = repo.changelog
1192 1192 o = set([cl.rev(r) for r in outgoing.missing])
1193 1193 return subset.filter(lambda r: r in o)
1194 1194
1195 1195 def p1(repo, subset, x):
1196 1196 """``p1([set])``
1197 1197 First parent of changesets in set, or the working directory.
1198 1198 """
1199 1199 if x is None:
1200 1200 p = repo[x].p1().rev()
1201 1201 return subset.filter(lambda r: r == p)
1202 1202
1203 1203 ps = set()
1204 1204 cl = repo.changelog
1205 1205 for r in getset(repo, spanset(repo), x):
1206 1206 ps.add(cl.parentrevs(r)[0])
1207 1207 return subset & ps
1208 1208
1209 1209 def p2(repo, subset, x):
1210 1210 """``p2([set])``
1211 1211 Second parent of changesets in set, or the working directory.
1212 1212 """
1213 1213 if x is None:
1214 1214 ps = repo[x].parents()
1215 1215 try:
1216 1216 p = ps[1].rev()
1217 1217 return subset.filter(lambda r: r == p)
1218 1218 except IndexError:
1219 1219 return baseset([])
1220 1220
1221 1221 ps = set()
1222 1222 cl = repo.changelog
1223 1223 for r in getset(repo, spanset(repo), x):
1224 1224 ps.add(cl.parentrevs(r)[1])
1225 1225 return subset & ps
1226 1226
1227 1227 def parents(repo, subset, x):
1228 1228 """``parents([set])``
1229 1229 The set of all parents for all changesets in set, or the working directory.
1230 1230 """
1231 1231 if x is None:
1232 1232 ps = tuple(p.rev() for p in repo[x].parents())
1233 1233 return subset & ps
1234 1234
1235 1235 ps = set()
1236 1236 cl = repo.changelog
1237 1237 for r in getset(repo, spanset(repo), x):
1238 1238 ps.update(cl.parentrevs(r))
1239 1239 return subset & ps
1240 1240
1241 1241 def parentspec(repo, subset, x, n):
1242 1242 """``set^0``
1243 1243 The set.
1244 1244 ``set^1`` (or ``set^``), ``set^2``
1245 1245 First or second parent, respectively, of all changesets in set.
1246 1246 """
1247 1247 try:
1248 1248 n = int(n[1])
1249 1249 if n not in (0, 1, 2):
1250 1250 raise ValueError
1251 1251 except (TypeError, ValueError):
1252 1252 raise error.ParseError(_("^ expects a number 0, 1, or 2"))
1253 1253 ps = set()
1254 1254 cl = repo.changelog
1255 1255 for r in getset(repo, baseset(cl), x):
1256 1256 if n == 0:
1257 1257 ps.add(r)
1258 1258 elif n == 1:
1259 1259 ps.add(cl.parentrevs(r)[0])
1260 1260 elif n == 2:
1261 1261 parents = cl.parentrevs(r)
1262 1262 if len(parents) > 1:
1263 1263 ps.add(parents[1])
1264 1264 return subset & ps
1265 1265
1266 1266 def present(repo, subset, x):
1267 1267 """``present(set)``
1268 1268 An empty set, if any revision in set isn't found; otherwise,
1269 1269 all revisions in set.
1270 1270
1271 1271 If any of the specified revisions is not present in the local repository,
1272 1272 the query is normally aborted. But this predicate allows the query
1273 1273 to continue even in such cases.
1274 1274 """
1275 1275 try:
1276 1276 return getset(repo, subset, x)
1277 1277 except error.RepoLookupError:
1278 1278 return baseset([])
1279 1279
1280 1280 def public(repo, subset, x):
1281 1281 """``public()``
1282 1282 Changeset in public phase."""
1283 1283 # i18n: "public" is a keyword
1284 1284 getargs(x, 0, 0, _("public takes no arguments"))
1285 1285 pc = repo._phasecache
1286 1286 return subset.filter(lambda r: pc.phase(repo, r) == phases.public)
1287 1287
1288 1288 def remote(repo, subset, x):
1289 1289 """``remote([id [,path]])``
1290 1290 Local revision that corresponds to the given identifier in a
1291 1291 remote repository, if present. Here, the '.' identifier is a
1292 1292 synonym for the current local branch.
1293 1293 """
1294 1294
1295 1295 import hg # avoid start-up nasties
1296 1296 # i18n: "remote" is a keyword
1297 1297 l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))
1298 1298
1299 1299 q = '.'
1300 1300 if len(l) > 0:
1301 1301 # i18n: "remote" is a keyword
1302 1302 q = getstring(l[0], _("remote requires a string id"))
1303 1303 if q == '.':
1304 1304 q = repo['.'].branch()
1305 1305
1306 1306 dest = ''
1307 1307 if len(l) > 1:
1308 1308 # i18n: "remote" is a keyword
1309 1309 dest = getstring(l[1], _("remote requires a repository path"))
1310 1310 dest = repo.ui.expandpath(dest or 'default')
1311 1311 dest, branches = hg.parseurl(dest)
1312 1312 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1313 1313 if revs:
1314 1314 revs = [repo.lookup(rev) for rev in revs]
1315 1315 other = hg.peer(repo, {}, dest)
1316 1316 n = other.lookup(q)
1317 1317 if n in repo:
1318 1318 r = repo[n].rev()
1319 1319 if r in subset:
1320 1320 return baseset([r])
1321 1321 return baseset([])
1322 1322
1323 1323 def removes(repo, subset, x):
1324 1324 """``removes(pattern)``
1325 1325 Changesets which remove files matching pattern.
1326 1326
1327 1327 The pattern without explicit kind like ``glob:`` is expected to be
1328 1328 relative to the current directory and match against a file or a
1329 1329 directory.
1330 1330 """
1331 1331 # i18n: "removes" is a keyword
1332 1332 pat = getstring(x, _("removes requires a pattern"))
1333 1333 return checkstatus(repo, subset, pat, 2)
1334 1334
1335 1335 def rev(repo, subset, x):
1336 1336 """``rev(number)``
1337 1337 Revision with the given numeric identifier.
1338 1338 """
1339 1339 # i18n: "rev" is a keyword
1340 1340 l = getargs(x, 1, 1, _("rev requires one argument"))
1341 1341 try:
1342 1342 # i18n: "rev" is a keyword
1343 1343 l = int(getstring(l[0], _("rev requires a number")))
1344 1344 except (TypeError, ValueError):
1345 1345 # i18n: "rev" is a keyword
1346 1346 raise error.ParseError(_("rev expects a number"))
1347 1347 return subset.filter(lambda r: r == l)
1348 1348
1349 1349 def matching(repo, subset, x):
1350 1350 """``matching(revision [, field])``
1351 1351 Changesets in which a given set of fields match the set of fields in the
1352 1352 selected revision or set.
1353 1353
1354 1354 To match more than one field pass the list of fields to match separated
1355 1355 by spaces (e.g. ``author description``).
1356 1356
1357 1357 Valid fields are most regular revision fields and some special fields.
1358 1358
1359 1359 Regular revision fields are ``description``, ``author``, ``branch``,
1360 1360 ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
1361 1361 and ``diff``.
1362 1362 Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
1363 1363 contents of the revision. Two revisions matching their ``diff`` will
1364 1364 also match their ``files``.
1365 1365
1366 1366 Special fields are ``summary`` and ``metadata``:
1367 1367 ``summary`` matches the first line of the description.
1368 1368 ``metadata`` is equivalent to matching ``description user date``
1369 1369 (i.e. it matches the main metadata fields).
1370 1370
1371 1371 ``metadata`` is the default field which is used when no fields are
1372 1372 specified. You can match more than one field at a time.
1373 1373 """
1374 1374 # i18n: "matching" is a keyword
1375 1375 l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
1376 1376
1377 1377 revs = getset(repo, baseset(repo.changelog), l[0])
1378 1378
1379 1379 fieldlist = ['metadata']
1380 1380 if len(l) > 1:
1381 1381 fieldlist = getstring(l[1],
1382 1382 # i18n: "matching" is a keyword
1383 1383 _("matching requires a string "
1384 1384 "as its second argument")).split()
1385 1385
1386 1386 # Make sure that there are no repeated fields,
1387 1387 # expand the 'special' 'metadata' field type
1388 1388 # and check the 'files' whenever we check the 'diff'
1389 1389 fields = []
1390 1390 for field in fieldlist:
1391 1391 if field == 'metadata':
1392 1392 fields += ['user', 'description', 'date']
1393 1393 elif field == 'diff':
1394 1394 # a revision matching the diff must also match the files
1395 1395 # since matching the diff is very costly, make sure to
1396 1396 # also match the files first
1397 1397 fields += ['files', 'diff']
1398 1398 else:
1399 1399 if field == 'author':
1400 1400 field = 'user'
1401 1401 fields.append(field)
1402 1402 fields = set(fields)
1403 1403 if 'summary' in fields and 'description' in fields:
1404 1404 # If a revision matches its description it also matches its summary
1405 1405 fields.discard('summary')
1406 1406
1407 1407 # We may want to match more than one field
1408 1408 # Not all fields take the same amount of time to be matched
1409 1409 # Sort the selected fields in order of increasing matching cost
1410 1410 fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
1411 1411 'files', 'description', 'substate', 'diff']
1412 1412 def fieldkeyfunc(f):
1413 1413 try:
1414 1414 return fieldorder.index(f)
1415 1415 except ValueError:
1416 1416 # assume an unknown field is very costly
1417 1417 return len(fieldorder)
1418 1418 fields = list(fields)
1419 1419 fields.sort(key=fieldkeyfunc)
1420 1420
1421 1421 # Each field will be matched with its own "getfield" function
1422 1422 # which will be added to the getfieldfuncs array of functions
1423 1423 getfieldfuncs = []
1424 1424 _funcs = {
1425 1425 'user': lambda r: repo[r].user(),
1426 1426 'branch': lambda r: repo[r].branch(),
1427 1427 'date': lambda r: repo[r].date(),
1428 1428 'description': lambda r: repo[r].description(),
1429 1429 'files': lambda r: repo[r].files(),
1430 1430 'parents': lambda r: repo[r].parents(),
1431 1431 'phase': lambda r: repo[r].phase(),
1432 1432 'substate': lambda r: repo[r].substate,
1433 1433 'summary': lambda r: repo[r].description().splitlines()[0],
1434 1434 'diff': lambda r: list(repo[r].diff(git=True),)
1435 1435 }
1436 1436 for info in fields:
1437 1437 getfield = _funcs.get(info, None)
1438 1438 if getfield is None:
1439 1439 raise error.ParseError(
1440 1440 # i18n: "matching" is a keyword
1441 1441 _("unexpected field name passed to matching: %s") % info)
1442 1442 getfieldfuncs.append(getfield)
1443 1443 # convert the getfield array of functions into a "getinfo" function
1444 1444 # which returns an array of field values (or a single value if there
1445 1445 # is only one field to match)
1446 1446 getinfo = lambda r: [f(r) for f in getfieldfuncs]
1447 1447
1448 1448 def matches(x):
1449 1449 for rev in revs:
1450 1450 target = getinfo(rev)
1451 1451 match = True
1452 1452 for n, f in enumerate(getfieldfuncs):
1453 1453 if target[n] != f(x):
1454 1454 match = False
1455 1455 if match:
1456 1456 return True
1457 1457 return False
1458 1458
1459 1459 return subset.filter(matches)
1460 1460
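The cost ordering is easy to check in isolation: known fields sort by their position in fieldorder, and unknown fields are treated as the most expensive, so they land last (sketch reusing the same list):

fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
              'files', 'description', 'substate', 'diff']

def toy_cost(f):
    try:
        return fieldorder.index(f)
    except ValueError:
        return len(fieldorder)          # unknown field: assume very costly

assert sorted(['diff', 'user', 'mystery', 'files'], key=toy_cost) == \
    ['user', 'files', 'diff', 'mystery']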
1461 1461 def reverse(repo, subset, x):
1462 1462 """``reverse(set)``
1463 1463 Reverse order of set.
1464 1464 """
1465 1465 l = getset(repo, subset, x)
1466 1466 l.reverse()
1467 1467 return l
1468 1468
1469 1469 def roots(repo, subset, x):
1470 1470 """``roots(set)``
1471 1471 Changesets in set with no parent changeset in set.
1472 1472 """
1473 1473 s = getset(repo, baseset(repo.changelog), x).set()
1474 1474 subset = baseset([r for r in subset if r in s])
1475 1475 cs = _children(repo, subset, s)
1476 1476 return subset - cs
1477 1477
1478 1478 def secret(repo, subset, x):
1479 1479 """``secret()``
1480 1480 Changeset in secret phase."""
1481 1481 # i18n: "secret" is a keyword
1482 1482 getargs(x, 0, 0, _("secret takes no arguments"))
1483 1483 pc = repo._phasecache
1484 1484 return subset.filter(lambda x: pc.phase(repo, x) == phases.secret)
1485 1485
1486 1486 def sort(repo, subset, x):
1487 1487 """``sort(set[, [-]key...])``
1488 1488 Sort set by keys. The default sort order is ascending, specify a key
1489 1489 as ``-key`` to sort in descending order.
1490 1490
1491 1491 The keys can be:
1492 1492
1493 1493 - ``rev`` for the revision number,
1494 1494 - ``branch`` for the branch name,
1495 1495 - ``desc`` for the commit message (description),
1496 1496 - ``user`` for user name (``author`` can be used as an alias),
1497 1497 - ``date`` for the commit date
1498 1498 """
1499 1499 # i18n: "sort" is a keyword
1500 1500 l = getargs(x, 1, 2, _("sort requires one or two arguments"))
1501 1501 keys = "rev"
1502 1502 if len(l) == 2:
1503 1503 # i18n: "sort" is a keyword
1504 1504 keys = getstring(l[1], _("sort spec must be a string"))
1505 1505
1506 1506 s = l[0]
1507 1507 keys = keys.split()
1508 1508 l = []
1509 1509 def invert(s):
1510 1510 return "".join(chr(255 - ord(c)) for c in s)
1511 1511 revs = getset(repo, subset, s)
1512 1512 if keys == ["rev"]:
1513 1513 revs.sort()
1514 1514 return revs
1515 1515 elif keys == ["-rev"]:
1516 1516 revs.sort(reverse=True)
1517 1517 return revs
1518 1518 for r in revs:
1519 1519 c = repo[r]
1520 1520 e = []
1521 1521 for k in keys:
1522 1522 if k == 'rev':
1523 1523 e.append(r)
1524 1524 elif k == '-rev':
1525 1525 e.append(-r)
1526 1526 elif k == 'branch':
1527 1527 e.append(c.branch())
1528 1528 elif k == '-branch':
1529 1529 e.append(invert(c.branch()))
1530 1530 elif k == 'desc':
1531 1531 e.append(c.description())
1532 1532 elif k == '-desc':
1533 1533 e.append(invert(c.description()))
1534 1534 elif k in 'user author':
1535 1535 e.append(c.user())
1536 1536 elif k in '-user -author':
1537 1537 e.append(invert(c.user()))
1538 1538 elif k == 'date':
1539 1539 e.append(c.date()[0])
1540 1540 elif k == '-date':
1541 1541 e.append(-c.date()[0])
1542 1542 else:
1543 1543 raise error.ParseError(_("unknown sort key %r") % k)
1544 1544 e.append(r)
1545 1545 l.append(e)
1546 1546 l.sort()
1547 1547 return baseset([e[-1] for e in l])
1548 1548
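The invert() helper makes descending string keys work inside a single ascending sort: complementing every byte flips the comparison of ASCII strings that differ at some position, so sorting on the transformed key behaves like a reverse sort on the original. A quick check on branch-like names (illustrative):

def invert(s):
    return "".join(chr(255 - ord(c)) for c in s)

names = ['stable', 'default', 'feature']
assert sorted(names, key=invert) == sorted(names, reverse=True)
assert sorted(names, key=invert) == ['stable', 'feature', 'default']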
1549 1549 def _stringmatcher(pattern):
1550 1550 """
1551 1551 accepts a string, possibly starting with 're:' or 'literal:' prefix.
1552 1552 returns the matcher name, pattern, and matcher function.
1553 1553 missing or unknown prefixes are treated as literal matches.
1554 1554
1555 1555 helper for tests:
1556 1556 >>> def test(pattern, *tests):
1557 1557 ... kind, pattern, matcher = _stringmatcher(pattern)
1558 1558 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
1559 1559
1560 1560 exact matching (no prefix):
1561 1561 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
1562 1562 ('literal', 'abcdefg', [False, False, True])
1563 1563
1564 1564 regex matching ('re:' prefix)
1565 1565 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
1566 1566 ('re', 'a.+b', [False, False, True])
1567 1567
1568 1568 force exact matches ('literal:' prefix)
1569 1569 >>> test('literal:re:foobar', 'foobar', 're:foobar')
1570 1570 ('literal', 're:foobar', [False, True])
1571 1571
1572 1572 unknown prefixes are ignored and treated as literals
1573 1573 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
1574 1574 ('literal', 'foo:bar', [False, False, True])
1575 1575 """
1576 1576 if pattern.startswith('re:'):
1577 1577 pattern = pattern[3:]
1578 1578 try:
1579 1579 regex = re.compile(pattern)
1580 1580 except re.error, e:
1581 1581 raise error.ParseError(_('invalid regular expression: %s')
1582 1582 % e)
1583 1583 return 're', pattern, regex.search
1584 1584 elif pattern.startswith('literal:'):
1585 1585 pattern = pattern[8:]
1586 1586 return 'literal', pattern, pattern.__eq__
1587 1587
1588 1588 def _substringmatcher(pattern):
1589 1589 kind, pattern, matcher = _stringmatcher(pattern)
1590 1590 if kind == 'literal':
1591 1591 matcher = lambda s: pattern in s
1592 1592 return kind, pattern, matcher
1593 1593
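The only difference from _stringmatcher() is that a literal pattern is downgraded from exact equality to substring containment, which is what user()/author() want. A minimal illustration (hypothetical sample strings):

pattern = 'olivia'
exact = pattern.__eq__                    # literal match in _stringmatcher
substring = lambda s: pattern in s        # literal match in _substringmatcher

assert exact('olivia') and not exact('olivia <o@example.com>')
assert substring('olivia') and substring('olivia <o@example.com>')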
1594 1594 def tag(repo, subset, x):
1595 1595 """``tag([name])``
1596 1596 The specified tag by name, or all tagged revisions if no name is given.
1597
1598 If `name` starts with `re:`, the remainder of the name is treated as
1599 a regular expression. To match a tag that actually starts with `re:`,
1600 use the prefix `literal:`.
1597 1601 """
1598 1602 # i18n: "tag" is a keyword
1599 1603 args = getargs(x, 0, 1, _("tag takes one or no arguments"))
1600 1604 cl = repo.changelog
1601 1605 if args:
1602 1606 pattern = getstring(args[0],
1603 1607 # i18n: "tag" is a keyword
1604 1608 _('the argument to tag must be a string'))
1605 1609 kind, pattern, matcher = _stringmatcher(pattern)
1606 1610 if kind == 'literal':
1607 1611 # avoid resolving all tags
1608 1612 tn = repo._tagscache.tags.get(pattern, None)
1609 1613 if tn is None:
1610 1614 raise util.Abort(_("tag '%s' does not exist") % pattern)
1611 1615 s = set([repo[tn].rev()])
1612 1616 else:
1613 1617 s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
1614 1618 else:
1615 1619 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
1616 1620 return subset & s
1617 1621
1618 1622 def tagged(repo, subset, x):
1619 1623 return tag(repo, subset, x)
1620 1624
1621 1625 def unstable(repo, subset, x):
1622 1626 """``unstable()``
1623 1627 Non-obsolete changesets with obsolete ancestors.
1624 1628 """
1625 1629 # i18n: "unstable" is a keyword
1626 1630 getargs(x, 0, 0, _("unstable takes no arguments"))
1627 1631 unstables = obsmod.getrevs(repo, 'unstable')
1628 1632 return subset & unstables
1629 1633
1630 1634
1631 1635 def user(repo, subset, x):
1632 1636 """``user(string)``
1633 1637 User name contains string. The match is case-insensitive.
1634 1638
1635 1639 If `string` starts with `re:`, the remainder of the string is treated as
1636 1640 a regular expression. To match a user that actually contains `re:`, use
1637 1641 the prefix `literal:`.
1638 1642 """
1639 1643 return author(repo, subset, x)
1640 1644
1641 1645 # for internal use
1642 1646 def _list(repo, subset, x):
1643 1647 s = getstring(x, "internal error")
1644 1648 if not s:
1645 1649 return baseset([])
1646 1650 ls = [repo[r].rev() for r in s.split('\0')]
1647 1651 s = subset.set()
1648 1652 return baseset([r for r in ls if r in s])
1649 1653
1650 1654 # for internal use
1651 1655 def _intlist(repo, subset, x):
1652 1656 s = getstring(x, "internal error")
1653 1657 if not s:
1654 1658 return baseset([])
1655 1659 ls = [int(r) for r in s.split('\0')]
1656 1660 s = subset.set()
1657 1661 return baseset([r for r in ls if r in s])
1658 1662
1659 1663 # for internal use
1660 1664 def _hexlist(repo, subset, x):
1661 1665 s = getstring(x, "internal error")
1662 1666 if not s:
1663 1667 return baseset([])
1664 1668 cl = repo.changelog
1665 1669 ls = [cl.rev(node.bin(r)) for r in s.split('\0')]
1666 1670 s = subset.set()
1667 1671 return baseset([r for r in ls if r in s])
1668 1672
1669 1673 symbols = {
1670 1674 "adds": adds,
1671 1675 "all": getall,
1672 1676 "ancestor": ancestor,
1673 1677 "ancestors": ancestors,
1674 1678 "_firstancestors": _firstancestors,
1675 1679 "author": author,
1676 1680 "only": only,
1677 1681 "bisect": bisect,
1678 1682 "bisected": bisected,
1679 1683 "bookmark": bookmark,
1680 1684 "branch": branch,
1681 1685 "branchpoint": branchpoint,
1682 1686 "bumped": bumped,
1683 1687 "bundle": bundle,
1684 1688 "children": children,
1685 1689 "closed": closed,
1686 1690 "contains": contains,
1687 1691 "converted": converted,
1688 1692 "date": date,
1689 1693 "desc": desc,
1690 1694 "descendants": descendants,
1691 1695 "_firstdescendants": _firstdescendants,
1692 1696 "destination": destination,
1693 1697 "divergent": divergent,
1694 1698 "draft": draft,
1695 1699 "extinct": extinct,
1696 1700 "extra": extra,
1697 1701 "file": hasfile,
1698 1702 "filelog": filelog,
1699 1703 "first": first,
1700 1704 "follow": follow,
1701 1705 "_followfirst": _followfirst,
1702 1706 "grep": grep,
1703 1707 "head": head,
1704 1708 "heads": heads,
1705 1709 "hidden": hidden,
1706 1710 "id": node_,
1707 1711 "keyword": keyword,
1708 1712 "last": last,
1709 1713 "limit": limit,
1710 1714 "_matchfiles": _matchfiles,
1711 1715 "max": maxrev,
1712 1716 "merge": merge,
1713 1717 "min": minrev,
1714 1718 "_missingancestors": _missingancestors,
1715 1719 "modifies": modifies,
1716 1720 "obsolete": obsolete,
1717 1721 "origin": origin,
1718 1722 "outgoing": outgoing,
1719 1723 "p1": p1,
1720 1724 "p2": p2,
1721 1725 "parents": parents,
1722 1726 "present": present,
1723 1727 "public": public,
1724 1728 "remote": remote,
1725 1729 "removes": removes,
1726 1730 "rev": rev,
1727 1731 "reverse": reverse,
1728 1732 "roots": roots,
1729 1733 "sort": sort,
1730 1734 "secret": secret,
1731 1735 "matching": matching,
1732 1736 "tag": tag,
1733 1737 "tagged": tagged,
1734 1738 "user": user,
1735 1739 "unstable": unstable,
1736 1740 "_list": _list,
1737 1741 "_intlist": _intlist,
1738 1742 "_hexlist": _hexlist,
1739 1743 }
1740 1744
1741 1745 # symbols which can't be used for a DoS attack for any given input
1742 1746 # (e.g. those which accept regexes as plain strings shouldn't be included)
1743 1747 # functions that just return a lot of changesets (like all) don't count here
1744 1748 safesymbols = set([
1745 1749 "adds",
1746 1750 "all",
1747 1751 "ancestor",
1748 1752 "ancestors",
1749 1753 "_firstancestors",
1750 1754 "author",
1751 1755 "bisect",
1752 1756 "bisected",
1753 1757 "bookmark",
1754 1758 "branch",
1755 1759 "branchpoint",
1756 1760 "bumped",
1757 1761 "bundle",
1758 1762 "children",
1759 1763 "closed",
1760 1764 "converted",
1761 1765 "date",
1762 1766 "desc",
1763 1767 "descendants",
1764 1768 "_firstdescendants",
1765 1769 "destination",
1766 1770 "divergent",
1767 1771 "draft",
1768 1772 "extinct",
1769 1773 "extra",
1770 1774 "file",
1771 1775 "filelog",
1772 1776 "first",
1773 1777 "follow",
1774 1778 "_followfirst",
1775 1779 "head",
1776 1780 "heads",
1777 1781 "hidden",
1778 1782 "id",
1779 1783 "keyword",
1780 1784 "last",
1781 1785 "limit",
1782 1786 "_matchfiles",
1783 1787 "max",
1784 1788 "merge",
1785 1789 "min",
1786 1790 "_missingancestors",
1787 1791 "modifies",
1788 1792 "obsolete",
1789 1793 "origin",
1790 1794 "outgoing",
1791 1795 "p1",
1792 1796 "p2",
1793 1797 "parents",
1794 1798 "present",
1795 1799 "public",
1796 1800 "remote",
1797 1801 "removes",
1798 1802 "rev",
1799 1803 "reverse",
1800 1804 "roots",
1801 1805 "sort",
1802 1806 "secret",
1803 1807 "matching",
1804 1808 "tag",
1805 1809 "tagged",
1806 1810 "user",
1807 1811 "unstable",
1808 1812 "_list",
1809 1813 "_intlist",
1810 1814 "_hexlist",
1811 1815 ])
1812 1816
1813 1817 methods = {
1814 1818 "range": rangeset,
1815 1819 "dagrange": dagrange,
1816 1820 "string": stringset,
1817 1821 "symbol": symbolset,
1818 1822 "and": andset,
1819 1823 "or": orset,
1820 1824 "not": notset,
1821 1825 "list": listset,
1822 1826 "func": func,
1823 1827 "ancestor": ancestorspec,
1824 1828 "parent": parentspec,
1825 1829 "parentpost": p1,
1826 1830 }
1827 1831
1828 1832 def optimize(x, small):
1829 1833 if x is None:
1830 1834 return 0, x
1831 1835
1832 1836 smallbonus = 1
1833 1837 if small:
1834 1838 smallbonus = .5
1835 1839
1836 1840 op = x[0]
1837 1841 if op == 'minus':
1838 1842 return optimize(('and', x[1], ('not', x[2])), small)
1839 1843 elif op == 'dagrangepre':
1840 1844 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
1841 1845 elif op == 'dagrangepost':
1842 1846 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
1843 1847 elif op == 'rangepre':
1844 1848 return optimize(('range', ('string', '0'), x[1]), small)
1845 1849 elif op == 'rangepost':
1846 1850 return optimize(('range', x[1], ('string', 'tip')), small)
1847 1851 elif op == 'negate':
1848 1852 return optimize(('string',
1849 1853 '-' + getstring(x[1], _("can't negate that"))), small)
1850 1854 elif op in 'string symbol negate':
1851 1855 return smallbonus, x # single revisions are small
1852 1856 elif op == 'and':
1853 1857 wa, ta = optimize(x[1], True)
1854 1858 wb, tb = optimize(x[2], True)
1855 1859
1856 1860 # (::x and not ::y)/(not ::y and ::x) have a fast path
1857 1861 def ismissingancestors(revs, bases):
1858 1862 return (
1859 1863 revs[0] == 'func'
1860 1864 and getstring(revs[1], _('not a symbol')) == 'ancestors'
1861 1865 and bases[0] == 'not'
1862 1866 and bases[1][0] == 'func'
1863 1867 and getstring(bases[1][1], _('not a symbol')) == 'ancestors')
1864 1868
1865 1869 w = min(wa, wb)
1866 1870 if ismissingancestors(ta, tb):
1867 1871 return w, ('func', ('symbol', '_missingancestors'),
1868 1872 ('list', ta[2], tb[1][2]))
1869 1873 if ismissingancestors(tb, ta):
1870 1874 return w, ('func', ('symbol', '_missingancestors'),
1871 1875 ('list', tb[2], ta[1][2]))
1872 1876
1873 1877 if wa > wb:
1874 1878 return w, (op, tb, ta)
1875 1879 return w, (op, ta, tb)
1876 1880 elif op == 'or':
1877 1881 wa, ta = optimize(x[1], False)
1878 1882 wb, tb = optimize(x[2], False)
1879 1883 if wb < wa:
1880 1884 wb, wa = wa, wb
1881 1885 return max(wa, wb), (op, ta, tb)
1882 1886 elif op == 'not':
1883 1887 o = optimize(x[1], not small)
1884 1888 return o[0], (op, o[1])
1885 1889 elif op == 'parentpost':
1886 1890 o = optimize(x[1], small)
1887 1891 return o[0], (op, o[1])
1888 1892 elif op == 'group':
1889 1893 return optimize(x[1], small)
1890 1894 elif op in 'dagrange range list parent ancestorspec':
1891 1895 if op == 'parent':
1892 1896 # x^:y means (x^) : y, not x ^ (:y)
1893 1897 post = ('parentpost', x[1])
1894 1898 if x[2][0] == 'dagrangepre':
1895 1899 return optimize(('dagrange', post, x[2][1]), small)
1896 1900 elif x[2][0] == 'rangepre':
1897 1901 return optimize(('range', post, x[2][1]), small)
1898 1902
1899 1903 wa, ta = optimize(x[1], small)
1900 1904 wb, tb = optimize(x[2], small)
1901 1905 return wa + wb, (op, ta, tb)
1902 1906 elif op == 'func':
1903 1907 f = getstring(x[1], _("not a symbol"))
1904 1908 wa, ta = optimize(x[2], small)
1905 1909 if f in ("author branch closed date desc file grep keyword "
1906 1910 "outgoing user"):
1907 1911 w = 10 # slow
1908 1912 elif f in "modifies adds removes":
1909 1913 w = 30 # slower
1910 1914 elif f == "contains":
1911 1915 w = 100 # very slow
1912 1916 elif f == "ancestor":
1913 1917 w = 1 * smallbonus
1914 1918 elif f in "reverse limit first":
1915 1919 w = 0
1916 1920 elif f in "sort":
1917 1921 w = 10 # assume most sorts look at changelog
1918 1922 else:
1919 1923 w = 1
1920 1924 return w + wa, (op, x[1], ta)
1921 1925 return 1, x
1922 1926
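A small sketch of the reordering performed for 'and' above: the computed weights decide evaluation order, so the cheap operand moves ahead of the very slow contains():

    from mercurial import revset
    tree, pos = revset.parse('contains("COPYING") and 1000::')
    weight, opt = revset.optimize(tree, True)
    # opt is ('and', <optimized 1000::>, <contains(...)>): contains() (weight 100) runs last
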
1923 1927 _aliasarg = ('func', ('symbol', '_aliasarg'))
1924 1928 def _getaliasarg(tree):
1925 1929 """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
1926 1930 return X; otherwise return None.
1927 1931 """
1928 1932 if (len(tree) == 3 and tree[:2] == _aliasarg
1929 1933 and tree[2][0] == 'string'):
1930 1934 return tree[2][1]
1931 1935 return None
1932 1936
1933 1937 def _checkaliasarg(tree, known=None):
1934 1938 """Check tree contains no _aliasarg construct or only ones which
1935 1939 value is in known. Used to avoid alias placeholders injection.
1936 1940 """
1937 1941 if isinstance(tree, tuple):
1938 1942 arg = _getaliasarg(tree)
1939 1943 if arg is not None and (not known or arg not in known):
1940 1944 raise error.ParseError(_("not a function: %s") % '_aliasarg')
1941 1945 for t in tree:
1942 1946 _checkaliasarg(t, known)
1943 1947
1944 1948 class revsetalias(object):
1945 1949 funcre = re.compile('^([^(]+)\(([^)]+)\)$')
1946 1950 args = None
1947 1951
1948 1952 def __init__(self, name, value):
1949 1953 '''Aliases like:
1950 1954
1951 1955 h = heads(default)
1952 1956 b($1) = ancestors($1) - ancestors(default)
1953 1957 '''
1954 1958 m = self.funcre.search(name)
1955 1959 if m:
1956 1960 self.name = m.group(1)
1957 1961 self.tree = ('func', ('symbol', m.group(1)))
1958 1962 self.args = [x.strip() for x in m.group(2).split(',')]
1959 1963 for arg in self.args:
1960 1964 # _aliasarg() is an unknown symbol only used to separate
1961 1965 # alias argument placeholders from regular strings.
1962 1966 value = value.replace(arg, '_aliasarg(%r)' % (arg,))
1963 1967 else:
1964 1968 self.name = name
1965 1969 self.tree = ('symbol', name)
1966 1970
1967 1971 self.replacement, pos = parse(value)
1968 1972 if pos != len(value):
1969 1973 raise error.ParseError(_('invalid token'), pos)
1970 1974 # Check for placeholder injection
1971 1975 _checkaliasarg(self.replacement, self.args)
1972 1976
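A sketch of what the class records for a parameterized alias, borrowing the example from its docstring:

    from mercurial import revset
    a = revset.revsetalias('b($1)', 'ancestors($1) - ancestors(default)')
    # a.name == 'b', a.args == ['$1'], a.tree == ('func', ('symbol', 'b'))
    # each '$1' in the value is wrapped in an _aliasarg placeholder before parsing
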
1973 1977 def _getalias(aliases, tree):
1974 1978 """If tree looks like an unexpanded alias, return it. Return None
1975 1979 otherwise.
1976 1980 """
1977 1981 if isinstance(tree, tuple) and tree:
1978 1982 if tree[0] == 'symbol' and len(tree) == 2:
1979 1983 name = tree[1]
1980 1984 alias = aliases.get(name)
1981 1985 if alias and alias.args is None and alias.tree == tree:
1982 1986 return alias
1983 1987 if tree[0] == 'func' and len(tree) > 1:
1984 1988 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
1985 1989 name = tree[1][1]
1986 1990 alias = aliases.get(name)
1987 1991 if alias and alias.args is not None and alias.tree == tree[:2]:
1988 1992 return alias
1989 1993 return None
1990 1994
1991 1995 def _expandargs(tree, args):
1992 1996 """Replace _aliasarg instances with the substitution value of the
1993 1997 same name in args, recursively.
1994 1998 """
1995 1999 if not tree or not isinstance(tree, tuple):
1996 2000 return tree
1997 2001 arg = _getaliasarg(tree)
1998 2002 if arg is not None:
1999 2003 return args[arg]
2000 2004 return tuple(_expandargs(t, args) for t in tree)
2001 2005
2002 2006 def _expandaliases(aliases, tree, expanding, cache):
2003 2007 """Expand aliases in tree, recursively.
2004 2008
2005 2009 'aliases' is a dictionary mapping user defined aliases to
2006 2010 revsetalias objects.
2007 2011 """
2008 2012 if not isinstance(tree, tuple):
2009 2013 # Do not expand raw strings
2010 2014 return tree
2011 2015 alias = _getalias(aliases, tree)
2012 2016 if alias is not None:
2013 2017 if alias in expanding:
2014 2018 raise error.ParseError(_('infinite expansion of revset alias "%s" '
2015 2019 'detected') % alias.name)
2016 2020 expanding.append(alias)
2017 2021 if alias.name not in cache:
2018 2022 cache[alias.name] = _expandaliases(aliases, alias.replacement,
2019 2023 expanding, cache)
2020 2024 result = cache[alias.name]
2021 2025 expanding.pop()
2022 2026 if alias.args is not None:
2023 2027 l = getlist(tree[2])
2024 2028 if len(l) != len(alias.args):
2025 2029 raise error.ParseError(
2026 2030 _('invalid number of arguments: %s') % len(l))
2027 2031 l = [_expandaliases(aliases, a, [], cache) for a in l]
2028 2032 result = _expandargs(result, dict(zip(alias.args, l)))
2029 2033 else:
2030 2034 result = tuple(_expandaliases(aliases, t, expanding, cache)
2031 2035 for t in tree)
2032 2036 return result
2033 2037
2034 2038 def findaliases(ui, tree):
2035 2039 _checkaliasarg(tree)
2036 2040 aliases = {}
2037 2041 for k, v in ui.configitems('revsetalias'):
2038 2042 alias = revsetalias(k, v)
2039 2043 aliases[alias.name] = alias
2040 2044 return _expandaliases(aliases, tree, [], {})
2041 2045
2042 2046 def parse(spec, lookup=None):
2043 2047 p = parser.parser(tokenize, elements)
2044 2048 return p.parse(spec, lookup=lookup)
2045 2049
2046 2050 def match(ui, spec, repo=None):
2047 2051 if not spec:
2048 2052 raise error.ParseError(_("empty query"))
2049 2053 lookup = None
2050 2054 if repo:
2051 2055 lookup = repo.__contains__
2052 2056 tree, pos = parse(spec, lookup)
2053 2057 if (pos != len(spec)):
2054 2058 raise error.ParseError(_("invalid token"), pos)
2055 2059 if ui:
2056 2060 tree = findaliases(ui, tree)
2057 2061 weight, tree = optimize(tree, True)
2058 2062 def mfunc(repo, subset):
2059 2063 if util.safehasattr(subset, 'set'):
2060 2064 return getset(repo, subset, tree)
2061 2065 return getset(repo, baseset(subset), tree)
2062 2066 return mfunc
2063 2067
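A hedged sketch of evaluating a revset against a repository, assuming a loaded localrepository object named repo (spanset is defined further down in this file):

    from mercurial import revset
    mfunc = revset.match(repo.ui, 'head() and not closed()')
    for r in mfunc(repo, revset.spanset(repo)):
        print r
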
2064 2068 def formatspec(expr, *args):
2065 2069 '''
2066 2070 This is a convenience function for using revsets internally, and
2067 2071 escapes arguments appropriately. Aliases are intentionally ignored
2068 2072 so that intended expression behavior isn't accidentally subverted.
2069 2073
2070 2074 Supported arguments:
2071 2075
2072 2076 %r = revset expression, parenthesized
2073 2077 %d = int(arg), no quoting
2074 2078 %s = string(arg), escaped and single-quoted
2075 2079 %b = arg.branch(), escaped and single-quoted
2076 2080 %n = hex(arg), single-quoted
2077 2081 %% = a literal '%'
2078 2082
2079 2083 Prefixing the type with 'l' specifies a parenthesized list of that type.
2080 2084
2081 2085 >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
2082 2086 '(10 or 11):: and ((this()) or (that()))'
2083 2087 >>> formatspec('%d:: and not %d::', 10, 20)
2084 2088 '10:: and not 20::'
2085 2089 >>> formatspec('%ld or %ld', [], [1])
2086 2090 "_list('') or 1"
2087 2091 >>> formatspec('keyword(%s)', 'foo\\xe9')
2088 2092 "keyword('foo\\\\xe9')"
2089 2093 >>> b = lambda: 'default'
2090 2094 >>> b.branch = b
2091 2095 >>> formatspec('branch(%b)', b)
2092 2096 "branch('default')"
2093 2097 >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
2094 2098 "root(_list('a\\x00b\\x00c\\x00d'))"
2095 2099 '''
2096 2100
2097 2101 def quote(s):
2098 2102 return repr(str(s))
2099 2103
2100 2104 def argtype(c, arg):
2101 2105 if c == 'd':
2102 2106 return str(int(arg))
2103 2107 elif c == 's':
2104 2108 return quote(arg)
2105 2109 elif c == 'r':
2106 2110 parse(arg) # make sure syntax errors are confined
2107 2111 return '(%s)' % arg
2108 2112 elif c == 'n':
2109 2113 return quote(node.hex(arg))
2110 2114 elif c == 'b':
2111 2115 return quote(arg.branch())
2112 2116
2113 2117 def listexp(s, t):
2114 2118 l = len(s)
2115 2119 if l == 0:
2116 2120 return "_list('')"
2117 2121 elif l == 1:
2118 2122 return argtype(t, s[0])
2119 2123 elif t == 'd':
2120 2124 return "_intlist('%s')" % "\0".join(str(int(a)) for a in s)
2121 2125 elif t == 's':
2122 2126 return "_list('%s')" % "\0".join(s)
2123 2127 elif t == 'n':
2124 2128 return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s)
2125 2129 elif t == 'b':
2126 2130 return "_list('%s')" % "\0".join(a.branch() for a in s)
2127 2131
2128 2132 m = l // 2
2129 2133 return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))
2130 2134
2131 2135 ret = ''
2132 2136 pos = 0
2133 2137 arg = 0
2134 2138 while pos < len(expr):
2135 2139 c = expr[pos]
2136 2140 if c == '%':
2137 2141 pos += 1
2138 2142 d = expr[pos]
2139 2143 if d == '%':
2140 2144 ret += d
2141 2145 elif d in 'dsnbr':
2142 2146 ret += argtype(d, args[arg])
2143 2147 arg += 1
2144 2148 elif d == 'l':
2145 2149 # a list of some type
2146 2150 pos += 1
2147 2151 d = expr[pos]
2148 2152 ret += listexp(list(args[arg]), d)
2149 2153 arg += 1
2150 2154 else:
2151 2155 raise util.Abort('unexpected revspec format character %s' % d)
2152 2156 else:
2153 2157 ret += c
2154 2158 pos += 1
2155 2159
2156 2160 return ret
2157 2161
2158 2162 def prettyformat(tree):
2159 2163 def _prettyformat(tree, level, lines):
2160 2164 if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
2161 2165 lines.append((level, str(tree)))
2162 2166 else:
2163 2167 lines.append((level, '(%s' % tree[0]))
2164 2168 for s in tree[1:]:
2165 2169 _prettyformat(s, level + 1, lines)
2166 2170 lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
2167 2171
2168 2172 lines = []
2169 2173 _prettyformat(tree, 0, lines)
2170 2174 output = '\n'.join((' '*l + s) for l, s in lines)
2171 2175 return output
2172 2176
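A quick sketch of the output prettyformat() produces for a parsed expression:

    from mercurial import revset
    tree, pos = revset.parse('p1(tip)')
    print revset.prettyformat(tree)
    # (func
    #   ('symbol', 'p1')
    #   ('symbol', 'tip'))
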
2173 2177 def depth(tree):
2174 2178 if isinstance(tree, tuple):
2175 2179 return max(map(depth, tree)) + 1
2176 2180 else:
2177 2181 return 0
2178 2182
2179 2183 def funcsused(tree):
2180 2184 if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
2181 2185 return set()
2182 2186 else:
2183 2187 funcs = set()
2184 2188 for s in tree[1:]:
2185 2189 funcs |= funcsused(s)
2186 2190 if tree[0] == 'func':
2187 2191 funcs.add(tree[1][1])
2188 2192 return funcs
2189 2193
2190 2194 class baseset(list):
2191 2195 """Basic data structure that represents a revset and contains the basic
2192 2196 operation that it should be able to perform.
2193 2197
2194 2198 Every method in this class should be implemented by any smartset class.
2195 2199 """
2196 2200 def __init__(self, data=()):
2197 2201 super(baseset, self).__init__(data)
2198 2202 self._set = None
2199 2203
2200 2204 def ascending(self):
2201 2205 """Sorts the set in ascending order (in place).
2202 2206
2203 2207 This is part of the mandatory API for smartset."""
2204 2208 self.sort()
2205 2209
2206 2210 def descending(self):
2207 2211 """Sorts the set in descending order (in place).
2208 2212
2209 2213 This is part of the mandatory API for smartset."""
2210 2214 self.sort(reverse=True)
2211 2215
2212 2216 def min(self):
2213 2217 return min(self)
2214 2218
2215 2219 def max(self):
2216 2220 return max(self)
2217 2221
2218 2222 def set(self):
2219 2223 """Returns a set or a smartset containing all the elements.
2220 2224
2221 2225 The returned structure should be the fastest option for membership
2222 2226 testing.
2223 2227
2224 2228 This is part of the mandatory API for smartset."""
2225 2229 if not self._set:
2226 2230 self._set = set(self)
2227 2231 return self._set
2228 2232
2229 2233 def __sub__(self, other):
2230 2234 """Returns a new object with the substraction of the two collections.
2231 2235
2232 2236 This is part of the mandatory API for smartset."""
2233 2237 if isinstance(other, baseset):
2234 2238 s = other.set()
2235 2239 else:
2236 2240 s = set(other)
2237 2241 return baseset(self.set() - s)
2238 2242
2239 2243 def __and__(self, other):
2240 2244 """Returns a new object with the intersection of the two collections.
2241 2245
2242 2246 This is part of the mandatory API for smartset."""
2243 2247 if isinstance(other, baseset):
2244 2248 other = other.set()
2245 2249 return baseset([y for y in self if y in other])
2246 2250
2247 2251 def __add__(self, other):
2248 2252 """Returns a new object with the union of the two collections.
2249 2253
2250 2254 This is part of the mandatory API for smartset."""
2251 2255 s = self.set()
2252 2256 l = [r for r in other if r not in s]
2253 2257 return baseset(list(self) + l)
2254 2258
2255 2259 def isascending(self):
2256 2260 """Returns True if the collection is ascending order, False if not.
2257 2261
2258 2262 This is part of the mandatory API for smartset."""
2259 2263 return False
2260 2264
2261 2265 def isdescending(self):
2262 2266 """Returns True if the collection is descending order, False if not.
2263 2267
2264 2268 This is part of the mandatory API for smartset."""
2265 2269 return False
2266 2270
2267 2271 def filter(self, condition):
2268 2272 """Returns this smartset filtered by condition as a new smartset.
2269 2273
2270 2274 `condition` is a callable which takes a revision number and returns a
2271 2275 boolean.
2272 2276
2273 2277 This is part of the mandatory API for smartset."""
2274 2278 return lazyset(self, condition)
2275 2279
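A short sketch of the baseset behaviour described above, assuming the module import below:

    from mercurial import revset
    s = revset.baseset([4, 2, 7])
    s.ascending()                   # in-place sort -> [2, 4, 7]
    s & revset.baseset([7, 2, 9])   # -> baseset([2, 7]), left-hand order preserved
    2 in s.set()                    # membership testing goes through the cached set
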
2276 2280 class _orderedsetmixin(object):
2277 2281 """Mixin class with utility methods for smartsets
2278 2282
2279 2283 This should be extended by smartsets which have the isascending(),
2280 2284 isdescending() and reverse() methods"""
2281 2285
2282 2286 def _first(self):
2283 2287 """return the first revision in the set"""
2284 2288 for r in self:
2285 2289 return r
2286 2290 return None
2287 2291
2288 2292 def _last(self):
2289 2293 """return the last revision in the set"""
2290 2294 self.reverse()
2291 2295 m = self._first()
2292 2296 self.reverse()
2293 2297 return m
2294 2298
2295 2299 def min(self):
2296 2300 """return the smallest element in the set"""
2297 2301 if self.isascending():
2298 2302 return self._first()
2299 2303 return self._last()
2300 2304
2301 2305 def max(self):
2302 2306 """return the largest element in the set"""
2303 2307 if self.isascending():
2304 2308 return self._last()
2305 2309 return self._first()
2306 2310
2307 2311 class lazyset(object):
2308 2312 """Duck type for baseset class which iterates lazily over the revisions in
2309 2313 the subset and contains a function which tests for membership in the
2310 2314 revset
2311 2315 """
2312 2316 def __init__(self, subset, condition=lambda x: True):
2313 2317 """
2314 2318 condition: a function that decides whether a revision in the subset
2315 2319 belongs to the revset or not.
2316 2320 """
2317 2321 self._subset = subset
2318 2322 self._condition = condition
2319 2323 self._cache = {}
2320 2324
2321 2325 def ascending(self):
2322 2326 self._subset.sort()
2323 2327
2324 2328 def descending(self):
2325 2329 self._subset.sort(reverse=True)
2326 2330
2327 2331 def min(self):
2328 2332 return min(self)
2329 2333
2330 2334 def max(self):
2331 2335 return max(self)
2332 2336
2333 2337 def __contains__(self, x):
2334 2338 c = self._cache
2335 2339 if x not in c:
2336 2340 c[x] = x in self._subset and self._condition(x)
2337 2341 return c[x]
2338 2342
2339 2343 def __iter__(self):
2340 2344 cond = self._condition
2341 2345 for x in self._subset:
2342 2346 if cond(x):
2343 2347 yield x
2344 2348
2345 2349 def __and__(self, x):
2346 2350 return lazyset(self, lambda r: r in x)
2347 2351
2348 2352 def __sub__(self, x):
2349 2353 return lazyset(self, lambda r: r not in x)
2350 2354
2351 2355 def __add__(self, x):
2352 2356 return _addset(self, x)
2353 2357
2354 2358 def __nonzero__(self):
2355 2359 for r in self:
2356 2360 return True
2357 2361 return False
2358 2362
2359 2363 def __len__(self):
2360 2364 # Basic implementation to be changed in future patches.
2361 2365 l = baseset([r for r in self])
2362 2366 return len(l)
2363 2367
2364 2368 def __getitem__(self, x):
2365 2369 # Basic implementation to be changed in future patches.
2366 2370 l = baseset([r for r in self])
2367 2371 return l[x]
2368 2372
2369 2373 def sort(self, reverse=False):
2370 2374 if not util.safehasattr(self._subset, 'sort'):
2371 2375 self._subset = baseset(self._subset)
2372 2376 self._subset.sort(reverse=reverse)
2373 2377
2374 2378 def reverse(self):
2375 2379 self._subset.reverse()
2376 2380
2377 2381 def set(self):
2378 2382 return set([r for r in self])
2379 2383
2380 2384 def isascending(self):
2381 2385 return False
2382 2386
2383 2387 def isdescending(self):
2384 2388 return False
2385 2389
2386 2390 def filter(self, l):
2387 2391 return lazyset(self, l)
2388 2392
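A sketch of the lazy filtering described above:

    from mercurial import revset
    evens = revset.lazyset(revset.baseset([1, 2, 3, 4]), lambda r: r % 2 == 0)
    list(evens)    # -> [2, 4], produced lazily while iterating the subset
    3 in evens     # -> False; answers are cached per revision in __contains__
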
2389 2393 class orderedlazyset(_orderedsetmixin, lazyset):
2390 2394 """Subclass of lazyset which subset can be ordered either ascending or
2391 2395 descendingly
2392 2396 """
2393 2397 def __init__(self, subset, condition, ascending=True):
2394 2398 super(orderedlazyset, self).__init__(subset, condition)
2395 2399 self._ascending = ascending
2396 2400
2397 2401 def filter(self, l):
2398 2402 return orderedlazyset(self, l, ascending=self._ascending)
2399 2403
2400 2404 def ascending(self):
2401 2405 if not self._ascending:
2402 2406 self.reverse()
2403 2407
2404 2408 def descending(self):
2405 2409 if self._ascending:
2406 2410 self.reverse()
2407 2411
2408 2412 def __and__(self, x):
2409 2413 return orderedlazyset(self, lambda r: r in x,
2410 2414 ascending=self._ascending)
2411 2415
2412 2416 def __sub__(self, x):
2413 2417 return orderedlazyset(self, lambda r: r not in x,
2414 2418 ascending=self._ascending)
2415 2419
2416 2420 def __add__(self, x):
2417 2421 kwargs = {}
2418 2422 if self.isascending() and x.isascending():
2419 2423 kwargs['ascending'] = True
2420 2424 if self.isdescending() and x.isdescending():
2421 2425 kwargs['ascending'] = False
2422 2426 return _addset(self, x, **kwargs)
2423 2427
2424 2428 def sort(self, reverse=False):
2425 2429 if reverse:
2426 2430 if self._ascending:
2427 2431 self._subset.sort(reverse=reverse)
2428 2432 else:
2429 2433 if not self._ascending:
2430 2434 self._subset.sort(reverse=reverse)
2431 2435 self._ascending = not reverse
2432 2436
2433 2437 def isascending(self):
2434 2438 return self._ascending
2435 2439
2436 2440 def isdescending(self):
2437 2441 return not self._ascending
2438 2442
2439 2443 def reverse(self):
2440 2444 self._subset.reverse()
2441 2445 self._ascending = not self._ascending
2442 2446
2443 2447 class _addset(_orderedsetmixin):
2444 2448 """Represent the addition of two sets
2445 2449
2446 2450 Wrapper structure for lazily adding two structures without losing much
2447 2451 performance on the __contains__ method
2448 2452
2449 2453 If the ascending attribute is set, that means the two structures are
2450 2454 ordered in either an ascending or descending way. Therefore, we can add
2451 2455 them while maintaining the order by iterating over both at the same time
2452 2456
2453 2457 This class does not duck-type baseset and it's only supposed to be used
2454 2458 internally
2455 2459 """
2456 2460 def __init__(self, revs1, revs2, ascending=None):
2457 2461 self._r1 = revs1
2458 2462 self._r2 = revs2
2459 2463 self._iter = None
2460 2464 self._ascending = ascending
2461 2465 self._genlist = None
2462 2466
2463 2467 @util.propertycache
2464 2468 def _list(self):
2465 2469 if not self._genlist:
2466 2470 self._genlist = baseset(self._iterator())
2467 2471 return self._genlist
2468 2472
2469 2473 def filter(self, condition):
2470 2474 if self._ascending is not None:
2471 2475 return orderedlazyset(self, condition, ascending=self._ascending)
2472 2476 return lazyset(self, condition)
2473 2477
2474 2478 def ascending(self):
2475 2479 if self._ascending is None:
2476 2480 self.sort()
2477 2481 self._ascending = True
2478 2482 else:
2479 2483 if not self._ascending:
2480 2484 self.reverse()
2481 2485
2482 2486 def descending(self):
2483 2487 if self._ascending is None:
2484 2488 self.sort(reverse=True)
2485 2489 self._ascending = False
2486 2490 else:
2487 2491 if self._ascending:
2488 2492 self.reverse()
2489 2493
2490 2494 def __and__(self, other):
2491 2495 filterfunc = other.__contains__
2492 2496 if self._ascending is not None:
2493 2497 return orderedlazyset(self, filterfunc, ascending=self._ascending)
2494 2498 return lazyset(self, filterfunc)
2495 2499
2496 2500 def __sub__(self, other):
2497 2501 filterfunc = lambda r: r not in other
2498 2502 if self._ascending is not None:
2499 2503 return orderedlazyset(self, filterfunc, ascending=self._ascending)
2500 2504 return lazyset(self, filterfunc)
2501 2505
2502 2506 def __add__(self, other):
2503 2507 """When both collections are ascending or descending, preserve the order
2504 2508 """
2505 2509 kwargs = {}
2506 2510 if self._ascending is not None:
2507 2511 if self.isascending() and other.isascending():
2508 2512 kwargs['ascending'] = True
2509 2513 if self.isdescending() and other.isdescending():
2510 2514 kwargs['ascending'] = False
2511 2515 return _addset(self, other, **kwargs)
2512 2516
2513 2517 def _iterator(self):
2514 2518 """Iterate over both collections without repeating elements
2515 2519
2516 2520 If the ascending attribute is not set, iterate over the first one and
2517 2521 then over the second one checking for membership on the first one so we
2518 2522 don't yield any duplicates.
2519 2523
2520 2524 If the ascending attribute is set, iterate over both collections at the
2521 2525 same time, yielding only one value at a time in the given order.
2522 2526 """
2523 2527 if not self._iter:
2524 2528 def gen():
2525 2529 if self._ascending is None:
2526 2530 for r in self._r1:
2527 2531 yield r
2528 2532 s = self._r1.set()
2529 2533 for r in self._r2:
2530 2534 if r not in s:
2531 2535 yield r
2532 2536 else:
2533 2537 iter1 = iter(self._r1)
2534 2538 iter2 = iter(self._r2)
2535 2539
2536 2540 val1 = None
2537 2541 val2 = None
2538 2542
2539 2543 choice = max
2540 2544 if self._ascending:
2541 2545 choice = min
2542 2546 try:
2543 2547 # Consume both iterators in an ordered way until one is
2544 2548 # empty
2545 2549 while True:
2546 2550 if val1 is None:
2547 2551 val1 = iter1.next()
2548 2552 if val2 is None:
2549 2553 val2 = iter2.next()
2550 2554 next = choice(val1, val2)
2551 2555 yield next
2552 2556 if val1 == next:
2553 2557 val1 = None
2554 2558 if val2 == next:
2555 2559 val2 = None
2556 2560 except StopIteration:
2557 2561 # Flush any remaining values and consume the other one
2558 2562 it = iter2
2559 2563 if val1 is not None:
2560 2564 yield val1
2561 2565 it = iter1
2562 2566 elif val2 is not None:
2563 2567 # might have been equality and both are empty
2564 2568 yield val2
2565 2569 for val in it:
2566 2570 yield val
2567 2571
2568 2572 self._iter = _generatorset(gen())
2569 2573
2570 2574 return self._iter
2571 2575
2572 2576 def __iter__(self):
2573 2577 if self._genlist:
2574 2578 return iter(self._genlist)
2575 2579 return iter(self._iterator())
2576 2580
2577 2581 def __contains__(self, x):
2578 2582 return x in self._r1 or x in self._r2
2579 2583
2580 2584 def set(self):
2581 2585 return self
2582 2586
2583 2587 def sort(self, reverse=False):
2584 2588 """Sort the added set
2585 2589
2586 2590 For this we use the cached list with all the generated values and if we
2587 2591 know they are ascending or descending we can sort them in a smart way.
2588 2592 """
2589 2593 if self._ascending is None:
2590 2594 self._list.sort(reverse=reverse)
2591 2595 self._ascending = not reverse
2592 2596 else:
2593 2597 if bool(self._ascending) == bool(reverse):
2594 2598 self.reverse()
2595 2599
2596 2600 def isascending(self):
2597 2601 return self._ascending is not None and self._ascending
2598 2602
2599 2603 def isdescending(self):
2600 2604 return self._ascending is not None and not self._ascending
2601 2605
2602 2606 def reverse(self):
2603 2607 self._list.reverse()
2604 2608 if self._ascending is not None:
2605 2609 self._ascending = not self._ascending
2606 2610
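A sketch of the lazy union: with no ascending hint the first collection is iterated in full, then the unseen values of the second:

    from mercurial import revset
    s = revset._addset(revset.baseset([1, 3]), revset.baseset([2, 3]))
    list(s)    # -> [1, 3, 2]; duplicates from the second operand are skipped
    3 in s     # True, answered by checking membership in either operand
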
2607 2611 class _generatorset(object):
2608 2612 """Wrap a generator for lazy iteration
2609 2613
2610 2614 Wrapper structure for generators that provides lazy membership and can
2611 2615 be iterated more than once.
2612 2616 When asked for membership it generates values until either it finds the
2613 2617 requested one or has gone through all the elements in the generator
2614 2618
2615 2619 This class does not duck-type baseset and it's only supposed to be used
2616 2620 internally
2617 2621 """
2618 2622 def __init__(self, gen):
2619 2623 """
2620 2624 gen: a generator producing the values for the generatorset.
2621 2625 """
2622 2626 self._gen = gen
2623 2627 self._iter = iter(gen)
2624 2628 self._cache = {}
2625 2629 self._genlist = baseset([])
2626 2630 self._iterated = False
2627 2631 self._finished = False
2628 2632
2629 2633 def __contains__(self, x):
2630 2634 if x in self._cache:
2631 2635 return self._cache[x]
2632 2636
2633 2637 # Use new values only, as existing values would be cached.
2634 2638 for l in self._consumegen():
2635 2639 if l == x:
2636 2640 return True
2637 2641
2638 2642 self._finished = True
2639 2643 self._cache[x] = False
2640 2644 return False
2641 2645
2642 2646 def __iter__(self):
2643 2647 if self._iterated:
2644 2648 # At least a part of the list should be cached if iteration has
2645 2649 # started over the generatorset.
2646 2650 for l in self._genlist:
2647 2651 yield l
2648 2652
2649 2653 for item in self._consumegen():
2650 2654 yield item
2651 2655
2652 2656 def _consumegen(self):
2653 2657 self._iterated = True
2654 2658
2655 2659 for item in self._gen:
2656 2660 self._cache[item] = True
2657 2661 self._genlist.append(item)
2658 2662 yield item
2659 2663
2660 2664 self._finished = True
2661 2665
2662 2666 def set(self):
2663 2667 return self
2664 2668
2665 2669 def sort(self, reverse=False):
2666 2670 if not self._finished:
2667 2671 for i in self:
2668 2672 continue
2669 2673 self._genlist.sort(reverse=reverse)
2670 2674
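A sketch of the lazy membership test described above:

    from mercurial import revset
    g = revset._generatorset(r for r in [5, 1, 3])
    3 in g     # consumes the generator until 3 is produced; results are cached
    list(g)    # values produced so far are replayed from the internal list
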
2671 2675 class _ascgeneratorset(_generatorset):
2672 2676 """Wrap a generator of ascending elements for lazy iteration
2673 2677
2674 2678 Same structure as _generatorset, but when asked for membership it stops
2675 2679 iterating as soon as it goes past the requested value without finding it
2676 2680
2677 2681 This class does not duck-type baseset and it's only supposed to be used
2678 2682 internally
2679 2683 """
2680 2684 def __contains__(self, x):
2681 2685 if x in self._cache:
2682 2686 return self._cache[x]
2683 2687
2684 2688 # Use new values only, as existing values would be cached.
2685 2689 for l in self._consumegen():
2686 2690 if l == x:
2687 2691 return True
2688 2692 if l > x:
2689 2693 break
2690 2694
2691 2695 self._cache[x] = False
2692 2696 return False
2693 2697
2694 2698 class _descgeneratorset(_generatorset):
2695 2699 """Wrap a generator of descending elements for lazy iteration
2696 2700
2697 2701 Same structure as _generatorset, but when asked for membership it stops
2698 2702 iterating as soon as it goes past the requested value without finding it
2699 2703
2700 2704 This class does not duck-type baseset and it's only supposed to be used
2701 2705 internally
2702 2706 """
2703 2707 def __contains__(self, x):
2704 2708 if x in self._cache:
2705 2709 return self._cache[x]
2706 2710
2707 2711 # Use new values only, as existing values would be cached.
2708 2712 for l in self._consumegen():
2709 2713 if l == x:
2710 2714 return True
2711 2715 if l < x:
2712 2716 break
2713 2717
2714 2718 self._cache[x] = False
2715 2719 return False
2716 2720
2717 2721 class spanset(_orderedsetmixin):
2718 2722 """Duck type for baseset class which represents a range of revisions and
2719 2723 can work lazily and without having all the range in memory
2720 2724
2721 2725 Note that spanset(x, y) behaves almost like xrange(x, y) except for two
2722 2726 notable points:
2723 2727 - when x > y it is automatically descending instead of empty,
2724 2728 - revisions filtered by the current repoview will be skipped.
2725 2729
2726 2730 """
2727 2731 def __init__(self, repo, start=0, end=None):
2728 2732 """
2729 2733 start: first revision included in the set
2730 2734 (defaults to 0)
2731 2735 end: first revision excluded (last + 1)
2732 2736 (defaults to len(repo))
2733 2737
2734 2738 Spanset will be descending if `end` < `start`.
2735 2739 """
2736 2740 self._start = start
2737 2741 if end is not None:
2738 2742 self._end = end
2739 2743 else:
2740 2744 self._end = len(repo)
2741 2745 self._hiddenrevs = repo.changelog.filteredrevs
2742 2746
2743 2747 def ascending(self):
2744 2748 if self._start > self._end:
2745 2749 self.reverse()
2746 2750
2747 2751 def descending(self):
2748 2752 if self._start < self._end:
2749 2753 self.reverse()
2750 2754
2751 2755 def _contained(self, rev):
2752 2756 return (rev <= self._start and rev > self._end) or (rev >= self._start
2753 2757 and rev < self._end)
2754 2758
2755 2759 def __iter__(self):
2756 2760 if self._start <= self._end:
2757 2761 iterrange = xrange(self._start, self._end)
2758 2762 else:
2759 2763 iterrange = xrange(self._start, self._end, -1)
2760 2764
2761 2765 if self._hiddenrevs:
2762 2766 s = self._hiddenrevs
2763 2767 for r in iterrange:
2764 2768 if r not in s:
2765 2769 yield r
2766 2770 else:
2767 2771 for r in iterrange:
2768 2772 yield r
2769 2773
2770 2774 def __contains__(self, x):
2770 2774 return self._contained(x) and not (self._hiddenrevs and x in
2772 2776 self._hiddenrevs)
2773 2777
2774 2778 def __nonzero__(self):
2775 2779 for r in self:
2776 2780 return True
2777 2781 return False
2778 2782
2779 2783 def __and__(self, x):
2780 2784 if isinstance(x, baseset):
2781 2785 x = x.set()
2782 2786 if self._start <= self._end:
2783 2787 return orderedlazyset(self, lambda r: r in x)
2784 2788 else:
2785 2789 return orderedlazyset(self, lambda r: r in x, ascending=False)
2786 2790
2787 2791 def __sub__(self, x):
2788 2792 if isinstance(x, baseset):
2789 2793 x = x.set()
2790 2794 if self._start <= self._end:
2791 2795 return orderedlazyset(self, lambda r: r not in x)
2792 2796 else:
2793 2797 return orderedlazyset(self, lambda r: r not in x, ascending=False)
2794 2798
2795 2799 def __add__(self, x):
2796 2800 kwargs = {}
2797 2801 if self.isascending() and x.isascending():
2798 2802 kwargs['ascending'] = True
2799 2803 if self.isdescending() and x.isdescending():
2800 2804 kwargs['ascending'] = False
2801 2805 return _addset(self, x, **kwargs)
2802 2806
2803 2807 def __len__(self):
2804 2808 if not self._hiddenrevs:
2805 2809 return abs(self._end - self._start)
2806 2810 else:
2807 2811 count = 0
2808 2812 for rev in self._hiddenrevs:
2809 2813 if self._contained(rev):
2810 2814 count += 1
2811 2815 return abs(self._end - self._start) - count
2812 2816
2813 2817 def __getitem__(self, x):
2814 2818 # Basic implementation to be changed in future patches.
2815 2819 l = baseset([r for r in self])
2816 2820 return l[x]
2817 2821
2818 2822 def sort(self, reverse=False):
2819 2823 if bool(reverse) != (self._start > self._end):
2820 2824 self.reverse()
2821 2825
2822 2826 def reverse(self):
2823 2827 # Just switch the _start and _end parameters
2824 2828 if self._start <= self._end:
2825 2829 self._start, self._end = self._end - 1, self._start - 1
2826 2830 else:
2827 2831 self._start, self._end = self._end + 1, self._start + 1
2828 2832
2829 2833 def set(self):
2830 2834 return self
2831 2835
2832 2836 def isascending(self):
2833 2837 return self._start < self._end
2834 2838
2835 2839 def isdescending(self):
2836 2840 return self._start > self._end
2837 2841
2838 2842 def filter(self, l):
2839 2843 if self._start <= self._end:
2840 2844 return orderedlazyset(self, l)
2841 2845 else:
2842 2846 return orderedlazyset(self, l, ascending=False)
2843 2847
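A hedged sketch of spanset semantics, assuming a repository with at least five visible (unfiltered) revisions:

    from mercurial import revset
    list(revset.spanset(repo, 0, 5))    # -> [0, 1, 2, 3, 4], ascending
    list(revset.spanset(repo, 4, -1))   # -> [4, 3, 2, 1, 0], descending when end < start
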
2844 2848 # tell hggettext to extract docstrings from these functions:
2845 2849 i18nfunctions = symbols.values()
@@ -1,623 +1,637 b''
1 1 Prepare repo a:
2 2
3 3 $ hg init a
4 4 $ cd a
5 5 $ echo a > a
6 6 $ hg add a
7 7 $ hg commit -m test
8 8 $ echo first line > b
9 9 $ hg add b
10 10
11 11 Create a non-inlined filelog:
12 12
13 13 $ python -c 'file("data1", "wb").write("".join("%s\n" % x for x in range(10000)))'
14 14 $ for j in 0 1 2 3 4 5 6 7 8 9; do
15 15 > cat data1 >> b
16 16 > hg commit -m test
17 17 > done
18 18
19 19 List files in store/data (should show a 'b.d'):
20 20
21 21 $ for i in .hg/store/data/*; do
22 22 > echo $i
23 23 > done
24 24 .hg/store/data/a.i
25 25 .hg/store/data/b.d
26 26 .hg/store/data/b.i
27 27
28 28 Default operation:
29 29
30 30 $ hg clone . ../b
31 31 updating to branch default
32 32 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
33 33 $ cd ../b
34 34 $ cat a
35 35 a
36 36 $ hg verify
37 37 checking changesets
38 38 checking manifests
39 39 crosschecking files in changesets and manifests
40 40 checking files
41 41 2 files, 11 changesets, 11 total revisions
42 42
43 43 Invalid dest '' must abort:
44 44
45 45 $ hg clone . ''
46 46 abort: empty destination path is not valid
47 47 [255]
48 48
49 49 No update, with debug option:
50 50
51 51 #if hardlink
52 52 $ hg --debug clone -U . ../c
53 53 linked 8 files
54 54 listing keys for "bookmarks"
55 55 #else
56 56 $ hg --debug clone -U . ../c
57 57 copied 8 files
58 58 listing keys for "bookmarks"
59 59 #endif
60 60 $ cd ../c
61 61 $ cat a 2>/dev/null || echo "a not present"
62 62 a not present
63 63 $ hg verify
64 64 checking changesets
65 65 checking manifests
66 66 crosschecking files in changesets and manifests
67 67 checking files
68 68 2 files, 11 changesets, 11 total revisions
69 69
70 70 Default destination:
71 71
72 72 $ mkdir ../d
73 73 $ cd ../d
74 74 $ hg clone ../a
75 75 destination directory: a
76 76 updating to branch default
77 77 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 78 $ cd a
79 79 $ hg cat a
80 80 a
81 81 $ cd ../..
82 82
83 83 Check that we drop the 'file:' from the path before writing the .hgrc:
84 84
85 85 $ hg clone file:a e
86 86 updating to branch default
87 87 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 88 $ grep 'file:' e/.hg/hgrc
89 89 [1]
90 90
91 91 Check that path aliases are expanded:
92 92
93 93 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
94 94 $ hg -R f showconfig paths.default
95 95 $TESTTMP/a#0 (glob)
96 96
97 97 Use --pull:
98 98
99 99 $ hg clone --pull a g
100 100 requesting all changes
101 101 adding changesets
102 102 adding manifests
103 103 adding file changes
104 104 added 11 changesets with 11 changes to 2 files
105 105 updating to branch default
106 106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 107 $ hg -R g verify
108 108 checking changesets
109 109 checking manifests
110 110 crosschecking files in changesets and manifests
111 111 checking files
112 112 2 files, 11 changesets, 11 total revisions
113 113
114 114 Invalid dest '' with --pull must abort (issue2528):
115 115
116 116 $ hg clone --pull a ''
117 117 abort: empty destination path is not valid
118 118 [255]
119 119
120 120 Clone to '.':
121 121
122 122 $ mkdir h
123 123 $ cd h
124 124 $ hg clone ../a .
125 125 updating to branch default
126 126 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
127 127 $ cd ..
128 128
129 129
130 130 *** Tests for option -u ***
131 131
132 132 Adding some more history to repo a:
133 133
134 134 $ cd a
135 135 $ hg tag ref1
136 136 $ echo the quick brown fox >a
137 137 $ hg ci -m "hacked default"
138 138 $ hg up ref1
139 139 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
140 140 $ hg branch stable
141 141 marked working directory as branch stable
142 142 (branches are permanent and global, did you want a bookmark?)
143 143 $ echo some text >a
144 144 $ hg ci -m "starting branch stable"
145 145 $ hg tag ref2
146 146 $ echo some more text >a
147 147 $ hg ci -m "another change for branch stable"
148 148 $ hg up ref2
149 149 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
150 150 $ hg parents
151 151 changeset: 13:e8ece76546a6
152 152 branch: stable
153 153 tag: ref2
154 154 parent: 10:a7949464abda
155 155 user: test
156 156 date: Thu Jan 01 00:00:00 1970 +0000
157 157 summary: starting branch stable
158 158
159 159
160 160 Repo a has two heads:
161 161
162 162 $ hg heads
163 163 changeset: 15:0aae7cf88f0d
164 164 branch: stable
165 165 tag: tip
166 166 user: test
167 167 date: Thu Jan 01 00:00:00 1970 +0000
168 168 summary: another change for branch stable
169 169
170 170 changeset: 12:f21241060d6a
171 171 user: test
172 172 date: Thu Jan 01 00:00:00 1970 +0000
173 173 summary: hacked default
174 174
175 175
176 176 $ cd ..
177 177
178 178
179 179 Testing --noupdate with --updaterev (must abort):
180 180
181 181 $ hg clone --noupdate --updaterev 1 a ua
182 182 abort: cannot specify both --noupdate and --updaterev
183 183 [255]
184 184
185 185
186 186 Testing clone -u:
187 187
188 188 $ hg clone -u . a ua
189 189 updating to branch stable
190 190 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
191 191
192 192 Repo ua has both heads:
193 193
194 194 $ hg -R ua heads
195 195 changeset: 15:0aae7cf88f0d
196 196 branch: stable
197 197 tag: tip
198 198 user: test
199 199 date: Thu Jan 01 00:00:00 1970 +0000
200 200 summary: another change for branch stable
201 201
202 202 changeset: 12:f21241060d6a
203 203 user: test
204 204 date: Thu Jan 01 00:00:00 1970 +0000
205 205 summary: hacked default
206 206
207 207
208 208 Same revision checked out in repo a and ua:
209 209
210 210 $ hg -R a parents --template "{node|short}\n"
211 211 e8ece76546a6
212 212 $ hg -R ua parents --template "{node|short}\n"
213 213 e8ece76546a6
214 214
215 215 $ rm -r ua
216 216
217 217
218 218 Testing clone --pull -u:
219 219
220 220 $ hg clone --pull -u . a ua
221 221 requesting all changes
222 222 adding changesets
223 223 adding manifests
224 224 adding file changes
225 225 added 16 changesets with 16 changes to 3 files (+1 heads)
226 226 updating to branch stable
227 227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
228 228
229 229 Repo ua has both heads:
230 230
231 231 $ hg -R ua heads
232 232 changeset: 15:0aae7cf88f0d
233 233 branch: stable
234 234 tag: tip
235 235 user: test
236 236 date: Thu Jan 01 00:00:00 1970 +0000
237 237 summary: another change for branch stable
238 238
239 239 changeset: 12:f21241060d6a
240 240 user: test
241 241 date: Thu Jan 01 00:00:00 1970 +0000
242 242 summary: hacked default
243 243
244 244
245 245 Same revision checked out in repo a and ua:
246 246
247 247 $ hg -R a parents --template "{node|short}\n"
248 248 e8ece76546a6
249 249 $ hg -R ua parents --template "{node|short}\n"
250 250 e8ece76546a6
251 251
252 252 $ rm -r ua
253 253
254 254
255 255 Testing clone -u <branch>:
256 256
257 257 $ hg clone -u stable a ua
258 258 updating to branch stable
259 259 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
260 260
261 261 Repo ua has both heads:
262 262
263 263 $ hg -R ua heads
264 264 changeset: 15:0aae7cf88f0d
265 265 branch: stable
266 266 tag: tip
267 267 user: test
268 268 date: Thu Jan 01 00:00:00 1970 +0000
269 269 summary: another change for branch stable
270 270
271 271 changeset: 12:f21241060d6a
272 272 user: test
273 273 date: Thu Jan 01 00:00:00 1970 +0000
274 274 summary: hacked default
275 275
276 276
277 277 Branch 'stable' is checked out:
278 278
279 279 $ hg -R ua parents
280 280 changeset: 15:0aae7cf88f0d
281 281 branch: stable
282 282 tag: tip
283 283 user: test
284 284 date: Thu Jan 01 00:00:00 1970 +0000
285 285 summary: another change for branch stable
286 286
287 287
288 288 $ rm -r ua
289 289
290 290
291 291 Testing default checkout:
292 292
293 293 $ hg clone a ua
294 294 updating to branch default
295 295 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
296 296
297 297 Repo ua has both heads:
298 298
299 299 $ hg -R ua heads
300 300 changeset: 15:0aae7cf88f0d
301 301 branch: stable
302 302 tag: tip
303 303 user: test
304 304 date: Thu Jan 01 00:00:00 1970 +0000
305 305 summary: another change for branch stable
306 306
307 307 changeset: 12:f21241060d6a
308 308 user: test
309 309 date: Thu Jan 01 00:00:00 1970 +0000
310 310 summary: hacked default
311 311
312 312
313 313 Branch 'default' is checked out:
314 314
315 315 $ hg -R ua parents
316 316 changeset: 12:f21241060d6a
317 317 user: test
318 318 date: Thu Jan 01 00:00:00 1970 +0000
319 319 summary: hacked default
320 320
321 321 Test clone with a branch named "@" (issue3677)
322 322
323 323 $ hg -R ua branch @
324 324 marked working directory as branch @
325 325 (branches are permanent and global, did you want a bookmark?)
326 326 $ hg -R ua commit -m 'created branch @'
327 327 $ hg clone ua atbranch
328 328 updating to branch default
329 329 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
330 330 $ hg -R atbranch heads
331 331 changeset: 16:798b6d97153e
332 332 branch: @
333 333 tag: tip
334 334 parent: 12:f21241060d6a
335 335 user: test
336 336 date: Thu Jan 01 00:00:00 1970 +0000
337 337 summary: created branch @
338 338
339 339 changeset: 15:0aae7cf88f0d
340 340 branch: stable
341 341 user: test
342 342 date: Thu Jan 01 00:00:00 1970 +0000
343 343 summary: another change for branch stable
344 344
345 345 changeset: 12:f21241060d6a
346 346 user: test
347 347 date: Thu Jan 01 00:00:00 1970 +0000
348 348 summary: hacked default
349 349
350 350 $ hg -R atbranch parents
351 351 changeset: 12:f21241060d6a
352 352 user: test
353 353 date: Thu Jan 01 00:00:00 1970 +0000
354 354 summary: hacked default
355 355
356 356
357 357 $ rm -r ua atbranch
358 358
359 359
360 360 Testing #<branch>:
361 361
362 362 $ hg clone -u . a#stable ua
363 363 adding changesets
364 364 adding manifests
365 365 adding file changes
366 366 added 14 changesets with 14 changes to 3 files
367 367 updating to branch stable
368 368 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
369 369
370 370 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
371 371
372 372 $ hg -R ua heads
373 373 changeset: 13:0aae7cf88f0d
374 374 branch: stable
375 375 tag: tip
376 376 user: test
377 377 date: Thu Jan 01 00:00:00 1970 +0000
378 378 summary: another change for branch stable
379 379
380 380 changeset: 10:a7949464abda
381 381 user: test
382 382 date: Thu Jan 01 00:00:00 1970 +0000
383 383 summary: test
384 384
385 385
386 386 Same revision checked out in repo a and ua:
387 387
388 388 $ hg -R a parents --template "{node|short}\n"
389 389 e8ece76546a6
390 390 $ hg -R ua parents --template "{node|short}\n"
391 391 e8ece76546a6
392 392
393 393 $ rm -r ua
394 394
395 395
396 396 Testing -u -r <branch>:
397 397
398 398 $ hg clone -u . -r stable a ua
399 399 adding changesets
400 400 adding manifests
401 401 adding file changes
402 402 added 14 changesets with 14 changes to 3 files
403 403 updating to branch stable
404 404 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
405 405
406 406 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
407 407
408 408 $ hg -R ua heads
409 409 changeset: 13:0aae7cf88f0d
410 410 branch: stable
411 411 tag: tip
412 412 user: test
413 413 date: Thu Jan 01 00:00:00 1970 +0000
414 414 summary: another change for branch stable
415 415
416 416 changeset: 10:a7949464abda
417 417 user: test
418 418 date: Thu Jan 01 00:00:00 1970 +0000
419 419 summary: test
420 420
421 421
422 422 Same revision checked out in repo a and ua:
423 423
424 424 $ hg -R a parents --template "{node|short}\n"
425 425 e8ece76546a6
426 426 $ hg -R ua parents --template "{node|short}\n"
427 427 e8ece76546a6
428 428
429 429 $ rm -r ua
430 430
431 431
432 432 Testing -r <branch>:
433 433
434 434 $ hg clone -r stable a ua
435 435 adding changesets
436 436 adding manifests
437 437 adding file changes
438 438 added 14 changesets with 14 changes to 3 files
439 439 updating to branch stable
440 440 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
441 441
442 442 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
443 443
444 444 $ hg -R ua heads
445 445 changeset: 13:0aae7cf88f0d
446 446 branch: stable
447 447 tag: tip
448 448 user: test
449 449 date: Thu Jan 01 00:00:00 1970 +0000
450 450 summary: another change for branch stable
451 451
452 452 changeset: 10:a7949464abda
453 453 user: test
454 454 date: Thu Jan 01 00:00:00 1970 +0000
455 455 summary: test
456 456
457 457
458 458 Branch 'stable' is checked out:
459 459
460 460 $ hg -R ua parents
461 461 changeset: 13:0aae7cf88f0d
462 462 branch: stable
463 463 tag: tip
464 464 user: test
465 465 date: Thu Jan 01 00:00:00 1970 +0000
466 466 summary: another change for branch stable
467 467
468 468
469 469 $ rm -r ua
470 470
471 471
472 472 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
473 473 iterable in addbranchrevs()
474 474
475 475 $ cat <<EOF > simpleclone.py
476 476 > from mercurial import ui, hg
477 477 > myui = ui.ui()
478 478 > repo = hg.repository(myui, 'a')
479 479 > hg.clone(myui, {}, repo, dest="ua")
480 480 > EOF
481 481
482 482 $ python simpleclone.py
483 483 updating to branch default
484 484 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
485 485
486 486 $ rm -r ua
487 487
488 488 $ cat <<EOF > branchclone.py
489 489 > from mercurial import ui, hg, extensions
490 490 > myui = ui.ui()
491 491 > extensions.loadall(myui)
492 492 > repo = hg.repository(myui, 'a')
493 493 > hg.clone(myui, {}, repo, dest="ua", branch=["stable",])
494 494 > EOF
495 495
496 496 $ python branchclone.py
497 497 adding changesets
498 498 adding manifests
499 499 adding file changes
500 500 added 14 changesets with 14 changes to 3 files
501 501 updating to branch stable
502 502 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
503 503 $ rm -r ua
504 504
505 505
506 506 Test clone with special '@' bookmark:
507 507 $ cd a
508 508 $ hg bookmark -r a7949464abda @ # branch point of stable from default
509 509 $ hg clone . ../i
510 510 updating to bookmark @
511 511 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
512 512 $ hg id -i ../i
513 513 a7949464abda
514 514 $ rm -r ../i
515 515
516 516 $ hg bookmark -f -r stable @
517 517 $ hg bookmarks
518 518 @ 15:0aae7cf88f0d
519 519 $ hg clone . ../i
520 520 updating to bookmark @ on branch stable
521 521 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
522 522 $ hg id -i ../i
523 523 0aae7cf88f0d
524 524 $ cd "$TESTTMP"
525 525
526 526
527 527 Testing failures:
528 528
529 529 $ mkdir fail
530 530 $ cd fail
531 531
532 532 No local source
533 533
534 534 $ hg clone a b
535 535 abort: repository a not found!
536 536 [255]
537 537
538 538 No remote source
539 539
540 540 $ hg clone http://127.0.0.1:3121/a b
541 541 abort: error: *refused* (glob)
542 542 [255]
543 543 $ rm -rf b # work around bug with http clone
544 544
545 545
546 546 #if unix-permissions no-root
547 547
548 548 Inaccessible source
549 549
550 550 $ mkdir a
551 551 $ chmod 000 a
552 552 $ hg clone a b
553 553 abort: repository a not found!
554 554 [255]
555 555
556 556 Inaccessible destination
557 557
558 558 $ hg init b
559 559 $ cd b
560 560 $ hg clone . ../a
561 561 abort: Permission denied: '../a'
562 562 [255]
563 563 $ cd ..
564 564 $ chmod 700 a
565 565 $ rm -r a b
566 566
567 567 #endif
568 568
569 569
570 570 #if fifo
571 571
572 572 Source of wrong type
573 573
574 574 $ mkfifo a
575 575 $ hg clone a b
576 576 abort: repository a not found!
577 577 [255]
578 578 $ rm a
579 579
580 580 #endif
581 581
582 582 Default destination, same directory
583 583
584 584 $ hg init q
585 585 $ hg clone q
586 586 destination directory: q
587 587 abort: destination 'q' is not empty
588 588 [255]
589 589
590 590 destination directory not empty
591 591
592 592 $ mkdir a
593 593 $ echo stuff > a/a
594 594 $ hg clone q a
595 595 abort: destination 'a' is not empty
596 596 [255]
597 597
598 598
599 599 #if unix-permissions no-root
600 600
601 601 leave existing directory in place after clone failure
602 602
603 603 $ hg init c
604 604 $ cd c
605 605 $ echo c > c
606 606 $ hg commit -A -m test
607 607 adding c
608 608 $ chmod -rx .hg/store/data
609 609 $ cd ..
610 610 $ mkdir d
611 611 $ hg clone c d 2> err
612 612 [255]
613 613 $ test -d d
614 614 $ test -d d/.hg
615 615 [1]
616 616
617 617 re-enable perm to allow deletion
618 618
619 619 $ chmod +rx c/.hg/store/data
620 620
621 621 #endif
622 622
623 623 $ cd ..
624
625 Test clone from the repository in (emulated) revlog format 0 (issue4203):
626
627 $ mkdir issue4203
628 $ mkdir -p src/.hg
629 $ echo foo > src/foo
630 $ hg -R src add src/foo
631 $ hg -R src commit -m '#0'
632 $ hg -R src log -q
633 0:e1bab28bca43
634 $ hg clone -U -q src dst
635 $ hg -R dst log -q
636 0:e1bab28bca43
637 $ cd ..