obsolete: compute extinct changesets...
Pierre-Yves David - r17173:c621f84d default
@@ -1,1299 +1,1314 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, short, hex, bin
9 9 from i18n import _
10 10 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 11 import copies
12 12 import match as matchmod
13 13 import os, errno, stat
14 14
15 15 propertycache = util.propertycache
16 16
17 17 class changectx(object):
18 18 """A changecontext object makes access to data related to a particular
19 19 changeset convenient."""
20 20 def __init__(self, repo, changeid=''):
21 21 """changeid is a revision number, node, or tag"""
22 22 if changeid == '':
23 23 changeid = '.'
24 24 self._repo = repo
25 25
26 26 if isinstance(changeid, int):
27 27 self._rev = changeid
28 28 self._node = repo.changelog.node(changeid)
29 29 return
30 30 if isinstance(changeid, long):
31 31 changeid = str(changeid)
32 32 if changeid == '.':
33 33 self._node = repo.dirstate.p1()
34 34 self._rev = repo.changelog.rev(self._node)
35 35 return
36 36 if changeid == 'null':
37 37 self._node = nullid
38 38 self._rev = nullrev
39 39 return
40 40 if changeid == 'tip':
41 41 self._rev = len(repo.changelog) - 1
42 42 self._node = repo.changelog.node(self._rev)
43 43 return
44 44 if len(changeid) == 20:
45 45 try:
46 46 self._node = changeid
47 47 self._rev = repo.changelog.rev(changeid)
48 48 return
49 49 except LookupError:
50 50 pass
51 51
52 52 try:
53 53 r = int(changeid)
54 54 if str(r) != changeid:
55 55 raise ValueError
56 56 l = len(repo.changelog)
57 57 if r < 0:
58 58 r += l
59 59 if r < 0 or r >= l:
60 60 raise ValueError
61 61 self._rev = r
62 62 self._node = repo.changelog.node(r)
63 63 return
64 64 except (ValueError, OverflowError):
65 65 pass
66 66
67 67 if len(changeid) == 40:
68 68 try:
69 69 self._node = bin(changeid)
70 70 self._rev = repo.changelog.rev(self._node)
71 71 return
72 72 except (TypeError, LookupError):
73 73 pass
74 74
75 75 if changeid in repo._bookmarks:
76 76 self._node = repo._bookmarks[changeid]
77 77 self._rev = repo.changelog.rev(self._node)
78 78 return
79 79 if changeid in repo._tagscache.tags:
80 80 self._node = repo._tagscache.tags[changeid]
81 81 self._rev = repo.changelog.rev(self._node)
82 82 return
83 83 try:
84 84 self._node = repo.branchtip(changeid)
85 85 self._rev = repo.changelog.rev(self._node)
86 86 return
87 87 except error.RepoLookupError:
88 88 pass
89 89
90 90 self._node = repo.changelog._partialmatch(changeid)
91 91 if self._node is not None:
92 92 self._rev = repo.changelog.rev(self._node)
93 93 return
94 94
95 95 # lookup failed
96 96 # check if it might have come from damaged dirstate
97 97 if changeid in repo.dirstate.parents():
98 98 raise error.Abort(_("working directory has unknown parent '%s'!")
99 99 % short(changeid))
100 100 try:
101 101 if len(changeid) == 20:
102 102 changeid = hex(changeid)
103 103 except TypeError:
104 104 pass
105 105 raise error.RepoLookupError(
106 106 _("unknown revision '%s'") % changeid)
107 107
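For illustration, a minimal sketch (not part of this changeset) of a few of the changeid forms the constructor accepts; the repository path is hypothetical:

    from mercurial import ui, hg
    repo = hg.repository(ui.ui(), '/path/to/repo')
    ctx1 = repo['.']            # dirstate parent
    ctx2 = repo[ctx1.rev()]     # revision number
    ctx3 = repo[ctx1.hex()]     # 40-character hex node
    assert ctx1 == ctx2 == ctx3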
108 108 def __str__(self):
109 109 return short(self.node())
110 110
111 111 def __int__(self):
112 112 return self.rev()
113 113
114 114 def __repr__(self):
115 115 return "<changectx %s>" % str(self)
116 116
117 117 def __hash__(self):
118 118 try:
119 119 return hash(self._rev)
120 120 except AttributeError:
121 121 return id(self)
122 122
123 123 def __eq__(self, other):
124 124 try:
125 125 return self._rev == other._rev
126 126 except AttributeError:
127 127 return False
128 128
129 129 def __ne__(self, other):
130 130 return not (self == other)
131 131
132 132 def __nonzero__(self):
133 133 return self._rev != nullrev
134 134
135 135 @propertycache
136 136 def _changeset(self):
137 137 return self._repo.changelog.read(self.rev())
138 138
139 139 @propertycache
140 140 def _manifest(self):
141 141 return self._repo.manifest.read(self._changeset[0])
142 142
143 143 @propertycache
144 144 def _manifestdelta(self):
145 145 return self._repo.manifest.readdelta(self._changeset[0])
146 146
147 147 @propertycache
148 148 def _parents(self):
149 149 p = self._repo.changelog.parentrevs(self._rev)
150 150 if p[1] == nullrev:
151 151 p = p[:-1]
152 152 return [changectx(self._repo, x) for x in p]
153 153
154 154 @propertycache
155 155 def substate(self):
156 156 return subrepo.state(self, self._repo.ui)
157 157
158 158 def __contains__(self, key):
159 159 return key in self._manifest
160 160
161 161 def __getitem__(self, key):
162 162 return self.filectx(key)
163 163
164 164 def __iter__(self):
165 165 for f in sorted(self._manifest):
166 166 yield f
167 167
168 168 def changeset(self):
169 169 return self._changeset
170 170 def manifest(self):
171 171 return self._manifest
172 172 def manifestnode(self):
173 173 return self._changeset[0]
174 174
175 175 def rev(self):
176 176 return self._rev
177 177 def node(self):
178 178 return self._node
179 179 def hex(self):
180 180 return hex(self._node)
181 181 def user(self):
182 182 return self._changeset[1]
183 183 def date(self):
184 184 return self._changeset[2]
185 185 def files(self):
186 186 return self._changeset[3]
187 187 def description(self):
188 188 return self._changeset[4]
189 189 def branch(self):
190 190 return encoding.tolocal(self._changeset[5].get("branch"))
191 191 def closesbranch(self):
192 192 return 'close' in self._changeset[5]
193 193 def extra(self):
194 194 return self._changeset[5]
195 195 def tags(self):
196 196 return self._repo.nodetags(self._node)
197 197 def bookmarks(self):
198 198 return self._repo.nodebookmarks(self._node)
199 199 def phase(self):
200 200 return self._repo._phasecache.phase(self._repo, self._rev)
201 201 def phasestr(self):
202 202 return phases.phasenames[self.phase()]
203 203 def mutable(self):
204 204 return self.phase() > phases.public
205 205 def hidden(self):
206 206 return self._rev in self._repo.changelog.hiddenrevs
207 207
208 208 def parents(self):
209 209 """return contexts for each parent changeset"""
210 210 return self._parents
211 211
212 212 def p1(self):
213 213 return self._parents[0]
214 214
215 215 def p2(self):
216 216 if len(self._parents) == 2:
217 217 return self._parents[1]
218 218 return changectx(self._repo, -1)
219 219
220 220 def children(self):
221 221 """return contexts for each child changeset"""
222 222 c = self._repo.changelog.children(self._node)
223 223 return [changectx(self._repo, x) for x in c]
224 224
225 225 def ancestors(self):
226 226 for a in self._repo.changelog.ancestors([self._rev]):
227 227 yield changectx(self._repo, a)
228 228
229 229 def descendants(self):
230 230 for d in self._repo.changelog.descendants([self._rev]):
231 231 yield changectx(self._repo, d)
232 232
233 233 def obsolete(self):
234 234 """True if the changeset is obsolete"""
235 235 return (self.node() in self._repo.obsstore.precursors
236 236 and self.phase() > phases.public)
237 237
238 def extinct(self):
239 """True if the changeset is extinct"""
240 # We should just compute a cache and check against it.
241 # see revset implementation for details
242 #
243 # But this naive implementation does not require cache
244 if self.phase() <= phases.public:
245 return False
246 if not self.obsolete():
247 return False
248 for desc in self.descendants():
249 if not desc.obsolete():
250 return False
251 return True
252
238 253 def unstable(self):
239 254 """True if the changeset is not obsolete but it's ancestor are"""
240 255 # We should just compute /(obsolete()::) - obsolete()/
241 256 # and keep it in a cache.
242 257 #
243 258 # But this naive implementation does not require cache
244 259 if self.phase() <= phases.public:
245 260 return False
246 261 if self.obsolete():
247 262 return False
248 263 for anc in self.ancestors():
249 264 if anc.obsolete():
250 265 return True
251 266 return False
252 267
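A minimal usage sketch (not part of this patch), assuming an existing localrepo object named repo, showing how these predicates can partition the mutable changesets:

    extinct, unstable = [], []
    for rev in repo.changelog:
        ctx = repo[rev]
        if ctx.extinct():
            extinct.append(rev)
        elif ctx.unstable():
            unstable.append(rev)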
253 268 def _fileinfo(self, path):
254 269 if '_manifest' in self.__dict__:
255 270 try:
256 271 return self._manifest[path], self._manifest.flags(path)
257 272 except KeyError:
258 273 raise error.LookupError(self._node, path,
259 274 _('not found in manifest'))
260 275 if '_manifestdelta' in self.__dict__ or path in self.files():
261 276 if path in self._manifestdelta:
262 277 return (self._manifestdelta[path],
263 278 self._manifestdelta.flags(path))
264 279 node, flag = self._repo.manifest.find(self._changeset[0], path)
265 280 if not node:
266 281 raise error.LookupError(self._node, path,
267 282 _('not found in manifest'))
268 283
269 284 return node, flag
270 285
271 286 def filenode(self, path):
272 287 return self._fileinfo(path)[0]
273 288
274 289 def flags(self, path):
275 290 try:
276 291 return self._fileinfo(path)[1]
277 292 except error.LookupError:
278 293 return ''
279 294
280 295 def filectx(self, path, fileid=None, filelog=None):
281 296 """get a file context from this changeset"""
282 297 if fileid is None:
283 298 fileid = self.filenode(path)
284 299 return filectx(self._repo, path, fileid=fileid,
285 300 changectx=self, filelog=filelog)
286 301
287 302 def ancestor(self, c2):
288 303 """
289 304 return the ancestor context of self and c2
290 305 """
291 306 # deal with workingctxs
292 307 n2 = c2._node
293 308 if n2 is None:
294 309 n2 = c2._parents[0]._node
295 310 n = self._repo.changelog.ancestor(self._node, n2)
296 311 return changectx(self._repo, n)
297 312
298 313 def walk(self, match):
299 314 fset = set(match.files())
300 315 # for dirstate.walk, files=['.'] means "walk the whole tree".
301 316 # follow that here, too
302 317 fset.discard('.')
303 318 for fn in self:
304 319 if fn in fset:
305 320 # specified pattern is the exact name
306 321 fset.remove(fn)
307 322 if match(fn):
308 323 yield fn
309 324 for fn in sorted(fset):
310 325 if fn in self._dirs:
311 326 # specified pattern is a directory
312 327 continue
313 328 if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
314 329 yield fn
315 330
316 331 def sub(self, path):
317 332 return subrepo.subrepo(self, path)
318 333
319 334 def match(self, pats=[], include=None, exclude=None, default='glob'):
320 335 r = self._repo
321 336 return matchmod.match(r.root, r.getcwd(), pats,
322 337 include, exclude, default,
323 338 auditor=r.auditor, ctx=self)
324 339
325 340 def diff(self, ctx2=None, match=None, **opts):
326 341 """Returns a diff generator for the given contexts and matcher"""
327 342 if ctx2 is None:
328 343 ctx2 = self.p1()
329 344 if ctx2 is not None and not isinstance(ctx2, changectx):
330 345 ctx2 = self._repo[ctx2]
331 346 diffopts = patch.diffopts(self._repo.ui, opts)
332 347 return patch.diff(self._repo, ctx2.node(), self.node(),
333 348 match=match, opts=diffopts)
334 349
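For illustration (assuming an existing changectx named ctx, and the standard diffopts keys such as 'git'), the generator can be joined into the full patch text against the first parent:

    text = ''.join(ctx.diff(git=True))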
335 350 @propertycache
336 351 def _dirs(self):
337 352 dirs = set()
338 353 for f in self._manifest:
339 354 pos = f.rfind('/')
340 355 while pos != -1:
341 356 f = f[:pos]
342 357 if f in dirs:
343 358 break # dirs already contains this and above
344 359 dirs.add(f)
345 360 pos = f.rfind('/')
346 361 return dirs
347 362
348 363 def dirs(self):
349 364 return self._dirs
350 365
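Illustration (hypothetical manifest contents): if the manifest holds 'a/b/c.txt' and 'a/d.txt', _dirs evaluates to set(['a', 'a/b']), which is what lets walk() detect that a pattern names a directory rather than a file.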
351 366 class filectx(object):
352 367 """A filecontext object makes access to data related to a particular
353 368 filerevision convenient."""
354 369 def __init__(self, repo, path, changeid=None, fileid=None,
355 370 filelog=None, changectx=None):
356 371 """changeid can be a changeset revision, node, or tag.
357 372 fileid can be a file revision or node."""
358 373 self._repo = repo
359 374 self._path = path
360 375
361 376 assert (changeid is not None
362 377 or fileid is not None
363 378 or changectx is not None), \
364 379 ("bad args: changeid=%r, fileid=%r, changectx=%r"
365 380 % (changeid, fileid, changectx))
366 381
367 382 if filelog:
368 383 self._filelog = filelog
369 384
370 385 if changeid is not None:
371 386 self._changeid = changeid
372 387 if changectx is not None:
373 388 self._changectx = changectx
374 389 if fileid is not None:
375 390 self._fileid = fileid
376 391
377 392 @propertycache
378 393 def _changectx(self):
379 394 return changectx(self._repo, self._changeid)
380 395
381 396 @propertycache
382 397 def _filelog(self):
383 398 return self._repo.file(self._path)
384 399
385 400 @propertycache
386 401 def _changeid(self):
387 402 if '_changectx' in self.__dict__:
388 403 return self._changectx.rev()
389 404 else:
390 405 return self._filelog.linkrev(self._filerev)
391 406
392 407 @propertycache
393 408 def _filenode(self):
394 409 if '_fileid' in self.__dict__:
395 410 return self._filelog.lookup(self._fileid)
396 411 else:
397 412 return self._changectx.filenode(self._path)
398 413
399 414 @propertycache
400 415 def _filerev(self):
401 416 return self._filelog.rev(self._filenode)
402 417
403 418 @propertycache
404 419 def _repopath(self):
405 420 return self._path
406 421
407 422 def __nonzero__(self):
408 423 try:
409 424 self._filenode
410 425 return True
411 426 except error.LookupError:
412 427 # file is missing
413 428 return False
414 429
415 430 def __str__(self):
416 431 return "%s@%s" % (self.path(), short(self.node()))
417 432
418 433 def __repr__(self):
419 434 return "<filectx %s>" % str(self)
420 435
421 436 def __hash__(self):
422 437 try:
423 438 return hash((self._path, self._filenode))
424 439 except AttributeError:
425 440 return id(self)
426 441
427 442 def __eq__(self, other):
428 443 try:
429 444 return (self._path == other._path
430 445 and self._filenode == other._filenode)
431 446 except AttributeError:
432 447 return False
433 448
434 449 def __ne__(self, other):
435 450 return not (self == other)
436 451
437 452 def filectx(self, fileid):
438 453 '''opens an arbitrary revision of the file without
439 454 opening a new filelog'''
440 455 return filectx(self._repo, self._path, fileid=fileid,
441 456 filelog=self._filelog)
442 457
443 458 def filerev(self):
444 459 return self._filerev
445 460 def filenode(self):
446 461 return self._filenode
447 462 def flags(self):
448 463 return self._changectx.flags(self._path)
449 464 def filelog(self):
450 465 return self._filelog
451 466
452 467 def rev(self):
453 468 if '_changectx' in self.__dict__:
454 469 return self._changectx.rev()
455 470 if '_changeid' in self.__dict__:
456 471 return self._changectx.rev()
457 472 return self._filelog.linkrev(self._filerev)
458 473
459 474 def linkrev(self):
460 475 return self._filelog.linkrev(self._filerev)
461 476 def node(self):
462 477 return self._changectx.node()
463 478 def hex(self):
464 479 return hex(self.node())
465 480 def user(self):
466 481 return self._changectx.user()
467 482 def date(self):
468 483 return self._changectx.date()
469 484 def files(self):
470 485 return self._changectx.files()
471 486 def description(self):
472 487 return self._changectx.description()
473 488 def branch(self):
474 489 return self._changectx.branch()
475 490 def extra(self):
476 491 return self._changectx.extra()
477 492 def manifest(self):
478 493 return self._changectx.manifest()
479 494 def changectx(self):
480 495 return self._changectx
481 496
482 497 def data(self):
483 498 return self._filelog.read(self._filenode)
484 499 def path(self):
485 500 return self._path
486 501 def size(self):
487 502 return self._filelog.size(self._filerev)
488 503
489 504 def isbinary(self):
490 505 try:
491 506 return util.binary(self.data())
492 507 except IOError:
493 508 return False
494 509
495 510 def cmp(self, fctx):
496 511 """compare with other file context
497 512
498 513 returns True if different than fctx.
499 514 """
500 515 if (fctx._filerev is None
501 516 and (self._repo._encodefilterpats
502 517 # if file data starts with '\1\n', empty metadata block is
503 518 # prepended, which adds 4 bytes to filelog.size().
504 519 or self.size() - 4 == fctx.size())
505 520 or self.size() == fctx.size()):
506 521 return self._filelog.cmp(self._filenode, fctx.data())
507 522
508 523 return True
509 524
510 525 def renamed(self):
511 526 """check if file was actually renamed in this changeset revision
512 527
513 528 If a rename is logged in the file revision, we report the copy for the
514 529 changeset only if the file revision's linkrev points back to the changeset
515 530 in question or both changeset parents contain different file revisions.
516 531 """
517 532
518 533 renamed = self._filelog.renamed(self._filenode)
519 534 if not renamed:
520 535 return renamed
521 536
522 537 if self.rev() == self.linkrev():
523 538 return renamed
524 539
525 540 name = self.path()
526 541 fnode = self._filenode
527 542 for p in self._changectx.parents():
528 543 try:
529 544 if fnode == p.filenode(name):
530 545 return None
531 546 except error.LookupError:
532 547 pass
533 548 return renamed
534 549
535 550 def parents(self):
536 551 p = self._path
537 552 fl = self._filelog
538 553 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
539 554
540 555 r = self._filelog.renamed(self._filenode)
541 556 if r:
542 557 pl[0] = (r[0], r[1], None)
543 558
544 559 return [filectx(self._repo, p, fileid=n, filelog=l)
545 560 for p, n, l in pl if n != nullid]
546 561
547 562 def p1(self):
548 563 return self.parents()[0]
549 564
550 565 def p2(self):
551 566 p = self.parents()
552 567 if len(p) == 2:
553 568 return p[1]
554 569 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
555 570
556 571 def children(self):
557 572 # hard for renames
558 573 c = self._filelog.children(self._filenode)
559 574 return [filectx(self._repo, self._path, fileid=x,
560 575 filelog=self._filelog) for x in c]
561 576
562 577 def annotate(self, follow=False, linenumber=None, diffopts=None):
563 578 '''returns a list of tuples of (ctx, line) for each line
564 579 in the file, where ctx is the filectx of the node where
565 580 that line was last changed.
566 581 This returns tuples of ((ctx, linenumber), line) for each line,
567 582 if "linenumber" parameter is NOT "None".
568 583 In such tuples, linenumber means one at the first appearance
569 584 in the managed file.
570 585 To reduce annotation cost,
571 586 this returns a fixed value (False is used) as linenumber,
572 587 if "linenumber" parameter is "False".'''
573 588
574 589 def decorate_compat(text, rev):
575 590 return ([rev] * len(text.splitlines()), text)
576 591
577 592 def without_linenumber(text, rev):
578 593 return ([(rev, False)] * len(text.splitlines()), text)
579 594
580 595 def with_linenumber(text, rev):
581 596 size = len(text.splitlines())
582 597 return ([(rev, i) for i in xrange(1, size + 1)], text)
583 598
584 599 decorate = (((linenumber is None) and decorate_compat) or
585 600 (linenumber and with_linenumber) or
586 601 without_linenumber)
587 602
588 603 def pair(parent, child):
589 604 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
590 605 refine=True)
591 606 for (a1, a2, b1, b2), t in blocks:
592 607 # Changed blocks ('!') or blocks made only of blank lines ('~')
593 608 # belong to the child.
594 609 if t == '=':
595 610 child[0][b1:b2] = parent[0][a1:a2]
596 611 return child
597 612
598 613 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
599 614 def getctx(path, fileid):
600 615 log = path == self._path and self._filelog or getlog(path)
601 616 return filectx(self._repo, path, fileid=fileid, filelog=log)
602 617 getctx = util.lrucachefunc(getctx)
603 618
604 619 def parents(f):
605 620 # we want to reuse filectx objects as much as possible
606 621 p = f._path
607 622 if f._filerev is None: # working dir
608 623 pl = [(n.path(), n.filerev()) for n in f.parents()]
609 624 else:
610 625 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
611 626
612 627 if follow:
613 628 r = f.renamed()
614 629 if r:
615 630 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
616 631
617 632 return [getctx(p, n) for p, n in pl if n != nullrev]
618 633
619 634 # use linkrev to find the first changeset where self appeared
620 635 if self.rev() != self.linkrev():
621 636 base = self.filectx(self.filerev())
622 637 else:
623 638 base = self
624 639
625 640 # This algorithm would prefer to be recursive, but Python is a
626 641 # bit recursion-hostile. Instead we do an iterative
627 642 # depth-first search.
628 643
629 644 visit = [base]
630 645 hist = {}
631 646 pcache = {}
632 647 needed = {base: 1}
633 648 while visit:
634 649 f = visit[-1]
635 650 if f not in pcache:
636 651 pcache[f] = parents(f)
637 652
638 653 ready = True
639 654 pl = pcache[f]
640 655 for p in pl:
641 656 if p not in hist:
642 657 ready = False
643 658 visit.append(p)
644 659 needed[p] = needed.get(p, 0) + 1
645 660 if ready:
646 661 visit.pop()
647 662 curr = decorate(f.data(), f)
648 663 for p in pl:
649 664 curr = pair(hist[p], curr)
650 665 if needed[p] == 1:
651 666 del hist[p]
652 667 else:
653 668 needed[p] -= 1
654 669
655 670 hist[f] = curr
656 671 pcache[f] = []
657 672
658 673 return zip(hist[base][0], hist[base][1].splitlines(True))
659 674
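A hypothetical usage sketch (file path and revision are illustrative); each returned tuple pairs the filectx that introduced a line with the line itself:

    fctx = repo['tip']['mercurial/context.py']
    for actx, line in fctx.annotate(follow=True):
        print '%5d: %s' % (actx.rev(), line),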
660 675 def ancestor(self, fc2, actx):
661 676 """
662 677 find the common ancestor file context, if any, of self, and fc2
663 678
664 679 actx must be the changectx of the common ancestor
665 680 of self's and fc2's respective changesets.
666 681 """
667 682
668 683 # the easy case: no (relevant) renames
669 684 if fc2.path() == self.path() and self.path() in actx:
670 685 return actx[self.path()]
671 686
672 687 # the next easiest cases: unambiguous predecessor (name trumps
673 688 # history)
674 689 if self.path() in actx and fc2.path() not in actx:
675 690 return actx[self.path()]
676 691 if fc2.path() in actx and self.path() not in actx:
677 692 return actx[fc2.path()]
678 693
679 694 # prime the ancestor cache for the working directory
680 695 acache = {}
681 696 for c in (self, fc2):
682 697 if c._filerev is None:
683 698 pl = [(n.path(), n.filenode()) for n in c.parents()]
684 699 acache[(c._path, None)] = pl
685 700
686 701 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
687 702 def parents(vertex):
688 703 if vertex in acache:
689 704 return acache[vertex]
690 705 f, n = vertex
691 706 if f not in flcache:
692 707 flcache[f] = self._repo.file(f)
693 708 fl = flcache[f]
694 709 pl = [(f, p) for p in fl.parents(n) if p != nullid]
695 710 re = fl.renamed(n)
696 711 if re:
697 712 pl.append(re)
698 713 acache[vertex] = pl
699 714 return pl
700 715
701 716 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
702 717 v = ancestor.ancestor(a, b, parents)
703 718 if v:
704 719 f, n = v
705 720 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
706 721
707 722 return None
708 723
709 724 def ancestors(self, followfirst=False):
710 725 visit = {}
711 726 c = self
712 727 cut = followfirst and 1 or None
713 728 while True:
714 729 for parent in c.parents()[:cut]:
715 730 visit[(parent.rev(), parent.node())] = parent
716 731 if not visit:
717 732 break
718 733 c = visit.pop(max(visit))
719 734 yield c
720 735
721 736 def copies(self, c2):
722 737 if not util.safehasattr(self, "_copycache"):
723 738 self._copycache = {}
724 739 sc2 = str(c2)
725 740 if sc2 not in self._copycache:
726 741 self._copycache[sc2] = copies.pathcopies(c2)
727 742 return self._copycache[sc2]
728 743
729 744 class workingctx(changectx):
730 745 """A workingctx object makes access to data related to
731 746 the current working directory convenient.
732 747 date - any valid date string or (unixtime, offset), or None.
733 748 user - username string, or None.
734 749 extra - a dictionary of extra values, or None.
735 750 changes - a list of file lists as returned by localrepo.status()
736 751 or None to use the repository status.
737 752 """
738 753 def __init__(self, repo, text="", user=None, date=None, extra=None,
739 754 changes=None):
740 755 self._repo = repo
741 756 self._rev = None
742 757 self._node = None
743 758 self._text = text
744 759 if date:
745 760 self._date = util.parsedate(date)
746 761 if user:
747 762 self._user = user
748 763 if changes:
749 764 self._status = list(changes[:4])
750 765 self._unknown = changes[4]
751 766 self._ignored = changes[5]
752 767 self._clean = changes[6]
753 768 else:
754 769 self._unknown = None
755 770 self._ignored = None
756 771 self._clean = None
757 772
758 773 self._extra = {}
759 774 if extra:
760 775 self._extra = extra.copy()
761 776 if 'branch' not in self._extra:
762 777 try:
763 778 branch = encoding.fromlocal(self._repo.dirstate.branch())
764 779 except UnicodeDecodeError:
765 780 raise util.Abort(_('branch name not in UTF-8!'))
766 781 self._extra['branch'] = branch
767 782 if self._extra['branch'] == '':
768 783 self._extra['branch'] = 'default'
769 784
770 785 def __str__(self):
771 786 return str(self._parents[0]) + "+"
772 787
773 788 def __repr__(self):
774 789 return "<workingctx %s>" % str(self)
775 790
776 791 def __nonzero__(self):
777 792 return True
778 793
779 794 def __contains__(self, key):
780 795 return self._repo.dirstate[key] not in "?r"
781 796
782 797 def _buildflagfunc(self):
783 798 # Create a fallback function for getting file flags when the
784 799 # filesystem doesn't support them
785 800
786 801 copiesget = self._repo.dirstate.copies().get
787 802
788 803 if len(self._parents) < 2:
789 804 # when we have one parent, it's easy: copy from parent
790 805 man = self._parents[0].manifest()
791 806 def func(f):
792 807 f = copiesget(f, f)
793 808 return man.flags(f)
794 809 else:
795 810 # merges are tricky: we try to reconstruct the unstored
796 811 # result from the merge (issue1802)
797 812 p1, p2 = self._parents
798 813 pa = p1.ancestor(p2)
799 814 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
800 815
801 816 def func(f):
802 817 f = copiesget(f, f) # may be wrong for merges with copies
803 818 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
804 819 if fl1 == fl2:
805 820 return fl1
806 821 if fl1 == fla:
807 822 return fl2
808 823 if fl2 == fla:
809 824 return fl1
810 825 return '' # punt for conflicts
811 826
812 827 return func
813 828
814 829 @propertycache
815 830 def _flagfunc(self):
816 831 return self._repo.dirstate.flagfunc(self._buildflagfunc)
817 832
818 833 @propertycache
819 834 def _manifest(self):
820 835 """generate a manifest corresponding to the working directory"""
821 836
822 837 man = self._parents[0].manifest().copy()
823 838 if len(self._parents) > 1:
824 839 man2 = self.p2().manifest()
825 840 def getman(f):
826 841 if f in man:
827 842 return man
828 843 return man2
829 844 else:
830 845 getman = lambda f: man
831 846
832 847 copied = self._repo.dirstate.copies()
833 848 ff = self._flagfunc
834 849 modified, added, removed, deleted = self._status
835 850 for i, l in (("a", added), ("m", modified)):
836 851 for f in l:
837 852 orig = copied.get(f, f)
838 853 man[f] = getman(orig).get(orig, nullid) + i
839 854 try:
840 855 man.set(f, ff(f))
841 856 except OSError:
842 857 pass
843 858
844 859 for f in deleted + removed:
845 860 if f in man:
846 861 del man[f]
847 862
848 863 return man
849 864
850 865 def __iter__(self):
851 866 d = self._repo.dirstate
852 867 for f in d:
853 868 if d[f] != 'r':
854 869 yield f
855 870
856 871 @propertycache
857 872 def _status(self):
858 873 return self._repo.status()[:4]
859 874
860 875 @propertycache
861 876 def _user(self):
862 877 return self._repo.ui.username()
863 878
864 879 @propertycache
865 880 def _date(self):
866 881 return util.makedate()
867 882
868 883 @propertycache
869 884 def _parents(self):
870 885 p = self._repo.dirstate.parents()
871 886 if p[1] == nullid:
872 887 p = p[:-1]
873 888 self._parents = [changectx(self._repo, x) for x in p]
874 889 return self._parents
875 890
876 891 def status(self, ignored=False, clean=False, unknown=False):
877 892 """Explicit status query
878 893 Unless this method is used to query the working copy status, the
879 894 _status property will implicitly read the status using its default
880 895 arguments."""
881 896 stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
882 897 self._unknown = self._ignored = self._clean = None
883 898 if unknown:
884 899 self._unknown = stat[4]
885 900 if ignored:
886 901 self._ignored = stat[5]
887 902 if clean:
888 903 self._clean = stat[6]
889 904 self._status = stat[:4]
890 905 return stat
891 906
892 907 def manifest(self):
893 908 return self._manifest
894 909 def user(self):
895 910 return self._user or self._repo.ui.username()
896 911 def date(self):
897 912 return self._date
898 913 def description(self):
899 914 return self._text
900 915 def files(self):
901 916 return sorted(self._status[0] + self._status[1] + self._status[2])
902 917
903 918 def modified(self):
904 919 return self._status[0]
905 920 def added(self):
906 921 return self._status[1]
907 922 def removed(self):
908 923 return self._status[2]
909 924 def deleted(self):
910 925 return self._status[3]
911 926 def unknown(self):
912 927 assert self._unknown is not None # must call status first
913 928 return self._unknown
914 929 def ignored(self):
915 930 assert self._ignored is not None # must call status first
916 931 return self._ignored
917 932 def clean(self):
918 933 assert self._clean is not None # must call status first
919 934 return self._clean
920 935 def branch(self):
921 936 return encoding.tolocal(self._extra['branch'])
922 937 def closesbranch(self):
923 938 return 'close' in self._extra
924 939 def extra(self):
925 940 return self._extra
926 941
927 942 def tags(self):
928 943 t = []
929 944 for p in self.parents():
930 945 t.extend(p.tags())
931 946 return t
932 947
933 948 def bookmarks(self):
934 949 b = []
935 950 for p in self.parents():
936 951 b.extend(p.bookmarks())
937 952 return b
938 953
939 954 def phase(self):
940 955 phase = phases.draft # default phase to draft
941 956 for p in self.parents():
942 957 phase = max(phase, p.phase())
943 958 return phase
944 959
945 960 def hidden(self):
946 961 return False
947 962
948 963 def children(self):
949 964 return []
950 965
951 966 def flags(self, path):
952 967 if '_manifest' in self.__dict__:
953 968 try:
954 969 return self._manifest.flags(path)
955 970 except KeyError:
956 971 return ''
957 972
958 973 try:
959 974 return self._flagfunc(path)
960 975 except OSError:
961 976 return ''
962 977
963 978 def filectx(self, path, filelog=None):
964 979 """get a file context from the working directory"""
965 980 return workingfilectx(self._repo, path, workingctx=self,
966 981 filelog=filelog)
967 982
968 983 def ancestor(self, c2):
969 984 """return the ancestor context of self and c2"""
970 985 return self._parents[0].ancestor(c2) # punt on two parents for now
971 986
972 987 def walk(self, match):
973 988 return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
974 989 True, False))
975 990
976 991 def dirty(self, missing=False, merge=True, branch=True):
977 992 "check whether a working directory is modified"
978 993 # check subrepos first
979 994 for s in self.substate:
980 995 if self.sub(s).dirty():
981 996 return True
982 997 # check current working dir
983 998 return ((merge and self.p2()) or
984 999 (branch and self.branch() != self.p1().branch()) or
985 1000 self.modified() or self.added() or self.removed() or
986 1001 (missing and self.deleted()))
987 1002
988 1003 def add(self, list, prefix=""):
989 1004 join = lambda f: os.path.join(prefix, f)
990 1005 wlock = self._repo.wlock()
991 1006 ui, ds = self._repo.ui, self._repo.dirstate
992 1007 try:
993 1008 rejected = []
994 1009 for f in list:
995 1010 scmutil.checkportable(ui, join(f))
996 1011 p = self._repo.wjoin(f)
997 1012 try:
998 1013 st = os.lstat(p)
999 1014 except OSError:
1000 1015 ui.warn(_("%s does not exist!\n") % join(f))
1001 1016 rejected.append(f)
1002 1017 continue
1003 1018 if st.st_size > 10000000:
1004 1019 ui.warn(_("%s: up to %d MB of RAM may be required "
1005 1020 "to manage this file\n"
1006 1021 "(use 'hg revert %s' to cancel the "
1007 1022 "pending addition)\n")
1008 1023 % (f, 3 * st.st_size // 1000000, join(f)))
1009 1024 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1010 1025 ui.warn(_("%s not added: only files and symlinks "
1011 1026 "supported currently\n") % join(f))
1012 1027 rejected.append(p)
1013 1028 elif ds[f] in 'amn':
1014 1029 ui.warn(_("%s already tracked!\n") % join(f))
1015 1030 elif ds[f] == 'r':
1016 1031 ds.normallookup(f)
1017 1032 else:
1018 1033 ds.add(f)
1019 1034 return rejected
1020 1035 finally:
1021 1036 wlock.release()
1022 1037
1023 1038 def forget(self, files, prefix=""):
1024 1039 join = lambda f: os.path.join(prefix, f)
1025 1040 wlock = self._repo.wlock()
1026 1041 try:
1027 1042 rejected = []
1028 1043 for f in files:
1029 1044 if f not in self._repo.dirstate:
1030 1045 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1031 1046 rejected.append(f)
1032 1047 elif self._repo.dirstate[f] != 'a':
1033 1048 self._repo.dirstate.remove(f)
1034 1049 else:
1035 1050 self._repo.dirstate.drop(f)
1036 1051 return rejected
1037 1052 finally:
1038 1053 wlock.release()
1039 1054
1040 1055 def ancestors(self):
1041 1056 for a in self._repo.changelog.ancestors(
1042 1057 [p.rev() for p in self._parents]):
1043 1058 yield changectx(self._repo, a)
1044 1059
1045 1060 def undelete(self, list):
1046 1061 pctxs = self.parents()
1047 1062 wlock = self._repo.wlock()
1048 1063 try:
1049 1064 for f in list:
1050 1065 if self._repo.dirstate[f] != 'r':
1051 1066 self._repo.ui.warn(_("%s not removed!\n") % f)
1052 1067 else:
1053 1068 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1054 1069 t = fctx.data()
1055 1070 self._repo.wwrite(f, t, fctx.flags())
1056 1071 self._repo.dirstate.normal(f)
1057 1072 finally:
1058 1073 wlock.release()
1059 1074
1060 1075 def copy(self, source, dest):
1061 1076 p = self._repo.wjoin(dest)
1062 1077 if not os.path.lexists(p):
1063 1078 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1064 1079 elif not (os.path.isfile(p) or os.path.islink(p)):
1065 1080 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1066 1081 "symbolic link\n") % dest)
1067 1082 else:
1068 1083 wlock = self._repo.wlock()
1069 1084 try:
1070 1085 if self._repo.dirstate[dest] in '?r':
1071 1086 self._repo.dirstate.add(dest)
1072 1087 self._repo.dirstate.copy(source, dest)
1073 1088 finally:
1074 1089 wlock.release()
1075 1090
1076 1091 def dirs(self):
1077 1092 return set(self._repo.dirstate.dirs())
1078 1093
1079 1094 class workingfilectx(filectx):
1080 1095 """A workingfilectx object makes access to data related to a particular
1081 1096 file in the working directory convenient."""
1082 1097 def __init__(self, repo, path, filelog=None, workingctx=None):
1083 1098 """changeid can be a changeset revision, node, or tag.
1084 1099 fileid can be a file revision or node."""
1085 1100 self._repo = repo
1086 1101 self._path = path
1087 1102 self._changeid = None
1088 1103 self._filerev = self._filenode = None
1089 1104
1090 1105 if filelog:
1091 1106 self._filelog = filelog
1092 1107 if workingctx:
1093 1108 self._changectx = workingctx
1094 1109
1095 1110 @propertycache
1096 1111 def _changectx(self):
1097 1112 return workingctx(self._repo)
1098 1113
1099 1114 def __nonzero__(self):
1100 1115 return True
1101 1116
1102 1117 def __str__(self):
1103 1118 return "%s@%s" % (self.path(), self._changectx)
1104 1119
1105 1120 def __repr__(self):
1106 1121 return "<workingfilectx %s>" % str(self)
1107 1122
1108 1123 def data(self):
1109 1124 return self._repo.wread(self._path)
1110 1125 def renamed(self):
1111 1126 rp = self._repo.dirstate.copied(self._path)
1112 1127 if not rp:
1113 1128 return None
1114 1129 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1115 1130
1116 1131 def parents(self):
1117 1132 '''return parent filectxs, following copies if necessary'''
1118 1133 def filenode(ctx, path):
1119 1134 return ctx._manifest.get(path, nullid)
1120 1135
1121 1136 path = self._path
1122 1137 fl = self._filelog
1123 1138 pcl = self._changectx._parents
1124 1139 renamed = self.renamed()
1125 1140
1126 1141 if renamed:
1127 1142 pl = [renamed + (None,)]
1128 1143 else:
1129 1144 pl = [(path, filenode(pcl[0], path), fl)]
1130 1145
1131 1146 for pc in pcl[1:]:
1132 1147 pl.append((path, filenode(pc, path), fl))
1133 1148
1134 1149 return [filectx(self._repo, p, fileid=n, filelog=l)
1135 1150 for p, n, l in pl if n != nullid]
1136 1151
1137 1152 def children(self):
1138 1153 return []
1139 1154
1140 1155 def size(self):
1141 1156 return os.lstat(self._repo.wjoin(self._path)).st_size
1142 1157 def date(self):
1143 1158 t, tz = self._changectx.date()
1144 1159 try:
1145 1160 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
1146 1161 except OSError, err:
1147 1162 if err.errno != errno.ENOENT:
1148 1163 raise
1149 1164 return (t, tz)
1150 1165
1151 1166 def cmp(self, fctx):
1152 1167 """compare with other file context
1153 1168
1154 1169 returns True if different than fctx.
1155 1170 """
1156 1171 # fctx should be a filectx (not a wfctx)
1157 1172 # invert comparison to reuse the same code path
1158 1173 return fctx.cmp(self)
1159 1174
1160 1175 class memctx(object):
1161 1176 """Use memctx to perform in-memory commits via localrepo.commitctx().
1162 1177
1163 1178 Revision information is supplied at initialization time, while
1164 1179 related file data is made available through a callback
1165 1180 mechanism. 'repo' is the current localrepo, 'parents' is a
1166 1181 sequence of two parent revision identifiers (pass None for every
1167 1182 missing parent), 'text' is the commit message and 'files' lists
1168 1183 names of files touched by the revision (normalized and relative to
1169 1184 repository root).
1170 1185
1171 1186 filectxfn(repo, memctx, path) is a callable receiving the
1172 1187 repository, the current memctx object and the normalized path of
1173 1188 requested file, relative to repository root. It is fired by the
1174 1189 commit function for every file in 'files', but calls order is
1175 1190 undefined. If the file is available in the revision being
1176 1191 committed (updated or added), filectxfn returns a memfilectx
1177 1192 object. If the file was removed, filectxfn raises an
1178 1193 IOError. Moved files are represented by marking the source file
1179 1194 removed and the new file added with copy information (see
1180 1195 memfilectx).
1181 1196
1182 1197 user receives the committer name and defaults to current
1183 1198 repository username, date is the commit date in any format
1184 1199 supported by util.parsedate() and defaults to current date, extra
1185 1200 is a dictionary of metadata or is left empty.
1186 1201 """
1187 1202 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1188 1203 date=None, extra=None):
1189 1204 self._repo = repo
1190 1205 self._rev = None
1191 1206 self._node = None
1192 1207 self._text = text
1193 1208 self._date = date and util.parsedate(date) or util.makedate()
1194 1209 self._user = user
1195 1210 parents = [(p or nullid) for p in parents]
1196 1211 p1, p2 = parents
1197 1212 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1198 1213 files = sorted(set(files))
1199 1214 self._status = [files, [], [], [], []]
1200 1215 self._filectxfn = filectxfn
1201 1216
1202 1217 self._extra = extra and extra.copy() or {}
1203 1218 if self._extra.get('branch', '') == '':
1204 1219 self._extra['branch'] = 'default'
1205 1220
1206 1221 def __str__(self):
1207 1222 return str(self._parents[0]) + "+"
1208 1223
1209 1224 def __int__(self):
1210 1225 return self._rev
1211 1226
1212 1227 def __nonzero__(self):
1213 1228 return True
1214 1229
1215 1230 def __getitem__(self, key):
1216 1231 return self.filectx(key)
1217 1232
1218 1233 def p1(self):
1219 1234 return self._parents[0]
1220 1235 def p2(self):
1221 1236 return self._parents[1]
1222 1237
1223 1238 def user(self):
1224 1239 return self._user or self._repo.ui.username()
1225 1240 def date(self):
1226 1241 return self._date
1227 1242 def description(self):
1228 1243 return self._text
1229 1244 def files(self):
1230 1245 return self.modified()
1231 1246 def modified(self):
1232 1247 return self._status[0]
1233 1248 def added(self):
1234 1249 return self._status[1]
1235 1250 def removed(self):
1236 1251 return self._status[2]
1237 1252 def deleted(self):
1238 1253 return self._status[3]
1239 1254 def unknown(self):
1240 1255 return self._status[4]
1241 1256 def ignored(self):
1242 1257 return self._status[5]
1243 1258 def clean(self):
1244 1259 return self._status[6]
1245 1260 def branch(self):
1246 1261 return encoding.tolocal(self._extra['branch'])
1247 1262 def extra(self):
1248 1263 return self._extra
1249 1264 def flags(self, f):
1250 1265 return self[f].flags()
1251 1266
1252 1267 def parents(self):
1253 1268 """return contexts for each parent changeset"""
1254 1269 return self._parents
1255 1270
1256 1271 def filectx(self, path, filelog=None):
1257 1272 """get a file context from the working directory"""
1258 1273 return self._filectxfn(self._repo, self, path)
1259 1274
1260 1275 def commit(self):
1261 1276 """commit context to the repo"""
1262 1277 return self._repo.commitctx(self)
1263 1278
1264 1279 class memfilectx(object):
1265 1280 """memfilectx represents an in-memory file to commit.
1266 1281
1267 1282 See memctx for more details.
1268 1283 """
1269 1284 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1270 1285 """
1271 1286 path is the normalized file path relative to repository root.
1272 1287 data is the file content as a string.
1273 1288 islink is True if the file is a symbolic link.
1274 1289 isexec is True if the file is executable.
1275 1290 copied is the source file path if current file was copied in the
1276 1291 revision being committed, or None."""
1277 1292 self._path = path
1278 1293 self._data = data
1279 1294 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1280 1295 self._copied = None
1281 1296 if copied:
1282 1297 self._copied = (copied, nullid)
1283 1298
1284 1299 def __nonzero__(self):
1285 1300 return True
1286 1301 def __str__(self):
1287 1302 return "%s@%s" % (self.path(), self._changectx)
1288 1303 def path(self):
1289 1304 return self._path
1290 1305 def data(self):
1291 1306 return self._data
1292 1307 def flags(self):
1293 1308 return self._flags
1294 1309 def isexec(self):
1295 1310 return 'x' in self._flags
1296 1311 def islink(self):
1297 1312 return 'l' in self._flags
1298 1313 def renamed(self):
1299 1314 return self._copied
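A minimal sketch (not part of this changeset) of an in-memory commit built from the classes above; the repository object, file name, and contents are illustrative:

    from mercurial import context

    def filectxfn(repo, mctx, path):
        return context.memfilectx(path, 'new contents\n')

    p1 = repo['tip'].node()
    mctx = context.memctx(repo, (p1, None), 'example commit message',
                          ['example.txt'], filectxfn, user='someone@example.com')
    newnode = mctx.commit()    # same as repo.commitctx(mctx)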
@@ -1,1745 +1,1753 b''
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 9 import parser, util, error, discovery, hbisect, phases
10 10 import node
11 11 import bookmarks as bookmarksmod
12 12 import match as matchmod
13 13 from i18n import _
14 14 import encoding
15 15
16 16 def _revancestors(repo, revs, followfirst):
17 17 """Like revlog.ancestors(), but supports followfirst."""
18 18 cut = followfirst and 1 or None
19 19 cl = repo.changelog
20 20 visit = util.deque(revs)
21 21 seen = set([node.nullrev])
22 22 while visit:
23 23 for parent in cl.parentrevs(visit.popleft())[:cut]:
24 24 if parent not in seen:
25 25 visit.append(parent)
26 26 seen.add(parent)
27 27 yield parent
28 28
29 29 def _revdescendants(repo, revs, followfirst):
30 30 """Like revlog.descendants() but supports followfirst."""
31 31 cut = followfirst and 1 or None
32 32 cl = repo.changelog
33 33 first = min(revs)
34 34 nullrev = node.nullrev
35 35 if first == nullrev:
36 36 # Are there nodes with a null first parent and a non-null
37 37 # second one? Maybe. Do we care? Probably not.
38 38 for i in cl:
39 39 yield i
40 40 return
41 41
42 42 seen = set(revs)
43 43 for i in xrange(first + 1, len(cl)):
44 44 for x in cl.parentrevs(i)[:cut]:
45 45 if x != nullrev and x in seen:
46 46 seen.add(i)
47 47 yield i
48 48 break
49 49
50 50 def _revsbetween(repo, roots, heads):
51 51 """Return all paths between roots and heads, inclusive of both endpoint
52 52 sets."""
53 53 if not roots:
54 54 return []
55 55 parentrevs = repo.changelog.parentrevs
56 56 visit = heads[:]
57 57 reachable = set()
58 58 seen = {}
59 59 minroot = min(roots)
60 60 roots = set(roots)
61 61 # open-code the post-order traversal due to the tiny size of
62 62 # sys.getrecursionlimit()
63 63 while visit:
64 64 rev = visit.pop()
65 65 if rev in roots:
66 66 reachable.add(rev)
67 67 parents = parentrevs(rev)
68 68 seen[rev] = parents
69 69 for parent in parents:
70 70 if parent >= minroot and parent not in seen:
71 71 visit.append(parent)
72 72 if not reachable:
73 73 return []
74 74 for rev in sorted(seen):
75 75 for parent in seen[rev]:
76 76 if parent in reachable:
77 77 reachable.add(rev)
78 78 return sorted(reachable)
79 79
80 80 elements = {
81 81 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
82 82 "~": (18, None, ("ancestor", 18)),
83 83 "^": (18, None, ("parent", 18), ("parentpost", 18)),
84 84 "-": (5, ("negate", 19), ("minus", 5)),
85 85 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
86 86 ("dagrangepost", 17)),
87 87 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
88 88 ("dagrangepost", 17)),
89 89 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
90 90 "not": (10, ("not", 10)),
91 91 "!": (10, ("not", 10)),
92 92 "and": (5, None, ("and", 5)),
93 93 "&": (5, None, ("and", 5)),
94 94 "or": (4, None, ("or", 4)),
95 95 "|": (4, None, ("or", 4)),
96 96 "+": (4, None, ("or", 4)),
97 97 ",": (2, None, ("list", 2)),
98 98 ")": (0, None, None),
99 99 "symbol": (0, ("symbol",), None),
100 100 "string": (0, ("string",), None),
101 101 "end": (0, None, None),
102 102 }
103 103
104 104 keywords = set(['and', 'or', 'not'])
105 105
106 106 def tokenize(program):
107 107 pos, l = 0, len(program)
108 108 while pos < l:
109 109 c = program[pos]
110 110 if c.isspace(): # skip inter-token whitespace
111 111 pass
112 112 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
113 113 yield ('::', None, pos)
114 114 pos += 1 # skip ahead
115 115 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
116 116 yield ('..', None, pos)
117 117 pos += 1 # skip ahead
118 118 elif c in "():,-|&+!~^": # handle simple operators
119 119 yield (c, None, pos)
120 120 elif (c in '"\'' or c == 'r' and
121 121 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
122 122 if c == 'r':
123 123 pos += 1
124 124 c = program[pos]
125 125 decode = lambda x: x
126 126 else:
127 127 decode = lambda x: x.decode('string-escape')
128 128 pos += 1
129 129 s = pos
130 130 while pos < l: # find closing quote
131 131 d = program[pos]
132 132 if d == '\\': # skip over escaped characters
133 133 pos += 2
134 134 continue
135 135 if d == c:
136 136 yield ('string', decode(program[s:pos]), s)
137 137 break
138 138 pos += 1
139 139 else:
140 140 raise error.ParseError(_("unterminated string"), s)
141 141 # gather up a symbol/keyword
142 142 elif c.isalnum() or c in '._' or ord(c) > 127:
143 143 s = pos
144 144 pos += 1
145 145 while pos < l: # find end of symbol
146 146 d = program[pos]
147 147 if not (d.isalnum() or d in "._/" or ord(d) > 127):
148 148 break
149 149 if d == '.' and program[pos - 1] == '.': # special case for ..
150 150 pos -= 1
151 151 break
152 152 pos += 1
153 153 sym = program[s:pos]
154 154 if sym in keywords: # operator keywords
155 155 yield (sym, None, s)
156 156 else:
157 157 yield ('symbol', sym, s)
158 158 pos -= 1
159 159 else:
160 160 raise error.ParseError(_("syntax error"), pos)
161 161 pos += 1
162 162 yield ('end', None, pos)
163 163
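An illustrative tokenization (a sketch, not part of the patch): the generator yields (type, value, position) tuples and always terminates with an 'end' token.

    >>> list(tokenize("tip:0 and not merge()"))
    [('symbol', 'tip', 0), (':', None, 3), ('symbol', '0', 4),
     ('and', None, 6), ('not', None, 10), ('symbol', 'merge', 14),
     ('(', None, 19), (')', None, 20), ('end', None, 21)]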
164 164 # helpers
165 165
166 166 def getstring(x, err):
167 167 if x and (x[0] == 'string' or x[0] == 'symbol'):
168 168 return x[1]
169 169 raise error.ParseError(err)
170 170
171 171 def getlist(x):
172 172 if not x:
173 173 return []
174 174 if x[0] == 'list':
175 175 return getlist(x[1]) + [x[2]]
176 176 return [x]
177 177
178 178 def getargs(x, min, max, err):
179 179 l = getlist(x)
180 180 if len(l) < min or (max >= 0 and len(l) > max):
181 181 raise error.ParseError(err)
182 182 return l
183 183
184 184 def getset(repo, subset, x):
185 185 if not x:
186 186 raise error.ParseError(_("missing argument"))
187 187 return methods[x[0]](repo, subset, *x[1:])
188 188
189 189 def _getrevsource(repo, r):
190 190 extra = repo[r].extra()
191 191 for label in ('source', 'transplant_source', 'rebase_source'):
192 192 if label in extra:
193 193 try:
194 194 return repo[extra[label]].rev()
195 195 except error.RepoLookupError:
196 196 pass
197 197 return None
198 198
199 199 # operator methods
200 200
201 201 def stringset(repo, subset, x):
202 202 x = repo[x].rev()
203 203 if x == -1 and len(subset) == len(repo):
204 204 return [-1]
205 205 if len(subset) == len(repo) or x in subset:
206 206 return [x]
207 207 return []
208 208
209 209 def symbolset(repo, subset, x):
210 210 if x in symbols:
211 211 raise error.ParseError(_("can't use %s here") % x)
212 212 return stringset(repo, subset, x)
213 213
214 214 def rangeset(repo, subset, x, y):
215 215 m = getset(repo, subset, x)
216 216 if not m:
217 217 m = getset(repo, range(len(repo)), x)
218 218
219 219 n = getset(repo, subset, y)
220 220 if not n:
221 221 n = getset(repo, range(len(repo)), y)
222 222
223 223 if not m or not n:
224 224 return []
225 225 m, n = m[0], n[-1]
226 226
227 227 if m < n:
228 228 r = range(m, n + 1)
229 229 else:
230 230 r = range(m, n - 1, -1)
231 231 s = set(subset)
232 232 return [x for x in r if x in s]
233 233
234 234 def dagrange(repo, subset, x, y):
235 235 if subset:
236 236 r = range(len(repo))
237 237 xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
238 238 s = set(subset)
239 239 return [r for r in xs if r in s]
240 240 return []
241 241
242 242 def andset(repo, subset, x, y):
243 243 return getset(repo, getset(repo, subset, x), y)
244 244
245 245 def orset(repo, subset, x, y):
246 246 xl = getset(repo, subset, x)
247 247 s = set(xl)
248 248 yl = getset(repo, [r for r in subset if r not in s], y)
249 249 return xl + yl
250 250
251 251 def notset(repo, subset, x):
252 252 s = set(getset(repo, subset, x))
253 253 return [r for r in subset if r not in s]
254 254
255 255 def listset(repo, subset, a, b):
256 256 raise error.ParseError(_("can't use a list in this context"))
257 257
258 258 def func(repo, subset, a, b):
259 259 if a[0] == 'symbol' and a[1] in symbols:
260 260 return symbols[a[1]](repo, subset, b)
261 261 raise error.ParseError(_("not a function: %s") % a[1])
262 262
263 263 # functions
264 264
265 265 def adds(repo, subset, x):
266 266 """``adds(pattern)``
267 267 Changesets that add a file matching pattern.
268 268 """
269 269 # i18n: "adds" is a keyword
270 270 pat = getstring(x, _("adds requires a pattern"))
271 271 return checkstatus(repo, subset, pat, 1)
272 272
273 273 def ancestor(repo, subset, x):
274 274 """``ancestor(single, single)``
275 275 Greatest common ancestor of the two changesets.
276 276 """
277 277 # i18n: "ancestor" is a keyword
278 278 l = getargs(x, 2, 2, _("ancestor requires two arguments"))
279 279 r = range(len(repo))
280 280 a = getset(repo, r, l[0])
281 281 b = getset(repo, r, l[1])
282 282 if len(a) != 1 or len(b) != 1:
283 283 # i18n: "ancestor" is a keyword
284 284 raise error.ParseError(_("ancestor arguments must be single revisions"))
285 285 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
286 286
287 287 return [r for r in an if r in subset]
288 288
289 289 def _ancestors(repo, subset, x, followfirst=False):
290 290 args = getset(repo, range(len(repo)), x)
291 291 if not args:
292 292 return []
293 293 s = set(_revancestors(repo, args, followfirst)) | set(args)
294 294 return [r for r in subset if r in s]
295 295
296 296 def ancestors(repo, subset, x):
297 297 """``ancestors(set)``
298 298 Changesets that are ancestors of a changeset in set.
299 299 """
300 300 return _ancestors(repo, subset, x)
301 301
302 302 def _firstancestors(repo, subset, x):
303 303 # ``_firstancestors(set)``
304 304 # Like ``ancestors(set)`` but follows only the first parents.
305 305 return _ancestors(repo, subset, x, followfirst=True)
306 306
307 307 def ancestorspec(repo, subset, x, n):
308 308 """``set~n``
309 309 Changesets that are the Nth ancestor (first parents only) of a changeset
310 310 in set.
311 311 """
312 312 try:
313 313 n = int(n[1])
314 314 except (TypeError, ValueError):
315 315 raise error.ParseError(_("~ expects a number"))
316 316 ps = set()
317 317 cl = repo.changelog
318 318 for r in getset(repo, subset, x):
319 319 for i in range(n):
320 320 r = cl.parentrevs(r)[0]
321 321 ps.add(r)
322 322 return [r for r in subset if r in ps]
323 323
324 324 def author(repo, subset, x):
325 325 """``author(string)``
326 326 Alias for ``user(string)``.
327 327 """
328 328 # i18n: "author" is a keyword
329 329 n = encoding.lower(getstring(x, _("author requires a string")))
330 330 kind, pattern, matcher = _substringmatcher(n)
331 331 return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
332 332
333 333 def bisect(repo, subset, x):
334 334 """``bisect(string)``
335 335 Changesets marked in the specified bisect status:
336 336
337 337 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
338 338 - ``goods``, ``bads`` : csets topologically good/bad
339 339 - ``range`` : csets taking part in the bisection
340 340 - ``pruned`` : csets that are goods, bads or skipped
341 341 - ``untested`` : csets whose fate is yet unknown
342 342 - ``ignored`` : csets ignored due to DAG topology
343 343 - ``current`` : the cset currently being bisected
344 344 """
345 345 status = getstring(x, _("bisect requires a string")).lower()
346 346 state = set(hbisect.get(repo, status))
347 347 return [r for r in subset if r in state]
348 348
349 349 # Backward-compatibility
350 350 # - no help entry so that we do not advertise it any more
351 351 def bisected(repo, subset, x):
352 352 return bisect(repo, subset, x)
353 353
354 354 def bookmark(repo, subset, x):
355 355 """``bookmark([name])``
356 356 The named bookmark or all bookmarks.
357 357
358 358 If `name` starts with `re:`, the remainder of the name is treated as
359 359 a regular expression. To match a bookmark that actually starts with `re:`,
360 360 use the prefix `literal:`.
361 361 """
362 362 # i18n: "bookmark" is a keyword
363 363 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
364 364 if args:
365 365 bm = getstring(args[0],
366 366 # i18n: "bookmark" is a keyword
367 367 _('the argument to bookmark must be a string'))
368 368 kind, pattern, matcher = _stringmatcher(bm)
369 369 if kind == 'literal':
370 370 bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
371 371 if not bmrev:
372 372 raise util.Abort(_("bookmark '%s' does not exist") % bm)
373 373 bmrev = repo[bmrev].rev()
374 374 return [r for r in subset if r == bmrev]
375 375 else:
376 376 matchrevs = set()
377 377 for name, bmrev in bookmarksmod.listbookmarks(repo).iteritems():
378 378 if matcher(name):
379 379 matchrevs.add(bmrev)
380 380 if not matchrevs:
381 381 raise util.Abort(_("no bookmarks exist that match '%s'")
382 382 % pattern)
383 383 bmrevs = set()
384 384 for bmrev in matchrevs:
385 385 bmrevs.add(repo[bmrev].rev())
386 386 return [r for r in subset if r in bmrevs]
387 387
388 388 bms = set([repo[r].rev()
389 389 for r in bookmarksmod.listbookmarks(repo).values()])
390 390 return [r for r in subset if r in bms]
391 391
392 392 def branch(repo, subset, x):
393 393 """``branch(string or set)``
394 394 All changesets belonging to the given branch or the branches of the given
395 395 changesets.
396 396
397 397 If `string` starts with `re:`, the remainder of the name is treated as
398 398 a regular expression. To match a branch that actually starts with `re:`,
399 399 use the prefix `literal:`.
400 400 """
401 401 try:
402 402 b = getstring(x, '')
403 403 except error.ParseError:
404 404 # not a string, but another revspec, e.g. tip()
405 405 pass
406 406 else:
407 407 kind, pattern, matcher = _stringmatcher(b)
408 408 if kind == 'literal':
409 409 # note: falls through to the revspec case if no branch with
410 410 # this name exists
411 411 if pattern in repo.branchmap():
412 412 return [r for r in subset if matcher(repo[r].branch())]
413 413 else:
414 414 return [r for r in subset if matcher(repo[r].branch())]
415 415
416 416 s = getset(repo, range(len(repo)), x)
417 417 b = set()
418 418 for r in s:
419 419 b.add(repo[r].branch())
420 420 s = set(s)
421 421 return [r for r in subset if r in s or repo[r].branch() in b]
422 422
423 423 def checkstatus(repo, subset, pat, field):
424 424 m = None
425 425 s = []
426 426 hasset = matchmod.patkind(pat) == 'set'
427 427 fname = None
428 428 for r in subset:
429 429 c = repo[r]
430 430 if not m or hasset:
431 431 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
432 432 if not m.anypats() and len(m.files()) == 1:
433 433 fname = m.files()[0]
434 434 if fname is not None:
435 435 if fname not in c.files():
436 436 continue
437 437 else:
438 438 for f in c.files():
439 439 if m(f):
440 440 break
441 441 else:
442 442 continue
443 443 files = repo.status(c.p1().node(), c.node())[field]
444 444 if fname is not None:
445 445 if fname in files:
446 446 s.append(r)
447 447 else:
448 448 for f in files:
449 449 if m(f):
450 450 s.append(r)
451 451 break
452 452 return s
453 453
454 454 def _children(repo, narrow, parentset):
455 455 cs = set()
456 456 pr = repo.changelog.parentrevs
457 457 for r in narrow:
458 458 for p in pr(r):
459 459 if p in parentset:
460 460 cs.add(r)
461 461 return cs
462 462
463 463 def children(repo, subset, x):
464 464 """``children(set)``
465 465 Child changesets of changesets in set.
466 466 """
467 467 s = set(getset(repo, range(len(repo)), x))
468 468 cs = _children(repo, subset, s)
469 469 return [r for r in subset if r in cs]
470 470
471 471 def closed(repo, subset, x):
472 472 """``closed()``
473 473 Changeset is closed.
474 474 """
475 475 # i18n: "closed" is a keyword
476 476 getargs(x, 0, 0, _("closed takes no arguments"))
477 477 return [r for r in subset if repo[r].closesbranch()]
478 478
479 479 def contains(repo, subset, x):
480 480 """``contains(pattern)``
481 481 Revision contains a file matching pattern. See :hg:`help patterns`
482 482 for information about file patterns.
483 483 """
484 484 # i18n: "contains" is a keyword
485 485 pat = getstring(x, _("contains requires a pattern"))
486 486 m = None
487 487 s = []
488 488 if not matchmod.patkind(pat):
489 489 for r in subset:
490 490 if pat in repo[r]:
491 491 s.append(r)
492 492 else:
493 493 for r in subset:
494 494 c = repo[r]
495 495 if not m or matchmod.patkind(pat) == 'set':
496 496 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
497 497 for f in c.manifest():
498 498 if m(f):
499 499 s.append(r)
500 500 break
501 501 return s
502 502
503 503 def converted(repo, subset, x):
504 504 """``converted([id])``
505 505 Changesets converted from the given identifier in the old repository if
506 506 present, or all converted changesets if no identifier is specified.
507 507 """
508 508
509 509 # There is exactly no chance of resolving the revision, so do a simple
510 510 # string compare and hope for the best
511 511
512 512 # i18n: "converted" is a keyword
513 513 rev = None
514 514 l = getargs(x, 0, 1, _('converted takes one or no arguments'))
515 515 if l:
516 516 rev = getstring(l[0], _('converted requires a revision'))
517 517
518 518 def _matchvalue(r):
519 519 source = repo[r].extra().get('convert_revision', None)
520 520 return source is not None and (rev is None or source.startswith(rev))
521 521
522 522 return [r for r in subset if _matchvalue(r)]
523 523
524 524 def date(repo, subset, x):
525 525 """``date(interval)``
526 526 Changesets within the interval, see :hg:`help dates`.
527 527 """
528 528 # i18n: "date" is a keyword
529 529 ds = getstring(x, _("date requires a string"))
530 530 dm = util.matchdate(ds)
531 531 return [r for r in subset if dm(repo[r].date()[0])]
532 532
533 533 def desc(repo, subset, x):
534 534 """``desc(string)``
535 535 Search commit message for string. The match is case-insensitive.
536 536 """
537 537 # i18n: "desc" is a keyword
538 538 ds = encoding.lower(getstring(x, _("desc requires a string")))
539 539 l = []
540 540 for r in subset:
541 541 c = repo[r]
542 542 if ds in encoding.lower(c.description()):
543 543 l.append(r)
544 544 return l
545 545
546 546 def _descendants(repo, subset, x, followfirst=False):
547 547 args = getset(repo, range(len(repo)), x)
548 548 if not args:
549 549 return []
550 550 s = set(_revdescendants(repo, args, followfirst)) | set(args)
551 551 return [r for r in subset if r in s]
552 552
553 553 def descendants(repo, subset, x):
554 554 """``descendants(set)``
555 555 Changesets which are descendants of changesets in set.
556 556 """
557 557 return _descendants(repo, subset, x)
558 558
559 559 def _firstdescendants(repo, subset, x):
560 560 # ``_firstdescendants(set)``
561 561 # Like ``descendants(set)`` but follows only the first parents.
562 562 return _descendants(repo, subset, x, followfirst=True)
563 563
564 564 def draft(repo, subset, x):
565 565 """``draft()``
566 566 Changeset in draft phase."""
567 567 getargs(x, 0, 0, _("draft takes no arguments"))
568 568 pc = repo._phasecache
569 569 return [r for r in subset if pc.phase(repo, r) == phases.draft]
570 570
571 def extinct(repo, subset, x):
572 """``extinct()``
573     Obsolete changesets with only obsolete descendants."""
574     getargs(x, 0, 0, _("extinct takes no arguments"))
575 extinctset = set(repo.revs('(obsolete()::) - (::(not obsolete()))'))
576 return [r for r in subset if r in extinctset]
577
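Since `extinct()` is the predicate this changeset introduces, a small equivalence sketch may help (illustration only, not part of the change; assumes an existing `repo` carrying obsolescence markers).

    # Both queries should select the same revisions: obsolete changesets
    # that have no non-obsolete descendants left.
    via_predicate = repo.revs('extinct()')
    via_expansion = repo.revs('(obsolete()::) - (::(not obsolete()))')
    assert set(via_predicate) == set(via_expansion)
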
571 578 def extra(repo, subset, x):
572 579 """``extra(label, [value])``
573 580 Changesets with the given label in the extra metadata, with the given
574 581 optional value.
575 582
576 583 If `value` starts with `re:`, the remainder of the value is treated as
577 584 a regular expression. To match a value that actually starts with `re:`,
578 585 use the prefix `literal:`.
579 586 """
580 587
581 588 l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
582 589 label = getstring(l[0], _('first argument to extra must be a string'))
583 590 value = None
584 591
585 592 if len(l) > 1:
586 593 value = getstring(l[1], _('second argument to extra must be a string'))
587 594 kind, value, matcher = _stringmatcher(value)
588 595
589 596 def _matchvalue(r):
590 597 extra = repo[r].extra()
591 598 return label in extra and (value is None or matcher(extra[label]))
592 599
593 600 return [r for r in subset if _matchvalue(r)]
594 601
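A hedged example of the `extra()` predicate; it assumes a `repo` containing changesets imported by the convert extension, which records a `convert_revision` key in the changeset extras.

    converted_csets = repo.revs("extra('convert_revision')")              # key present, any value
    from_svn        = repo.revs("extra('convert_revision', 're:^svn:')")  # value matched by regex
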
595 602 def filelog(repo, subset, x):
596 603 """``filelog(pattern)``
597 604 Changesets connected to the specified filelog.
598 605 """
599 606
600 607 pat = getstring(x, _("filelog requires a pattern"))
601 608 m = matchmod.match(repo.root, repo.getcwd(), [pat], default='relpath',
602 609 ctx=repo[None])
603 610 s = set()
604 611
605 612 if not matchmod.patkind(pat):
606 613 for f in m.files():
607 614 fl = repo.file(f)
608 615 for fr in fl:
609 616 s.add(fl.linkrev(fr))
610 617 else:
611 618 for f in repo[None]:
612 619 if m(f):
613 620 fl = repo.file(f)
614 621 for fr in fl:
615 622 s.add(fl.linkrev(fr))
616 623
617 624 return [r for r in subset if r in s]
618 625
619 626 def first(repo, subset, x):
620 627 """``first(set, [n])``
621 628 An alias for limit().
622 629 """
623 630 return limit(repo, subset, x)
624 631
625 632 def _follow(repo, subset, x, name, followfirst=False):
626 633 l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
627 634 c = repo['.']
628 635 if l:
629 636 x = getstring(l[0], _("%s expected a filename") % name)
630 637 if x in c:
631 638 cx = c[x]
632 639 s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
633 640 # include the revision responsible for the most recent version
634 641 s.add(cx.linkrev())
635 642 else:
636 643 return []
637 644 else:
638 645 s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])
639 646
640 647 return [r for r in subset if r in s]
641 648
642 649 def follow(repo, subset, x):
643 650 """``follow([file])``
644 651 An alias for ``::.`` (ancestors of the working copy's first parent).
645 652 If a filename is specified, the history of the given file is followed,
646 653 including copies.
647 654 """
648 655 return _follow(repo, subset, x, 'follow')
649 656
650 657 def _followfirst(repo, subset, x):
651 658 # ``followfirst([file])``
652 659 # Like ``follow([file])`` but follows only the first parent of
653 660 # every revision or file revision.
654 661 return _follow(repo, subset, x, '_followfirst', followfirst=True)
655 662
656 663 def getall(repo, subset, x):
657 664 """``all()``
658 665 All changesets, the same as ``0:tip``.
659 666 """
660 667 # i18n: "all" is a keyword
661 668 getargs(x, 0, 0, _("all takes no arguments"))
662 669 return subset
663 670
664 671 def grep(repo, subset, x):
665 672 """``grep(regex)``
666 673 Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
667 674 to ensure special escape characters are handled correctly. Unlike
668 675 ``keyword(string)``, the match is case-sensitive.
669 676 """
670 677 try:
671 678 # i18n: "grep" is a keyword
672 679 gr = re.compile(getstring(x, _("grep requires a string")))
673 680 except re.error, e:
674 681 raise error.ParseError(_('invalid match pattern: %s') % e)
675 682 l = []
676 683 for r in subset:
677 684 c = repo[r]
678 685 for e in c.files() + [c.user(), c.description()]:
679 686 if gr.search(e):
680 687 l.append(r)
681 688 break
682 689 return l
683 690
684 691 def _matchfiles(repo, subset, x):
685 692 # _matchfiles takes a revset list of prefixed arguments:
686 693 #
687 694 # [p:foo, i:bar, x:baz]
688 695 #
689 696 # builds a match object from them and filters subset. Allowed
690 697 # prefixes are 'p:' for regular patterns, 'i:' for include
691 698 # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
692 699 # a revision identifier, or the empty string to reference the
693 700 # working directory, from which the match object is
694 701     # initialized. Use 'd:' to set the default matching mode, which
695 702     # defaults to 'glob'. At most one 'r:' and one 'd:' argument can be passed.
696 703
697 704 # i18n: "_matchfiles" is a keyword
698 705 l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
699 706 pats, inc, exc = [], [], []
700 707 hasset = False
701 708 rev, default = None, None
702 709 for arg in l:
703 710 s = getstring(arg, _("_matchfiles requires string arguments"))
704 711 prefix, value = s[:2], s[2:]
705 712 if prefix == 'p:':
706 713 pats.append(value)
707 714 elif prefix == 'i:':
708 715 inc.append(value)
709 716 elif prefix == 'x:':
710 717 exc.append(value)
711 718 elif prefix == 'r:':
712 719 if rev is not None:
713 720 raise error.ParseError(_('_matchfiles expected at most one '
714 721 'revision'))
715 722 rev = value
716 723 elif prefix == 'd:':
717 724 if default is not None:
718 725 raise error.ParseError(_('_matchfiles expected at most one '
719 726 'default mode'))
720 727 default = value
721 728 else:
722 729 raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
723 730 if not hasset and matchmod.patkind(value) == 'set':
724 731 hasset = True
725 732 if not default:
726 733 default = 'glob'
727 734 m = None
728 735 s = []
729 736 for r in subset:
730 737 c = repo[r]
731 738 if not m or (hasset and rev is None):
732 739 ctx = c
733 740 if rev is not None:
734 741 ctx = repo[rev or None]
735 742 m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
736 743 exclude=exc, ctx=ctx, default=default)
737 744 for f in c.files():
738 745 if m(f):
739 746 s.append(r)
740 747 break
741 748 return s
742 749
743 750 def hasfile(repo, subset, x):
744 751 """``file(pattern)``
745 752 Changesets affecting files matched by pattern.
746 753 """
747 754 # i18n: "file" is a keyword
748 755 pat = getstring(x, _("file requires a pattern"))
749 756 return _matchfiles(repo, subset, ('string', 'p:' + pat))
750 757
751 758 def head(repo, subset, x):
752 759 """``head()``
753 760 Changeset is a named branch head.
754 761 """
755 762 # i18n: "head" is a keyword
756 763 getargs(x, 0, 0, _("head takes no arguments"))
757 764 hs = set()
758 765 for b, ls in repo.branchmap().iteritems():
759 766 hs.update(repo[h].rev() for h in ls)
760 767 return [r for r in subset if r in hs]
761 768
762 769 def heads(repo, subset, x):
763 770 """``heads(set)``
764 771 Members of set with no children in set.
765 772 """
766 773 s = getset(repo, subset, x)
767 774 ps = set(parents(repo, subset, x))
768 775 return [r for r in s if r not in ps]
769 776
770 777 def keyword(repo, subset, x):
771 778 """``keyword(string)``
772 779 Search commit message, user name, and names of changed files for
773 780 string. The match is case-insensitive.
774 781 """
775 782 # i18n: "keyword" is a keyword
776 783 kw = encoding.lower(getstring(x, _("keyword requires a string")))
777 784 l = []
778 785 for r in subset:
779 786 c = repo[r]
780 787 t = " ".join(c.files() + [c.user(), c.description()])
781 788 if kw in encoding.lower(t):
782 789 l.append(r)
783 790 return l
784 791
785 792 def limit(repo, subset, x):
786 793 """``limit(set, [n])``
787 794 First n members of set, defaulting to 1.
788 795 """
789 796 # i18n: "limit" is a keyword
790 797 l = getargs(x, 1, 2, _("limit requires one or two arguments"))
791 798 try:
792 799 lim = 1
793 800 if len(l) == 2:
794 801 # i18n: "limit" is a keyword
795 802 lim = int(getstring(l[1], _("limit requires a number")))
796 803 except (TypeError, ValueError):
797 804 # i18n: "limit" is a keyword
798 805 raise error.ParseError(_("limit expects a number"))
799 806 ss = set(subset)
800 807 os = getset(repo, range(len(repo)), l[0])[:lim]
801 808 return [r for r in os if r in ss]
802 809
803 810 def last(repo, subset, x):
804 811 """``last(set, [n])``
805 812 Last n members of set, defaulting to 1.
806 813 """
807 814 # i18n: "last" is a keyword
808 815 l = getargs(x, 1, 2, _("last requires one or two arguments"))
809 816 try:
810 817 lim = 1
811 818 if len(l) == 2:
812 819 # i18n: "last" is a keyword
813 820 lim = int(getstring(l[1], _("last requires a number")))
814 821 except (TypeError, ValueError):
815 822 # i18n: "last" is a keyword
816 823 raise error.ParseError(_("last expects a number"))
817 824 ss = set(subset)
818 825 os = getset(repo, range(len(repo)), l[0])[-lim:]
819 826 return [r for r in os if r in ss]
820 827
821 828 def maxrev(repo, subset, x):
822 829 """``max(set)``
823 830 Changeset with highest revision number in set.
824 831 """
825 832 os = getset(repo, range(len(repo)), x)
826 833 if os:
827 834 m = max(os)
828 835 if m in subset:
829 836 return [m]
830 837 return []
831 838
832 839 def merge(repo, subset, x):
833 840 """``merge()``
834 841 Changeset is a merge changeset.
835 842 """
836 843 # i18n: "merge" is a keyword
837 844 getargs(x, 0, 0, _("merge takes no arguments"))
838 845 cl = repo.changelog
839 846 return [r for r in subset if cl.parentrevs(r)[1] != -1]
840 847
841 848 def minrev(repo, subset, x):
842 849 """``min(set)``
843 850 Changeset with lowest revision number in set.
844 851 """
845 852 os = getset(repo, range(len(repo)), x)
846 853 if os:
847 854 m = min(os)
848 855 if m in subset:
849 856 return [m]
850 857 return []
851 858
852 859 def modifies(repo, subset, x):
853 860 """``modifies(pattern)``
854 861 Changesets modifying files matched by pattern.
855 862 """
856 863 # i18n: "modifies" is a keyword
857 864 pat = getstring(x, _("modifies requires a pattern"))
858 865 return checkstatus(repo, subset, pat, 0)
859 866
860 867 def node_(repo, subset, x):
861 868 """``id(string)``
862 869 Revision non-ambiguously specified by the given hex string prefix.
863 870 """
864 871 # i18n: "id" is a keyword
865 872 l = getargs(x, 1, 1, _("id requires one argument"))
866 873 # i18n: "id" is a keyword
867 874 n = getstring(l[0], _("id requires a string"))
868 875 if len(n) == 40:
869 876 rn = repo[n].rev()
870 877 else:
871 878 rn = None
872 879 pm = repo.changelog._partialmatch(n)
873 880 if pm is not None:
874 881 rn = repo.changelog.rev(pm)
875 882
876 883 return [r for r in subset if r == rn]
877 884
878 885 def obsolete(repo, subset, x):
879 886 """``obsolete()``
880 887 Mutable changeset with a newer version."""
881 888 getargs(x, 0, 0, _("obsolete takes no arguments"))
882 889 return [r for r in subset if repo[r].obsolete()]
883 890
884 891 def outgoing(repo, subset, x):
885 892 """``outgoing([path])``
886 893 Changesets not found in the specified destination repository, or the
887 894 default push location.
888 895 """
889 896 import hg # avoid start-up nasties
890 897 # i18n: "outgoing" is a keyword
891 898 l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
892 899 # i18n: "outgoing" is a keyword
893 900 dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
894 901 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
895 902 dest, branches = hg.parseurl(dest)
896 903 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
897 904 if revs:
898 905 revs = [repo.lookup(rev) for rev in revs]
899 906 other = hg.peer(repo, {}, dest)
900 907 repo.ui.pushbuffer()
901 908 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
902 909 repo.ui.popbuffer()
903 910 cl = repo.changelog
904 911 o = set([cl.rev(r) for r in outgoing.missing])
905 912 return [r for r in subset if r in o]
906 913
907 914 def p1(repo, subset, x):
908 915 """``p1([set])``
909 916 First parent of changesets in set, or the working directory.
910 917 """
911 918 if x is None:
912 919 p = repo[x].p1().rev()
913 920 return [r for r in subset if r == p]
914 921
915 922 ps = set()
916 923 cl = repo.changelog
917 924 for r in getset(repo, range(len(repo)), x):
918 925 ps.add(cl.parentrevs(r)[0])
919 926 return [r for r in subset if r in ps]
920 927
921 928 def p2(repo, subset, x):
922 929 """``p2([set])``
923 930 Second parent of changesets in set, or the working directory.
924 931 """
925 932 if x is None:
926 933 ps = repo[x].parents()
927 934 try:
928 935 p = ps[1].rev()
929 936 return [r for r in subset if r == p]
930 937 except IndexError:
931 938 return []
932 939
933 940 ps = set()
934 941 cl = repo.changelog
935 942 for r in getset(repo, range(len(repo)), x):
936 943 ps.add(cl.parentrevs(r)[1])
937 944 return [r for r in subset if r in ps]
938 945
939 946 def parents(repo, subset, x):
940 947 """``parents([set])``
941 948 The set of all parents for all changesets in set, or the working directory.
942 949 """
943 950 if x is None:
944 951 ps = tuple(p.rev() for p in repo[x].parents())
945 952 return [r for r in subset if r in ps]
946 953
947 954 ps = set()
948 955 cl = repo.changelog
949 956 for r in getset(repo, range(len(repo)), x):
950 957 ps.update(cl.parentrevs(r))
951 958 return [r for r in subset if r in ps]
952 959
953 960 def parentspec(repo, subset, x, n):
954 961 """``set^0``
955 962 The set.
956 963 ``set^1`` (or ``set^``), ``set^2``
957 964 First or second parent, respectively, of all changesets in set.
958 965 """
959 966 try:
960 967 n = int(n[1])
961 968 if n not in (0, 1, 2):
962 969 raise ValueError
963 970 except (TypeError, ValueError):
964 971 raise error.ParseError(_("^ expects a number 0, 1, or 2"))
965 972 ps = set()
966 973 cl = repo.changelog
967 974 for r in getset(repo, subset, x):
968 975 if n == 0:
969 976 ps.add(r)
970 977 elif n == 1:
971 978 ps.add(cl.parentrevs(r)[0])
972 979 elif n == 2:
973 980 parents = cl.parentrevs(r)
974 981 if len(parents) > 1:
975 982 ps.add(parents[1])
976 983 return [r for r in subset if r in ps]
977 984
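A hedged example of the `^n` operator handled above (assumes an existing `repo` that contains merge changesets).

    first_parents  = repo.revs('merge()^1')  # first parents of every merge
    second_parents = repo.revs('merge()^2')  # second parents of every merge
    same_set       = repo.revs('merge()^0')  # ^0 yields the set itself
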
978 985 def present(repo, subset, x):
979 986 """``present(set)``
980 987 An empty set, if any revision in set isn't found; otherwise,
981 988 all revisions in set.
982 989
983 990     If any of the specified revisions is not present in the local repository,
984 991 the query is normally aborted. But this predicate allows the query
985 992 to continue even in such cases.
986 993 """
987 994 try:
988 995 return getset(repo, subset, x)
989 996 except error.RepoLookupError:
990 997 return []
991 998
992 999 def public(repo, subset, x):
993 1000 """``public()``
994 1001 Changeset in public phase."""
995 1002 getargs(x, 0, 0, _("public takes no arguments"))
996 1003 pc = repo._phasecache
997 1004 return [r for r in subset if pc.phase(repo, r) == phases.public]
998 1005
999 1006 def remote(repo, subset, x):
1000 1007 """``remote([id [,path]])``
1001 1008 Local revision that corresponds to the given identifier in a
1002 1009 remote repository, if present. Here, the '.' identifier is a
1003 1010 synonym for the current local branch.
1004 1011 """
1005 1012
1006 1013 import hg # avoid start-up nasties
1007 1014 # i18n: "remote" is a keyword
1008 1015 l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))
1009 1016
1010 1017 q = '.'
1011 1018 if len(l) > 0:
1012 1019 # i18n: "remote" is a keyword
1013 1020 q = getstring(l[0], _("remote requires a string id"))
1014 1021 if q == '.':
1015 1022 q = repo['.'].branch()
1016 1023
1017 1024 dest = ''
1018 1025 if len(l) > 1:
1019 1026 # i18n: "remote" is a keyword
1020 1027 dest = getstring(l[1], _("remote requires a repository path"))
1021 1028 dest = repo.ui.expandpath(dest or 'default')
1022 1029 dest, branches = hg.parseurl(dest)
1023 1030 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1024 1031 if revs:
1025 1032 revs = [repo.lookup(rev) for rev in revs]
1026 1033 other = hg.peer(repo, {}, dest)
1027 1034 n = other.lookup(q)
1028 1035 if n in repo:
1029 1036 r = repo[n].rev()
1030 1037 if r in subset:
1031 1038 return [r]
1032 1039 return []
1033 1040
1034 1041 def removes(repo, subset, x):
1035 1042 """``removes(pattern)``
1036 1043 Changesets which remove files matching pattern.
1037 1044 """
1038 1045 # i18n: "removes" is a keyword
1039 1046 pat = getstring(x, _("removes requires a pattern"))
1040 1047 return checkstatus(repo, subset, pat, 2)
1041 1048
1042 1049 def rev(repo, subset, x):
1043 1050 """``rev(number)``
1044 1051 Revision with the given numeric identifier.
1045 1052 """
1046 1053 # i18n: "rev" is a keyword
1047 1054 l = getargs(x, 1, 1, _("rev requires one argument"))
1048 1055 try:
1049 1056 # i18n: "rev" is a keyword
1050 1057 l = int(getstring(l[0], _("rev requires a number")))
1051 1058 except (TypeError, ValueError):
1052 1059 # i18n: "rev" is a keyword
1053 1060 raise error.ParseError(_("rev expects a number"))
1054 1061 return [r for r in subset if r == l]
1055 1062
1056 1063 def matching(repo, subset, x):
1057 1064 """``matching(revision [, field])``
1058 1065 Changesets in which a given set of fields match the set of fields in the
1059 1066 selected revision or set.
1060 1067
1061 1068 To match more than one field pass the list of fields to match separated
1062 1069 by spaces (e.g. ``author description``).
1063 1070
1064 1071 Valid fields are most regular revision fields and some special fields.
1065 1072
1066 1073 Regular revision fields are ``description``, ``author``, ``branch``,
1067 1074 ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
1068 1075 and ``diff``.
1069 1076 Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
1070 1077 contents of the revision. Two revisions matching their ``diff`` will
1071 1078 also match their ``files``.
1072 1079
1073 1080 Special fields are ``summary`` and ``metadata``:
1074 1081 ``summary`` matches the first line of the description.
1075 1082 ``metadata`` is equivalent to matching ``description user date``
1076 1083 (i.e. it matches the main metadata fields).
1077 1084
1078 1085 ``metadata`` is the default field which is used when no fields are
1079 1086 specified. You can match more than one field at a time.
1080 1087 """
1081 1088 l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
1082 1089
1083 1090 revs = getset(repo, xrange(len(repo)), l[0])
1084 1091
1085 1092 fieldlist = ['metadata']
1086 1093 if len(l) > 1:
1087 1094 fieldlist = getstring(l[1],
1088 1095 _("matching requires a string "
1089 1096 "as its second argument")).split()
1090 1097
1091 1098 # Make sure that there are no repeated fields,
1092 1099 # expand the 'special' 'metadata' field type
1093 1100 # and check the 'files' whenever we check the 'diff'
1094 1101 fields = []
1095 1102 for field in fieldlist:
1096 1103 if field == 'metadata':
1097 1104 fields += ['user', 'description', 'date']
1098 1105 elif field == 'diff':
1099 1106 # a revision matching the diff must also match the files
1100 1107 # since matching the diff is very costly, make sure to
1101 1108 # also match the files first
1102 1109 fields += ['files', 'diff']
1103 1110 else:
1104 1111 if field == 'author':
1105 1112 field = 'user'
1106 1113 fields.append(field)
1107 1114 fields = set(fields)
1108 1115 if 'summary' in fields and 'description' in fields:
1109 1116 # If a revision matches its description it also matches its summary
1110 1117 fields.discard('summary')
1111 1118
1112 1119 # We may want to match more than one field
1113 1120 # Not all fields take the same amount of time to be matched
1114 1121 # Sort the selected fields in order of increasing matching cost
1115 1122 fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
1116 1123 'files', 'description', 'substate', 'diff']
1117 1124 def fieldkeyfunc(f):
1118 1125 try:
1119 1126 return fieldorder.index(f)
1120 1127 except ValueError:
1121 1128 # assume an unknown field is very costly
1122 1129 return len(fieldorder)
1123 1130 fields = list(fields)
1124 1131 fields.sort(key=fieldkeyfunc)
1125 1132
1126 1133 # Each field will be matched with its own "getfield" function
1127 1134 # which will be added to the getfieldfuncs array of functions
1128 1135 getfieldfuncs = []
1129 1136 _funcs = {
1130 1137 'user': lambda r: repo[r].user(),
1131 1138 'branch': lambda r: repo[r].branch(),
1132 1139 'date': lambda r: repo[r].date(),
1133 1140 'description': lambda r: repo[r].description(),
1134 1141 'files': lambda r: repo[r].files(),
1135 1142 'parents': lambda r: repo[r].parents(),
1136 1143 'phase': lambda r: repo[r].phase(),
1137 1144 'substate': lambda r: repo[r].substate,
1138 1145 'summary': lambda r: repo[r].description().splitlines()[0],
1139 1146 'diff': lambda r: list(repo[r].diff(git=True),)
1140 1147 }
1141 1148 for info in fields:
1142 1149 getfield = _funcs.get(info, None)
1143 1150 if getfield is None:
1144 1151 raise error.ParseError(
1145 1152 _("unexpected field name passed to matching: %s") % info)
1146 1153 getfieldfuncs.append(getfield)
1147 1154 # convert the getfield array of functions into a "getinfo" function
1148 1155 # which returns an array of field values (or a single value if there
1149 1156 # is only one field to match)
1150 1157 getinfo = lambda r: [f(r) for f in getfieldfuncs]
1151 1158
1152 1159 matches = set()
1153 1160 for rev in revs:
1154 1161 target = getinfo(rev)
1155 1162 for r in subset:
1156 1163 match = True
1157 1164 for n, f in enumerate(getfieldfuncs):
1158 1165 if target[n] != f(r):
1159 1166 match = False
1160 1167 break
1161 1168 if match:
1162 1169 matches.add(r)
1163 1170 return [r for r in subset if r in matches]
1164 1171
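For illustration only (not part of the changeset; assumes `repo` and that revision 42 exists locally), `matching()` can be combined with an explicit field list.

    # Changesets sharing both author and branch with revision 42.
    lookalikes = repo.revs('matching(42, "author branch")')
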
1165 1172 def reverse(repo, subset, x):
1166 1173 """``reverse(set)``
1167 1174 Reverse order of set.
1168 1175 """
1169 1176 l = getset(repo, subset, x)
1170 1177 if not isinstance(l, list):
1171 1178 l = list(l)
1172 1179 l.reverse()
1173 1180 return l
1174 1181
1175 1182 def roots(repo, subset, x):
1176 1183 """``roots(set)``
1177 1184 Changesets in set with no parent changeset in set.
1178 1185 """
1179 1186 s = set(getset(repo, xrange(len(repo)), x))
1180 1187 subset = [r for r in subset if r in s]
1181 1188 cs = _children(repo, subset, s)
1182 1189 return [r for r in subset if r not in cs]
1183 1190
1184 1191 def secret(repo, subset, x):
1185 1192 """``secret()``
1186 1193 Changeset in secret phase."""
1187 1194 getargs(x, 0, 0, _("secret takes no arguments"))
1188 1195 pc = repo._phasecache
1189 1196 return [r for r in subset if pc.phase(repo, r) == phases.secret]
1190 1197
1191 1198 def sort(repo, subset, x):
1192 1199 """``sort(set[, [-]key...])``
1193 1200     Sort set by keys. The default sort order is ascending; specify a key
1194 1201 as ``-key`` to sort in descending order.
1195 1202
1196 1203 The keys can be:
1197 1204
1198 1205 - ``rev`` for the revision number,
1199 1206 - ``branch`` for the branch name,
1200 1207 - ``desc`` for the commit message (description),
1201 1208 - ``user`` for user name (``author`` can be used as an alias),
1202 1209 - ``date`` for the commit date
1203 1210 """
1204 1211 # i18n: "sort" is a keyword
1205 1212 l = getargs(x, 1, 2, _("sort requires one or two arguments"))
1206 1213 keys = "rev"
1207 1214 if len(l) == 2:
1208 1215 keys = getstring(l[1], _("sort spec must be a string"))
1209 1216
1210 1217 s = l[0]
1211 1218 keys = keys.split()
1212 1219 l = []
1213 1220 def invert(s):
1214 1221 return "".join(chr(255 - ord(c)) for c in s)
1215 1222 for r in getset(repo, subset, s):
1216 1223 c = repo[r]
1217 1224 e = []
1218 1225 for k in keys:
1219 1226 if k == 'rev':
1220 1227 e.append(r)
1221 1228 elif k == '-rev':
1222 1229 e.append(-r)
1223 1230 elif k == 'branch':
1224 1231 e.append(c.branch())
1225 1232 elif k == '-branch':
1226 1233 e.append(invert(c.branch()))
1227 1234 elif k == 'desc':
1228 1235 e.append(c.description())
1229 1236 elif k == '-desc':
1230 1237 e.append(invert(c.description()))
1231 1238 elif k in 'user author':
1232 1239 e.append(c.user())
1233 1240 elif k in '-user -author':
1234 1241 e.append(invert(c.user()))
1235 1242 elif k == 'date':
1236 1243 e.append(c.date()[0])
1237 1244 elif k == '-date':
1238 1245 e.append(-c.date()[0])
1239 1246 else:
1240 1247 raise error.ParseError(_("unknown sort key %r") % k)
1241 1248 e.append(r)
1242 1249 l.append(e)
1243 1250 l.sort()
1244 1251 return [e[-1] for e in l]
1245 1252
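A hedged usage sketch for `sort()` (assumes an existing `repo`): newest draft changesets first, ties broken by user name.

    ordered = repo.revs("sort(draft(), '-date user')")
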
1246 1253 def _stringmatcher(pattern):
1247 1254 """
1248 1255 accepts a string, possibly starting with 're:' or 'literal:' prefix.
1249 1256 returns the matcher name, pattern, and matcher function.
1250 1257 missing or unknown prefixes are treated as literal matches.
1251 1258
1252 1259 helper for tests:
1253 1260 >>> def test(pattern, *tests):
1254 1261 ... kind, pattern, matcher = _stringmatcher(pattern)
1255 1262 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
1256 1263
1257 1264 exact matching (no prefix):
1258 1265 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
1259 1266 ('literal', 'abcdefg', [False, False, True])
1260 1267
1261 1268 regex matching ('re:' prefix)
1262 1269 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
1263 1270 ('re', 'a.+b', [False, False, True])
1264 1271
1265 1272 force exact matches ('literal:' prefix)
1266 1273 >>> test('literal:re:foobar', 'foobar', 're:foobar')
1267 1274 ('literal', 're:foobar', [False, True])
1268 1275
1269 1276 unknown prefixes are ignored and treated as literals
1270 1277 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
1271 1278 ('literal', 'foo:bar', [False, False, True])
1272 1279 """
1273 1280 if pattern.startswith('re:'):
1274 1281 pattern = pattern[3:]
1275 1282 try:
1276 1283 regex = re.compile(pattern)
1277 1284 except re.error, e:
1278 1285 raise error.ParseError(_('invalid regular expression: %s')
1279 1286 % e)
1280 1287 return 're', pattern, regex.search
1281 1288 elif pattern.startswith('literal:'):
1282 1289 pattern = pattern[8:]
1283 1290 return 'literal', pattern, pattern.__eq__
1284 1291
1285 1292 def _substringmatcher(pattern):
1286 1293 kind, pattern, matcher = _stringmatcher(pattern)
1287 1294 if kind == 'literal':
1288 1295 matcher = lambda s: pattern in s
1289 1296 return kind, pattern, matcher
1290 1297
1291 1298 def tag(repo, subset, x):
1292 1299 """``tag([name])``
1293 1300 The specified tag by name, or all tagged revisions if no name is given.
1294 1301 """
1295 1302 # i18n: "tag" is a keyword
1296 1303 args = getargs(x, 0, 1, _("tag takes one or no arguments"))
1297 1304 cl = repo.changelog
1298 1305 if args:
1299 1306 pattern = getstring(args[0],
1300 1307 # i18n: "tag" is a keyword
1301 1308 _('the argument to tag must be a string'))
1302 1309 kind, pattern, matcher = _stringmatcher(pattern)
1303 1310 if kind == 'literal':
1304 1311 # avoid resolving all tags
1305 1312 tn = repo._tagscache.tags.get(pattern, None)
1306 1313 if tn is None:
1307 1314 raise util.Abort(_("tag '%s' does not exist") % pattern)
1308 1315 s = set([repo[tn].rev()])
1309 1316 else:
1310 1317 s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
1311 1318 if not s:
1312 1319 raise util.Abort(_("no tags exist that match '%s'") % pattern)
1313 1320 else:
1314 1321 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
1315 1322 return [r for r in subset if r in s]
1316 1323
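A hedged example of the two tag-matching modes (assumes a `repo` whose release tags start with `v`; note that `tag()` aborts when a given pattern matches nothing).

    releases   = repo.revs("tag('re:^v')")  # tags whose name starts with "v"
    all_tagged = repo.revs('tag()')         # every tagged revision except "tip"
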
1317 1324 def tagged(repo, subset, x):
1318 1325 return tag(repo, subset, x)
1319 1326
1320 1327 def unstable(repo, subset, x):
1321 1328 """``unstable()``
1322 1329     Unstable changesets are non-obsolete changesets with obsolete ancestors."""
1323 1330     getargs(x, 0, 0, _("unstable takes no arguments"))
1324 1331 unstableset = set(repo.revs('(obsolete()::) - obsolete()'))
1325 1332 return [r for r in subset if r in unstableset]
1326 1333
1327 1334
1328 1335 def user(repo, subset, x):
1329 1336 """``user(string)``
1330 1337 User name contains string. The match is case-insensitive.
1331 1338
1332 1339 If `string` starts with `re:`, the remainder of the string is treated as
1333 1340 a regular expression. To match a user that actually contains `re:`, use
1334 1341 the prefix `literal:`.
1335 1342 """
1336 1343 return author(repo, subset, x)
1337 1344
1338 1345 # for internal use
1339 1346 def _list(repo, subset, x):
1340 1347 s = getstring(x, "internal error")
1341 1348 if not s:
1342 1349 return []
1343 1350 if not isinstance(subset, set):
1344 1351 subset = set(subset)
1345 1352 ls = [repo[r].rev() for r in s.split('\0')]
1346 1353 return [r for r in ls if r in subset]
1347 1354
1348 1355 symbols = {
1349 1356 "adds": adds,
1350 1357 "all": getall,
1351 1358 "ancestor": ancestor,
1352 1359 "ancestors": ancestors,
1353 1360 "_firstancestors": _firstancestors,
1354 1361 "author": author,
1355 1362 "bisect": bisect,
1356 1363 "bisected": bisected,
1357 1364 "bookmark": bookmark,
1358 1365 "branch": branch,
1359 1366 "children": children,
1360 1367 "closed": closed,
1361 1368 "contains": contains,
1362 1369 "converted": converted,
1363 1370 "date": date,
1364 1371 "desc": desc,
1365 1372 "descendants": descendants,
1366 1373 "_firstdescendants": _firstdescendants,
1367 1374 "draft": draft,
1375 "extinct": extinct,
1368 1376 "extra": extra,
1369 1377 "file": hasfile,
1370 1378 "filelog": filelog,
1371 1379 "first": first,
1372 1380 "follow": follow,
1373 1381 "_followfirst": _followfirst,
1374 1382 "grep": grep,
1375 1383 "head": head,
1376 1384 "heads": heads,
1377 1385 "id": node_,
1378 1386 "keyword": keyword,
1379 1387 "last": last,
1380 1388 "limit": limit,
1381 1389 "_matchfiles": _matchfiles,
1382 1390 "max": maxrev,
1383 1391 "merge": merge,
1384 1392 "min": minrev,
1385 1393 "modifies": modifies,
1386 1394 "obsolete": obsolete,
1387 1395 "outgoing": outgoing,
1388 1396 "p1": p1,
1389 1397 "p2": p2,
1390 1398 "parents": parents,
1391 1399 "present": present,
1392 1400 "public": public,
1393 1401 "remote": remote,
1394 1402 "removes": removes,
1395 1403 "rev": rev,
1396 1404 "reverse": reverse,
1397 1405 "roots": roots,
1398 1406 "sort": sort,
1399 1407 "secret": secret,
1400 1408 "matching": matching,
1401 1409 "tag": tag,
1402 1410 "tagged": tagged,
1403 1411 "user": user,
1404 1412 "unstable": unstable,
1405 1413 "_list": _list,
1406 1414 }
1407 1415
1408 1416 methods = {
1409 1417 "range": rangeset,
1410 1418 "dagrange": dagrange,
1411 1419 "string": stringset,
1412 1420 "symbol": symbolset,
1413 1421 "and": andset,
1414 1422 "or": orset,
1415 1423 "not": notset,
1416 1424 "list": listset,
1417 1425 "func": func,
1418 1426 "ancestor": ancestorspec,
1419 1427 "parent": parentspec,
1420 1428 "parentpost": p1,
1421 1429 }
1422 1430
1423 1431 def optimize(x, small):
1424 1432 if x is None:
1425 1433 return 0, x
1426 1434
1427 1435 smallbonus = 1
1428 1436 if small:
1429 1437 smallbonus = .5
1430 1438
1431 1439 op = x[0]
1432 1440 if op == 'minus':
1433 1441 return optimize(('and', x[1], ('not', x[2])), small)
1434 1442 elif op == 'dagrangepre':
1435 1443 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
1436 1444 elif op == 'dagrangepost':
1437 1445 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
1438 1446 elif op == 'rangepre':
1439 1447 return optimize(('range', ('string', '0'), x[1]), small)
1440 1448 elif op == 'rangepost':
1441 1449 return optimize(('range', x[1], ('string', 'tip')), small)
1442 1450 elif op == 'negate':
1443 1451 return optimize(('string',
1444 1452 '-' + getstring(x[1], _("can't negate that"))), small)
1445 1453 elif op in 'string symbol negate':
1446 1454 return smallbonus, x # single revisions are small
1447 1455 elif op == 'and':
1448 1456 wa, ta = optimize(x[1], True)
1449 1457 wb, tb = optimize(x[2], True)
1450 1458 w = min(wa, wb)
1451 1459 if wa > wb:
1452 1460 return w, (op, tb, ta)
1453 1461 return w, (op, ta, tb)
1454 1462 elif op == 'or':
1455 1463 wa, ta = optimize(x[1], False)
1456 1464 wb, tb = optimize(x[2], False)
1457 1465 if wb < wa:
1458 1466 wb, wa = wa, wb
1459 1467 return max(wa, wb), (op, ta, tb)
1460 1468 elif op == 'not':
1461 1469 o = optimize(x[1], not small)
1462 1470 return o[0], (op, o[1])
1463 1471 elif op == 'parentpost':
1464 1472 o = optimize(x[1], small)
1465 1473 return o[0], (op, o[1])
1466 1474 elif op == 'group':
1467 1475 return optimize(x[1], small)
1468 1476 elif op in 'dagrange range list parent ancestorspec':
1469 1477 if op == 'parent':
1470 1478 # x^:y means (x^) : y, not x ^ (:y)
1471 1479 post = ('parentpost', x[1])
1472 1480 if x[2][0] == 'dagrangepre':
1473 1481 return optimize(('dagrange', post, x[2][1]), small)
1474 1482 elif x[2][0] == 'rangepre':
1475 1483 return optimize(('range', post, x[2][1]), small)
1476 1484
1477 1485 wa, ta = optimize(x[1], small)
1478 1486 wb, tb = optimize(x[2], small)
1479 1487 return wa + wb, (op, ta, tb)
1480 1488 elif op == 'func':
1481 1489 f = getstring(x[1], _("not a symbol"))
1482 1490 wa, ta = optimize(x[2], small)
1483 1491 if f in ("author branch closed date desc file grep keyword "
1484 1492 "outgoing user"):
1485 1493 w = 10 # slow
1486 1494 elif f in "modifies adds removes":
1487 1495 w = 30 # slower
1488 1496 elif f == "contains":
1489 1497 w = 100 # very slow
1490 1498 elif f == "ancestor":
1491 1499 w = 1 * smallbonus
1492 1500 elif f in "reverse limit first":
1493 1501 w = 0
1494 1502 elif f in "sort":
1495 1503 w = 10 # assume most sorts look at changelog
1496 1504 else:
1497 1505 w = 1
1498 1506 return w + wa, (op, x[1], ta)
1499 1507 return 1, x
1500 1508
1501 1509 _aliasarg = ('func', ('symbol', '_aliasarg'))
1502 1510 def _getaliasarg(tree):
1503 1511 """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
1504 1512 return X, None otherwise.
1505 1513 """
1506 1514 if (len(tree) == 3 and tree[:2] == _aliasarg
1507 1515 and tree[2][0] == 'string'):
1508 1516 return tree[2][1]
1509 1517 return None
1510 1518
1511 1519 def _checkaliasarg(tree, known=None):
1512 1520     """Check that tree contains no _aliasarg construct, or only ones whose
1513 1521     value is in known. Used to avoid alias placeholder injection.
1514 1522 """
1515 1523 if isinstance(tree, tuple):
1516 1524 arg = _getaliasarg(tree)
1517 1525 if arg is not None and (not known or arg not in known):
1518 1526 raise error.ParseError(_("not a function: %s") % '_aliasarg')
1519 1527 for t in tree:
1520 1528 _checkaliasarg(t, known)
1521 1529
1522 1530 class revsetalias(object):
1523 1531 funcre = re.compile('^([^(]+)\(([^)]+)\)$')
1524 1532 args = None
1525 1533
1526 1534 def __init__(self, name, value):
1527 1535 '''Aliases like:
1528 1536
1529 1537 h = heads(default)
1530 1538 b($1) = ancestors($1) - ancestors(default)
1531 1539 '''
1532 1540 m = self.funcre.search(name)
1533 1541 if m:
1534 1542 self.name = m.group(1)
1535 1543 self.tree = ('func', ('symbol', m.group(1)))
1536 1544 self.args = [x.strip() for x in m.group(2).split(',')]
1537 1545 for arg in self.args:
1538 1546                 # _aliasarg() is an unknown symbol only used to separate
1539 1547 # alias argument placeholders from regular strings.
1540 1548 value = value.replace(arg, '_aliasarg(%r)' % (arg,))
1541 1549 else:
1542 1550 self.name = name
1543 1551 self.tree = ('symbol', name)
1544 1552
1545 1553 self.replacement, pos = parse(value)
1546 1554 if pos != len(value):
1547 1555 raise error.ParseError(_('invalid token'), pos)
1548 1556 # Check for placeholder injection
1549 1557 _checkaliasarg(self.replacement, self.args)
1550 1558
1551 1559 def _getalias(aliases, tree):
1552 1560 """If tree looks like an unexpanded alias, return it. Return None
1553 1561 otherwise.
1554 1562 """
1555 1563 if isinstance(tree, tuple) and tree:
1556 1564 if tree[0] == 'symbol' and len(tree) == 2:
1557 1565 name = tree[1]
1558 1566 alias = aliases.get(name)
1559 1567 if alias and alias.args is None and alias.tree == tree:
1560 1568 return alias
1561 1569 if tree[0] == 'func' and len(tree) > 1:
1562 1570 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
1563 1571 name = tree[1][1]
1564 1572 alias = aliases.get(name)
1565 1573 if alias and alias.args is not None and alias.tree == tree[:2]:
1566 1574 return alias
1567 1575 return None
1568 1576
1569 1577 def _expandargs(tree, args):
1570 1578 """Replace _aliasarg instances with the substitution value of the
1571 1579 same name in args, recursively.
1572 1580 """
1573 1581 if not tree or not isinstance(tree, tuple):
1574 1582 return tree
1575 1583 arg = _getaliasarg(tree)
1576 1584 if arg is not None:
1577 1585 return args[arg]
1578 1586 return tuple(_expandargs(t, args) for t in tree)
1579 1587
1580 1588 def _expandaliases(aliases, tree, expanding, cache):
1581 1589 """Expand aliases in tree, recursively.
1582 1590
1583 1591 'aliases' is a dictionary mapping user defined aliases to
1584 1592 revsetalias objects.
1585 1593 """
1586 1594 if not isinstance(tree, tuple):
1587 1595 # Do not expand raw strings
1588 1596 return tree
1589 1597 alias = _getalias(aliases, tree)
1590 1598 if alias is not None:
1591 1599 if alias in expanding:
1592 1600 raise error.ParseError(_('infinite expansion of revset alias "%s" '
1593 1601 'detected') % alias.name)
1594 1602 expanding.append(alias)
1595 1603 if alias.name not in cache:
1596 1604 cache[alias.name] = _expandaliases(aliases, alias.replacement,
1597 1605 expanding, cache)
1598 1606 result = cache[alias.name]
1599 1607 expanding.pop()
1600 1608 if alias.args is not None:
1601 1609 l = getlist(tree[2])
1602 1610 if len(l) != len(alias.args):
1603 1611 raise error.ParseError(
1604 1612 _('invalid number of arguments: %s') % len(l))
1605 1613 l = [_expandaliases(aliases, a, [], cache) for a in l]
1606 1614 result = _expandargs(result, dict(zip(alias.args, l)))
1607 1615 else:
1608 1616 result = tuple(_expandaliases(aliases, t, expanding, cache)
1609 1617 for t in tree)
1610 1618 return result
1611 1619
1612 1620 def findaliases(ui, tree):
1613 1621 _checkaliasarg(tree)
1614 1622 aliases = {}
1615 1623 for k, v in ui.configitems('revsetalias'):
1616 1624 alias = revsetalias(k, v)
1617 1625 aliases[alias.name] = alias
1618 1626 return _expandaliases(aliases, tree, [], {})
1619 1627
1620 1628 parse = parser.parser(tokenize, elements).parse
1621 1629
1622 1630 def match(ui, spec):
1623 1631 if not spec:
1624 1632 raise error.ParseError(_("empty query"))
1625 1633 tree, pos = parse(spec)
1626 1634 if (pos != len(spec)):
1627 1635 raise error.ParseError(_("invalid token"), pos)
1628 1636 if ui:
1629 1637 tree = findaliases(ui, tree)
1630 1638 weight, tree = optimize(tree, True)
1631 1639 def mfunc(repo, subset):
1632 1640 return getset(repo, subset, tree)
1633 1641 return mfunc
1634 1642
1635 1643 def formatspec(expr, *args):
1636 1644 '''
1637 1645 This is a convenience function for using revsets internally, and
1638 1646 escapes arguments appropriately. Aliases are intentionally ignored
1639 1647 so that intended expression behavior isn't accidentally subverted.
1640 1648
1641 1649 Supported arguments:
1642 1650
1643 1651 %r = revset expression, parenthesized
1644 1652 %d = int(arg), no quoting
1645 1653 %s = string(arg), escaped and single-quoted
1646 1654 %b = arg.branch(), escaped and single-quoted
1647 1655 %n = hex(arg), single-quoted
1648 1656 %% = a literal '%'
1649 1657
1650 1658 Prefixing the type with 'l' specifies a parenthesized list of that type.
1651 1659
1652 1660 >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
1653 1661 '(10 or 11):: and ((this()) or (that()))'
1654 1662 >>> formatspec('%d:: and not %d::', 10, 20)
1655 1663 '10:: and not 20::'
1656 1664 >>> formatspec('%ld or %ld', [], [1])
1657 1665 "_list('') or 1"
1658 1666 >>> formatspec('keyword(%s)', 'foo\\xe9')
1659 1667 "keyword('foo\\\\xe9')"
1660 1668 >>> b = lambda: 'default'
1661 1669 >>> b.branch = b
1662 1670 >>> formatspec('branch(%b)', b)
1663 1671 "branch('default')"
1664 1672 >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
1665 1673 "root(_list('a\\x00b\\x00c\\x00d'))"
1666 1674 '''
1667 1675
1668 1676 def quote(s):
1669 1677 return repr(str(s))
1670 1678
1671 1679 def argtype(c, arg):
1672 1680 if c == 'd':
1673 1681 return str(int(arg))
1674 1682 elif c == 's':
1675 1683 return quote(arg)
1676 1684 elif c == 'r':
1677 1685 parse(arg) # make sure syntax errors are confined
1678 1686 return '(%s)' % arg
1679 1687 elif c == 'n':
1680 1688 return quote(node.hex(arg))
1681 1689 elif c == 'b':
1682 1690 return quote(arg.branch())
1683 1691
1684 1692 def listexp(s, t):
1685 1693 l = len(s)
1686 1694 if l == 0:
1687 1695 return "_list('')"
1688 1696 elif l == 1:
1689 1697 return argtype(t, s[0])
1690 1698 elif t == 'd':
1691 1699 return "_list('%s')" % "\0".join(str(int(a)) for a in s)
1692 1700 elif t == 's':
1693 1701 return "_list('%s')" % "\0".join(s)
1694 1702 elif t == 'n':
1695 1703 return "_list('%s')" % "\0".join(node.hex(a) for a in s)
1696 1704 elif t == 'b':
1697 1705 return "_list('%s')" % "\0".join(a.branch() for a in s)
1698 1706
1699 1707 m = l // 2
1700 1708 return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))
1701 1709
1702 1710 ret = ''
1703 1711 pos = 0
1704 1712 arg = 0
1705 1713 while pos < len(expr):
1706 1714 c = expr[pos]
1707 1715 if c == '%':
1708 1716 pos += 1
1709 1717 d = expr[pos]
1710 1718 if d == '%':
1711 1719 ret += d
1712 1720 elif d in 'dsnbr':
1713 1721 ret += argtype(d, args[arg])
1714 1722 arg += 1
1715 1723 elif d == 'l':
1716 1724 # a list of some type
1717 1725 pos += 1
1718 1726 d = expr[pos]
1719 1727 ret += listexp(list(args[arg]), d)
1720 1728 arg += 1
1721 1729 else:
1722 1730 raise util.Abort('unexpected revspec format character %s' % d)
1723 1731 else:
1724 1732 ret += c
1725 1733 pos += 1
1726 1734
1727 1735 return ret
1728 1736
1729 1737 def prettyformat(tree):
1730 1738 def _prettyformat(tree, level, lines):
1731 1739 if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
1732 1740 lines.append((level, str(tree)))
1733 1741 else:
1734 1742 lines.append((level, '(%s' % tree[0]))
1735 1743 for s in tree[1:]:
1736 1744 _prettyformat(s, level + 1, lines)
1737 1745 lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
1738 1746
1739 1747 lines = []
1740 1748 _prettyformat(tree, 0, lines)
1741 1749 output = '\n'.join((' '*l + s) for l, s in lines)
1742 1750 return output
1743 1751
1744 1752 # tell hggettext to extract docstrings from these functions:
1745 1753 i18nfunctions = symbols.values()
@@ -1,322 +1,332 b''
1 1 $ cat >> $HGRCPATH << EOF
2 2 > [extensions]
3 3 > graphlog=
4 4 > [phases]
5 5 > # public changeset are not obsolete
6 6 > publish=false
7 7 > EOF
8 8 $ mkcommit() {
9 9 > echo "$1" > "$1"
10 10 > hg add "$1"
11 11 > hg ci -m "add $1"
12 12 > }
13 13 $ getid() {
14 14 > hg id --debug -ir "desc('$1')"
15 15 > }
16 16
17 17
18 18 $ hg init tmpa
19 19 $ cd tmpa
20 20
21 21 Killing a single changeset without replacement
22 22
23 23 $ mkcommit kill_me
24 24 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
25 25 $ hg debugobsolete
26 26 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
27 27 $ cd ..
28 28
29 29 Killing a single changeset with replacement
30 30
31 31 $ hg init tmpb
32 32 $ cd tmpb
33 33 $ mkcommit a
34 34 $ mkcommit b
35 35 $ mkcommit original_c
36 36 $ hg up "desc('b')"
37 37 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
38 38 $ mkcommit new_c
39 39 created new head
40 40 $ hg debugobsolete `getid original_c` `getid new_c` -d '56 12'
41 41 $ hg debugobsolete
42 42 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
43 43
44 44 do it again (it reads the obsstore before adding the new changeset)
45 45
46 46 $ hg up '.^'
47 47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 48 $ mkcommit new_2_c
49 49 created new head
50 50 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
51 51 $ hg debugobsolete
52 52 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
53 53 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
54 54
55 55 Register two markers with a missing node
56 56
57 57 $ hg up '.^'
58 58 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
59 59 $ mkcommit new_3_c
60 60 created new head
61 61 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
62 62 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
63 63 $ hg debugobsolete
64 64 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
65 65 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
66 66 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
67 67 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
68 68
69 69 Check that graphlog detects that a changeset is obsolete:
70 70
71 71 $ hg glog
72 72 @ changeset: 5:5601fb93a350
73 73 | tag: tip
74 74 | parent: 1:7c3bad9141dc
75 75 | user: test
76 76 | date: Thu Jan 01 00:00:00 1970 +0000
77 77 | summary: add new_3_c
78 78 |
79 79 | x changeset: 4:ca819180edb9
80 80 |/ parent: 1:7c3bad9141dc
81 81 | user: test
82 82 | date: Thu Jan 01 00:00:00 1970 +0000
83 83 | summary: add new_2_c
84 84 |
85 85 | x changeset: 3:cdbce2fbb163
86 86 |/ parent: 1:7c3bad9141dc
87 87 | user: test
88 88 | date: Thu Jan 01 00:00:00 1970 +0000
89 89 | summary: add new_c
90 90 |
91 91 | x changeset: 2:245bde4270cd
92 92 |/ user: test
93 93 | date: Thu Jan 01 00:00:00 1970 +0000
94 94 | summary: add original_c
95 95 |
96 96 o changeset: 1:7c3bad9141dc
97 97 | user: test
98 98 | date: Thu Jan 01 00:00:00 1970 +0000
99 99 | summary: add b
100 100 |
101 101 o changeset: 0:1f0dee641bb7
102 102 user: test
103 103 date: Thu Jan 01 00:00:00 1970 +0000
104 104 summary: add a
105 105
106 106
107 107 Check that public changesets are not accounted as obsolete:
108 108
109 109 $ hg phase --public 2
110 110 $ hg --config 'extensions.graphlog=' glog
111 111 @ changeset: 5:5601fb93a350
112 112 | tag: tip
113 113 | parent: 1:7c3bad9141dc
114 114 | user: test
115 115 | date: Thu Jan 01 00:00:00 1970 +0000
116 116 | summary: add new_3_c
117 117 |
118 118 | x changeset: 4:ca819180edb9
119 119 |/ parent: 1:7c3bad9141dc
120 120 | user: test
121 121 | date: Thu Jan 01 00:00:00 1970 +0000
122 122 | summary: add new_2_c
123 123 |
124 124 | x changeset: 3:cdbce2fbb163
125 125 |/ parent: 1:7c3bad9141dc
126 126 | user: test
127 127 | date: Thu Jan 01 00:00:00 1970 +0000
128 128 | summary: add new_c
129 129 |
130 130 | o changeset: 2:245bde4270cd
131 131 |/ user: test
132 132 | date: Thu Jan 01 00:00:00 1970 +0000
133 133 | summary: add original_c
134 134 |
135 135 o changeset: 1:7c3bad9141dc
136 136 | user: test
137 137 | date: Thu Jan 01 00:00:00 1970 +0000
138 138 | summary: add b
139 139 |
140 140 o changeset: 0:1f0dee641bb7
141 141 user: test
142 142 date: Thu Jan 01 00:00:00 1970 +0000
143 143 summary: add a
144 144
145 145
146 146 $ cd ..
147 147
148 148 Exchange Test
149 149 ============================
150 150
151 151 Destination repo does not have any data
152 152 ---------------------------------------
153 153
154 154 Try to pull markers
155 155
156 156 $ hg init tmpc
157 157 $ cd tmpc
158 158 $ hg pull ../tmpb
159 159 pulling from ../tmpb
160 160 requesting all changes
161 161 adding changesets
162 162 adding manifests
163 163 adding file changes
164 164 added 6 changesets with 6 changes to 6 files (+3 heads)
165 165 (run 'hg heads' to see heads, 'hg merge' to merge)
166 166 $ hg debugobsolete
167 167 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
168 168 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
169 169 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
170 170 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
171 171
172 172 Rollback/transaction support
173 173
174 174 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
175 175 $ hg debugobsolete
176 176 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
177 177 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
178 178 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
179 179 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
180 180 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
181 181 $ hg rollback -n
182 182 repository tip rolled back to revision 5 (undo debugobsolete)
183 183 $ hg rollback
184 184 repository tip rolled back to revision 5 (undo debugobsolete)
185 185 $ hg debugobsolete
186 186 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
187 187 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
188 188 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
189 189 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
190 190
191 191 $ cd ..
192 192
193 193 Try to push markers
194 194
195 195 $ hg init tmpd
196 196 $ hg -R tmpb push tmpd
197 197 pushing to tmpd
198 198 searching for changes
199 199 abort: push includes an obsolete changeset: ca819180edb9!
200 200 [255]
201 201 $ hg -R tmpd debugobsolete
202 202 $ hg -R tmpb push tmpd --rev 'not obsolete()'
203 203 pushing to tmpd
204 204 searching for changes
205 205 adding changesets
206 206 adding manifests
207 207 adding file changes
208 208 added 4 changesets with 4 changes to 4 files (+1 heads)
209 209 $ hg -R tmpd debugobsolete
210 210 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
211 211 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
212 212 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
213 213 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
214 214
215 215
216 216 Destination repo has existing data
217 217 ---------------------------------------
218 218
219 219 On pull
220 220
221 221 $ hg init tmpe
222 222 $ cd tmpe
223 223 $ hg debugobsolete -d '1339 0' 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339
224 224 $ hg pull ../tmpb
225 225 pulling from ../tmpb
226 226 requesting all changes
227 227 adding changesets
228 228 adding manifests
229 229 adding file changes
230 230 added 6 changesets with 6 changes to 6 files (+3 heads)
231 231 (run 'hg heads' to see heads, 'hg merge' to merge)
232 232 $ hg debugobsolete
233 233 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
234 234 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
235 235 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
236 236 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
237 237 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
238 238
239 239
240 240 On push
241 241
242 242 $ hg push ../tmpc
243 243 pushing to ../tmpc
244 244 searching for changes
245 245 no changes found
246 246 [1]
247 247 $ hg -R ../tmpc debugobsolete
248 248 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
249 249 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
250 250 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
251 251 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
252 252 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
253 253
254 254 Detect outgoing obsolete and unstable changesets
255 255 ---------------------------------------
256 256
257 257 $ hg glog
258 258 o changeset: 5:5601fb93a350
259 259 | tag: tip
260 260 | parent: 1:7c3bad9141dc
261 261 | user: test
262 262 | date: Thu Jan 01 00:00:00 1970 +0000
263 263 | summary: add new_3_c
264 264 |
265 265 | x changeset: 4:ca819180edb9
266 266 |/ parent: 1:7c3bad9141dc
267 267 | user: test
268 268 | date: Thu Jan 01 00:00:00 1970 +0000
269 269 | summary: add new_2_c
270 270 |
271 271 | x changeset: 3:cdbce2fbb163
272 272 |/ parent: 1:7c3bad9141dc
273 273 | user: test
274 274 | date: Thu Jan 01 00:00:00 1970 +0000
275 275 | summary: add new_c
276 276 |
277 277 | o changeset: 2:245bde4270cd
278 278 |/ user: test
279 279 | date: Thu Jan 01 00:00:00 1970 +0000
280 280 | summary: add original_c
281 281 |
282 282 o changeset: 1:7c3bad9141dc
283 283 | user: test
284 284 | date: Thu Jan 01 00:00:00 1970 +0000
285 285 | summary: add b
286 286 |
287 287 o changeset: 0:1f0dee641bb7
288 288 user: test
289 289 date: Thu Jan 01 00:00:00 1970 +0000
290 290 summary: add a
291 291
292 292 $ hg up -q 'desc("new_2_c")'
293 293 $ mkcommit original_d
294 294 $ hg glog -r '::unstable()'
295 295 @ changeset: 6:7878242aeece
296 296 | tag: tip
297 297 | parent: 4:ca819180edb9
298 298 | user: test
299 299 | date: Thu Jan 01 00:00:00 1970 +0000
300 300 | summary: add original_d
301 301 |
302 302 x changeset: 4:ca819180edb9
303 303 | parent: 1:7c3bad9141dc
304 304 | user: test
305 305 | date: Thu Jan 01 00:00:00 1970 +0000
306 306 | summary: add new_2_c
307 307 |
308 308 o changeset: 1:7c3bad9141dc
309 309 | user: test
310 310 | date: Thu Jan 01 00:00:00 1970 +0000
311 311 | summary: add b
312 312 |
313 313 o changeset: 0:1f0dee641bb7
314 314 user: test
315 315 date: Thu Jan 01 00:00:00 1970 +0000
316 316 summary: add a
317 317
318 318 $ hg push ../tmpc/
319 319 pushing to ../tmpc/
320 320 searching for changes
321 321 abort: push includes an unstable changeset: 7878242aeece!
322 322 [255]
323
324 Test that extinct changesets are properly detected
325
326 $ hg log -r 'extinct()'
327 changeset: 3:cdbce2fbb163
328 parent: 1:7c3bad9141dc
329 user: test
330 date: Thu Jan 01 00:00:00 1970 +0000
331 summary: add new_c
332