##// END OF EJS Templates
revset: factor out public optimize() function from recursion...
Yuya Nishihara -
r29119:a032ebea default
parent child Browse files
Show More
@@ -1,7258 +1,7258
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import re
16 16 import shlex
17 17 import socket
18 18 import sys
19 19 import tempfile
20 20 import time
21 21
22 22 from .i18n import _
23 23 from .node import (
24 24 bin,
25 25 hex,
26 26 nullhex,
27 27 nullid,
28 28 nullrev,
29 29 short,
30 30 )
31 31 from . import (
32 32 archival,
33 33 bookmarks,
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 commandserver,
38 38 context,
39 39 copies,
40 40 dagparser,
41 41 dagutil,
42 42 destutil,
43 43 discovery,
44 44 encoding,
45 45 error,
46 46 exchange,
47 47 extensions,
48 48 fileset,
49 49 formatter,
50 50 graphmod,
51 51 hbisect,
52 52 help,
53 53 hg,
54 54 hgweb,
55 55 localrepo,
56 56 lock as lockmod,
57 57 merge as mergemod,
58 58 minirst,
59 59 obsolete,
60 60 patch,
61 61 phases,
62 62 pvec,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 scmutil,
67 67 setdiscovery,
68 68 simplemerge,
69 69 sshserver,
70 70 streamclone,
71 71 templatekw,
72 72 templater,
73 73 treediscovery,
74 74 ui as uimod,
75 75 util,
76 76 )
77 77
# Shorthand for releasing a sequence of locks in one call (None entries
# are tolerated by lockmod.release).
release = lockmod.release

# Command table: maps command name -> command entry; populated by the
# @command decorator created just below.
table = {}

command = cmdutil.command(table)

# label constants
# until 3.5, bookmarks.current was the advertised name, not
# bookmarks.active, so we must use both to avoid breaking old
# custom styles
activebookmarklabel = 'bookmarks.active bookmarks.current'

# common command options
# Each option tuple is (short flag, long name, default, help text[, metavar]).

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

# Options shared by the debug* commands that open a revlog directly.
debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', False, _('open directory manifest')),
]
221 221 # Commands start here, listed alphabetically
222 222
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    # Match the requested patterns against the working directory context
    # and delegate the actual scheduling to cmdutil.add.
    matcher = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.add(ui, repo, matcher, "", False, **opts)
    # Any rejected file maps to exit status 1; otherwise report success.
    if rejected:
        return 1
    return 0
273 273
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

       - A number of files (bar.c and foo.c) are new,
         while foobar.c has been removed (without using :hg:`remove`)
         from the repository::

           $ ls
           bar.c foo.c
           $ hg status
           ! foobar.c
           ? bar.c
           ? foo.c
           $ hg addremove
           adding bar.c
           adding foo.c
           removing foobar.c
           $ hg status
           A bar.c
           A foo.c
           R foobar.c

       - A file foobar.c was moved to foo.c without using :hg:`rename`.
         Afterwards, it was edited slightly::

           $ ls
           foo.c
           $ hg status
           ! foobar.c
           ? foo.c
           $ hg addremove --similarity 90
           removing foobar.c
           adding foo.c
           recording removal of foobar.c as rename to foo.c (94% similar)
           $ hg status -C
           A foo.c
             foobar.c
           R foobar.c

    Returns 0 if all files are successfully added.
    """
    # Rename detection threshold: default is 100 (identical files only).
    rawsim = opts.get('similarity') or 100
    try:
        sim = float(rawsim)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    # NOTE: kept as two comparisons (not a chained range check) so that a
    # non-numeric float such as NaN is treated the same as before.
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    # scmutil.addremove expects the similarity as a 0.0-1.0 fraction.
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
348 348
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    fm = ui.formatter('annotate', opts)
    # Quiet mode shows dates in the short form.
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        # rev is None: annotating the working directory. Lines introduced
        # by uncommitted changes have no node/rev yet (None), so they are
        # rendered with a '+' marker (or as the first parent's rev/node).
        def hexfn(node):
            if node is None:
                return None
            else:
                return fm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % fm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        # Committed revision: plain hex/str formatting, no '+' handling.
        hexfn = fm.hexfunc
        formatrev = formathex = str

    # One entry per displayable column:
    # (option name, column separator, raw-value getter, plain formatter).
    # The getter receives an (fctx, lineno) pair from fctx.annotate().
    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    # Formatter field names differ from option names for these two columns.
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        # No column was requested: default to showing the revision number.
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    if fm:
        # Structured formatter output: emit the raw value unformatted.
        def makefunc(get, fmt):
            return get
    else:
        # Plain output: apply the human-readable formatter to each value.
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    # Keep only the columns the user actually enabled.
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        # Abort on any bad file pattern instead of warning and continuing.
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            # Skip files detected as binary unless -a/--text was given.
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        formats = []
        pieces = []

        # Compute every enabled column for every line; in plain mode,
        # right-align each column to its widest value (by display width).
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                if fm:
                    formats.append(['%s' for x in l])
                else:
                    sizes = [encoding.colwidth(x) for x in l]
                    ml = max(sizes)
                    formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
                pieces.append(l)

        # Emit one item per annotated line: columns, then the line text.
        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if lines and not lines[-1][1].endswith('\n'):
            # Keep the output newline-terminated even if the file is not.
            fm.plain('\n')

    fm.end()
494 494
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    anode = ctx.node()

    # Expand format-string escapes (e.g. %h) in the destination name.
    dest = cmdutil.makefilename(repo, dest, anode)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    archivekind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # Streaming to stdout: a plain-files "archive" makes no sense.
        if archivekind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        # Default the member prefix to "<reponame>-<short hash>".
        prefix = prefix or (os.path.basename(repo.root) + '-%h')

    prefix = cmdutil.makefilename(repo, prefix, anode)
    matcher = scmutil.match(ctx, [], opts)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, anode, archivekind, decode,
                     matcher, prefix, subrepos=opts.get('subrepos'))
566 566
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # Hold both the working-directory lock and the store lock for the whole
    # operation, then delegate the real work to _dobackout.
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        result = _dobackout(ui, repo, node, rev, **opts)
        return result
    finally:
        # Release in reverse acquisition order; tolerates partial acquisition.
        release(lock, wlock)
637 637
def _dobackout(ui, repo, node=None, rev=None, **opts):
    """Implementation of :hg:`backout`; caller must hold wlock and lock.

    Returns 0 on success, 1 if there were unresolved files or nothing
    changed. Raises error.Abort on invalid option combinations or an
    unbackoutable revision.
    """
    # Reject contradictory option combinations up front.
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    # `node` is the legacy positional form of the revision; fold it into
    # `rev` and insist on exactly one of the two being given.
    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # Refuse to run in the middle of another operation or with local changes.
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # Backing out a merge: --parent selects which parent to keep.
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        # Not backing out the working directory parent: merge the reversal
        # into the working directory, then restore the original dirstate
        # parents so the result commits as a single linear changeset.
        dsguard = cmdutil.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                # Unresolved files remain; leave them for 'hg resolve'.
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            # Always clear the forced merge tool and release the guard.
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        # Backing out the working directory parent itself: check out the
        # target and revert the files to its parent's state.
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        # Leave the reversal uncommitted in the working directory.
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        # Commit callback: default message and force an editor when the
        # user supplied no message of their own.
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform, **opts)
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        # Human-readable "rev:shorthash" form for status messages.
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        # --merge: update back to the old dirstate parent and merge the
        # freshly committed backout changeset into it.
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0
742 742
743 743 @command('bisect',
744 744 [('r', 'reset', False, _('reset bisect state')),
745 745 ('g', 'good', False, _('mark changeset good')),
746 746 ('b', 'bad', False, _('mark changeset bad')),
747 747 ('s', 'skip', False, _('skip testing changeset')),
748 748 ('e', 'extend', False, _('extend the bisect range')),
749 749 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
750 750 ('U', 'noupdate', False, _('do not update to target'))],
751 751 _("[-gbsr] [-U] [-c CMD] [REV]"))
752 752 def bisect(ui, repo, rev=None, extra=None, command=None,
753 753 reset=None, good=None, bad=None, skip=None, extend=None,
754 754 noupdate=None):
755 755 """subdivision search of changesets
756 756
757 757 This command helps to find changesets which introduce problems. To
758 758 use, mark the earliest changeset you know exhibits the problem as
759 759 bad, then mark the latest changeset which is free from the problem
760 760 as good. Bisect will update your working directory to a revision
761 761 for testing (unless the -U/--noupdate option is specified). Once
762 762 you have performed tests, mark the working directory as good or
763 763 bad, and bisect will either update to another candidate changeset
764 764 or announce that it has found the bad revision.
765 765
766 766 As a shortcut, you can also use the revision argument to mark a
767 767 revision as good or bad without checking it out first.
768 768
769 769 If you supply a command, it will be used for automatic bisection.
770 770 The environment variable HG_NODE will contain the ID of the
771 771 changeset being tested. The exit status of the command will be
772 772 used to mark revisions as good or bad: status 0 means good, 125
773 773 means to skip the revision, 127 (command not found) will abort the
774 774 bisection, and any other non-zero exit status means the revision
775 775 is bad.
776 776
777 777 .. container:: verbose
778 778
779 779 Some examples:
780 780
781 781 - start a bisection with known bad revision 34, and good revision 12::
782 782
783 783 hg bisect --bad 34
784 784 hg bisect --good 12
785 785
786 786 - advance the current bisection by marking current revision as good or
787 787 bad::
788 788
789 789 hg bisect --good
790 790 hg bisect --bad
791 791
792 792 - mark the current revision, or a known revision, to be skipped (e.g. if
793 793 that revision is not usable because of another issue)::
794 794
795 795 hg bisect --skip
796 796 hg bisect --skip 23
797 797
798 798 - skip all revisions that do not touch directories ``foo`` or ``bar``::
799 799
800 800 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
801 801
802 802 - forget the current bisection::
803 803
804 804 hg bisect --reset
805 805
806 806 - use 'make && make tests' to automatically find the first broken
807 807 revision::
808 808
809 809 hg bisect --reset
810 810 hg bisect --bad 34
811 811 hg bisect --good 12
812 812 hg bisect --command "make && make tests"
813 813
814 814 - see all changesets whose states are already known in the current
815 815 bisection::
816 816
817 817 hg log -r "bisect(pruned)"
818 818
819 819 - see the changeset currently being bisected (especially useful
820 820 if running with -U/--noupdate)::
821 821
822 822 hg log -r "bisect(current)"
823 823
824 824 - see all changesets that took part in the current bisection::
825 825
826 826 hg log -r "bisect(range)"
827 827
828 828 - you can even get a nice graph::
829 829
830 830 hg log --graph -r "bisect(range)"
831 831
832 832 See :hg:`help revsets` for more about the `bisect()` keyword.
833 833
834 834 Returns 0 on success.
835 835 """
836 836 def extendbisectrange(nodes, good):
837 837 # bisect is incomplete when it ends on a merge node and
838 838 # one of the parent was not checked.
839 839 parents = repo[nodes[0]].parents()
840 840 if len(parents) > 1:
841 841 if good:
842 842 side = state['bad']
843 843 else:
844 844 side = state['good']
845 845 num = len(set(i.node() for i in parents) & set(side))
846 846 if num == 1:
847 847 return parents[0].ancestor(parents[1])
848 848 return None
849 849
850 850 def print_result(nodes, good):
851 851 displayer = cmdutil.show_changeset(ui, repo, {})
852 852 if len(nodes) == 1:
853 853 # narrowed it down to a single revision
854 854 if good:
855 855 ui.write(_("The first good revision is:\n"))
856 856 else:
857 857 ui.write(_("The first bad revision is:\n"))
858 858 displayer.show(repo[nodes[0]])
859 859 extendnode = extendbisectrange(nodes, good)
860 860 if extendnode is not None:
861 861 ui.write(_('Not all ancestors of this changeset have been'
862 862 ' checked.\nUse bisect --extend to continue the '
863 863 'bisection from\nthe common ancestor, %s.\n')
864 864 % extendnode)
865 865 else:
866 866 # multiple possible revisions
867 867 if good:
868 868 ui.write(_("Due to skipped revisions, the first "
869 869 "good revision could be any of:\n"))
870 870 else:
871 871 ui.write(_("Due to skipped revisions, the first "
872 872 "bad revision could be any of:\n"))
873 873 for n in nodes:
874 874 displayer.show(repo[n])
875 875 displayer.close()
876 876
877 877 def check_state(state, interactive=True):
878 878 if not state['good'] or not state['bad']:
879 879 if (good or bad or skip or reset) and interactive:
880 880 return
881 881 if not state['good']:
882 882 raise error.Abort(_('cannot bisect (no known good revisions)'))
883 883 else:
884 884 raise error.Abort(_('cannot bisect (no known bad revisions)'))
885 885 return True
886 886
887 887 # backward compatibility
888 888 if rev in "good bad reset init".split():
889 889 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
890 890 cmd, rev, extra = rev, extra, None
891 891 if cmd == "good":
892 892 good = True
893 893 elif cmd == "bad":
894 894 bad = True
895 895 else:
896 896 reset = True
897 897 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
898 898 raise error.Abort(_('incompatible arguments'))
899 899
900 900 cmdutil.checkunfinished(repo)
901 901
902 902 if reset:
903 903 p = repo.join("bisect.state")
904 904 if os.path.exists(p):
905 905 os.unlink(p)
906 906 return
907 907
908 908 state = hbisect.load_state(repo)
909 909
910 910 if command:
911 911 changesets = 1
912 912 if noupdate:
913 913 try:
914 914 node = state['current'][0]
915 915 except LookupError:
916 916 raise error.Abort(_('current bisect revision is unknown - '
917 917 'start a new bisect to fix'))
918 918 else:
919 919 node, p2 = repo.dirstate.parents()
920 920 if p2 != nullid:
921 921 raise error.Abort(_('current bisect revision is a merge'))
922 922 try:
923 923 while changesets:
924 924 # update state
925 925 state['current'] = [node]
926 926 hbisect.save_state(repo, state)
927 927 status = ui.system(command, environ={'HG_NODE': hex(node)})
928 928 if status == 125:
929 929 transition = "skip"
930 930 elif status == 0:
931 931 transition = "good"
932 932 # status < 0 means process was killed
933 933 elif status == 127:
934 934 raise error.Abort(_("failed to execute %s") % command)
935 935 elif status < 0:
936 936 raise error.Abort(_("%s killed") % command)
937 937 else:
938 938 transition = "bad"
939 939 ctx = scmutil.revsingle(repo, rev, node)
940 940 rev = None # clear for future iterations
941 941 state[transition].append(ctx.node())
942 942 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
943 943 check_state(state, interactive=False)
944 944 # bisect
945 945 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
946 946 # update to next check
947 947 node = nodes[0]
948 948 if not noupdate:
949 949 cmdutil.bailifchanged(repo)
950 950 hg.clean(repo, node, show_stats=False)
951 951 finally:
952 952 state['current'] = [node]
953 953 hbisect.save_state(repo, state)
954 954 print_result(nodes, bgood)
955 955 return
956 956
957 957 # update state
958 958
959 959 if rev:
960 960 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
961 961 else:
962 962 nodes = [repo.lookup('.')]
963 963
964 964 if good or bad or skip:
965 965 if good:
966 966 state['good'] += nodes
967 967 elif bad:
968 968 state['bad'] += nodes
969 969 elif skip:
970 970 state['skip'] += nodes
971 971 hbisect.save_state(repo, state)
972 972
973 973 if not check_state(state):
974 974 return
975 975
976 976 # actually bisect
977 977 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
978 978 if extend:
979 979 if not changesets:
980 980 extendnode = extendbisectrange(nodes, good)
981 981 if extendnode is not None:
982 982 ui.write(_("Extending search to changeset %d:%s\n")
983 983 % (extendnode.rev(), extendnode))
984 984 state['current'] = [extendnode.node()]
985 985 hbisect.save_state(repo, state)
986 986 if noupdate:
987 987 return
988 988 cmdutil.bailifchanged(repo)
989 989 return hg.clean(repo, extendnode.node())
990 990 raise error.Abort(_("nothing to extend"))
991 991
992 992 if changesets == 0:
993 993 print_result(nodes, good)
994 994 else:
995 995 assert len(nodes) == 1 # only a single node can be tested next
996 996 node = nodes[0]
997 997 # compute the approximate number of remaining tests
998 998 tests, size = 0, 2
999 999 while size <= changesets:
1000 1000 tests, size = tests + 1, size * 2
1001 1001 rev = repo.changelog.rev(node)
1002 1002 ui.write(_("Testing changeset %d:%s "
1003 1003 "(%d changesets remaining, ~%d tests)\n")
1004 1004 % (rev, short(node), changesets, tests))
1005 1005 state['current'] = [node]
1006 1006 hbisect.save_state(repo, state)
1007 1007 if not noupdate:
1008 1008 cmdutil.bailifchanged(repo)
1009 1009 return hg.clean(repo, node)
1010 1010
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    def checkformat(mark):
        # Normalize a user-supplied bookmark name and reject names that
        # are empty after stripping or that fail the generic label checks.
        mark = mark.strip()
        if not mark:
            raise error.Abort(_("bookmark names cannot consist entirely of "
                                "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    def checkconflict(repo, mark, cur, force=False, target=None):
        # Abort (or resolve, with --force) conflicts between the new
        # bookmark name and existing bookmarks or branch names.
        # NOTE: reads 'marks' from the enclosing scope; it is assigned
        # under the repo locks before this helper is ever called.
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                # all bookmarks that share this one's base name (the part
                # before any '@path' divergence suffix)
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise error.Abort(_("bookmark '%s' already exists "
                                "(use -f to force)") % mark)
        # a bookmark may not shadow an existing or current branch name
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise error.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    # reject mutually-exclusive option combinations up front
    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise error.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise error.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise error.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # mutating path: take both locks and run inside a transaction
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                tr = repo.transaction('bookmark')
                for mark in names:
                    if mark not in marks:
                        raise error.Abort(_("bookmark '%s' does not exist") %
                                          mark)
                    if mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                    del marks[mark]

            elif rename:
                tr = repo.transaction('bookmark')
                if not names:
                    raise error.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise error.Abort(_("bookmark '%s' does not exist")
                                      % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                # the renamed bookmark stays active unless -i was given
                if repo._activebookmark == rename and not inactive:
                    bookmarks.activate(repo, mark)
                del marks[rename]
            elif names:
                tr = repo.transaction('bookmark')
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    # remember the first name; it may become the active one
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.activate(repo, newact)
                elif cur != tgt and newact == repo._activebookmark:
                    # the active bookmark moved away from the working
                    # directory parent, so deactivate it
                    bookmarks.deactivate(repo)
            elif inactive:
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
            if tr is not None:
                marks.recordchange(tr)
                tr.close()
        finally:
            lockmod.release(tr, lock, wlock)
    else: # show bookmarks
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            active = repo._activebookmark
            if bmark == active:
                prefix, label = '*', activebookmarklabel
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            pad = " " * (25 - encoding.colwidth(bmark))
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
                         repo.changelog.rev(n), hexfn(n), label=label)
            fm.data(active=(bmark == active))
            fm.plain('\n')
        fm.end()
1205 1205
@command('branch',
    [('f', 'force', None,
      _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    clean = opts.get('clean')
    if label:
        label = label.strip()

    # no new name and no -C: just report the current branch
    if not label and not clean:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if clean:
            # -C: revert to the branch of the working directory's parent
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            # refuse to shadow an existing branch unless --force, but
            # allow re-setting a branch we are already on (a parent's)
            if label in repo.branchmap() and not opts.get('force'):
                parentbranches = [p.branch() for p in repo[None].parents()]
                if label not in parentbranches:
                    raise error.Abort(_('a branch of the same name already'
                                        ' exists'),
                                      # i18n: "it" refers to an existing branch
                                      hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            others = [n for n, h, t, c in repo.branchmap().iterbranches()
                      if n != "default" and not c]
            if not others:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
1269 1269
@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-c]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    # a branch is "active" when it is open and owns at least one repo head
    allheads = set(repo.heads())
    entries = []
    for name, heads, tip, isclosed in repo.branchmap().iterbranches():
        isactive = not isclosed and bool(set(heads) & allheads)
        entries.append((name, repo[tip], isactive, not isclosed))
    # active branches first, then by descending tip revision
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]), reverse=True)

    for name, ctx, isactive, isopen in entries:
        if active and not isactive:
            continue
        # pick the display label and the trailing state annotation
        if isactive:
            label, notice = 'branches.active', ''
        elif not isopen:
            if not closed:
                continue
            label, notice = 'branches.closed', _(' (closed)')
        else:
            label, notice = 'branches.inactive', _(' (inactive)')
        current = (name == repo.dirstate.branch())
        if current:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', name, label=label)
        rev = ctx.rev()
        # pad so the "rev:node" columns line up across rows
        padsize = max(31 - len(str(rev)) - encoding.colwidth(name), 0)
        fmt = ' ' * padsize + ' %d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
                     label='log.changeset changeset.%s' % ctx.phasestr())
        fm.data(active=isactive, closed=not isopen, current=current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1329 1329
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a changegroup file collecting changesets to be added
    to a repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified.

    You can change bundle format with the -t/--type option. You can
    specify a compression, a bundle version or both using a dash
    (comp-version). The available compression methods are: none, bzip2,
    and gzip (by default, bundles are compressed using bzip2). The
    available formats are: v1, v2 (default to most suitable).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    # resolve -r/--rev into a revision set; an explicit -r that matches
    # nothing is an error rather than an empty bundle
    revs = None
    if 'rev' in opts:
        revstrings = opts['rev']
        revs = scmutil.revrange(repo, revstrings)
        if revstrings and not revs:
            raise error.Abort(_('no commits to bundle'))

    # parse the -t/--type spec into (compression, changegroup version, params)
    bundletype = opts.get('type', 'bzip2').lower()
    try:
        bcompression, cgversion, params = exchange.parsebundlespec(
            repo, bundletype, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(str(e),
                          hint=_('see "hg help bundle" for supported '
                                 'values for --type'))

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == 's1':
        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
                          hint=_("use 'hg debugcreatestreamclonebundle'"))

    if opts.get('all'):
        # --all means "everything on top of the null revision"; it cannot
        # be combined with a destination and overrides any --base
        if dest:
            raise error.Abort(_("--all is incompatible with specifying "
                                "a destination"))
        if opts.get('base'):
            ui.warn(_("ignoring --base because --all was specified\n"))
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    # TODO: get desired bundlecaps from command line.
    bundlecaps = None
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(_("repository does not support bundle version %s") %
                          cgversion)

    if base:
        # explicit --base: no discovery against a remote, just assume the
        # destination already has the base nodes
        if dest:
            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # py2 and/or idiom: map revs to nodes, but keep None/[] as-is
        heads = revs and map(repo.lookup, revs) or revs
        cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
                                        common=common, bundlecaps=bundlecaps,
                                        version=cgversion)
        outgoing = None
    else:
        # no --base: run outgoing discovery against dest (or the default
        # push path) to find what the destination is missing
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)
        cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
                                             bundlecaps, version=cgversion)
    if not cg:
        scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
        return 1

    # map the changegroup version to the on-disk bundle header; bundle1
    # encodes the compression in the header, bundle2 passes it separately
    if cgversion == '01': #bundle1
        if bcompression is None:
            bcompression = 'UN'
        bversion = 'HG10' + bcompression
        bcompression = None
    else:
        assert cgversion == '02'
        bversion = 'HG20'

    bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression)
1443 1443
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    # resolve -r (default: working directory parent), build the file
    # matcher from the positional patterns, and let cmdutil do the work
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    return cmdutil.cat(ui, repo, ctx, matcher, '', **opts)
1477 1477
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
     _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL. When this is done,
      hooks operating on incoming changesets and changegroups may fire twice,
      once for the bundle fetched from the URL and another for any additional
      data not fetched from this URL. In addition, if an error occurs, the
      repository may be rolled back to a partial clone. This behavior may
      change in future releases. See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone http://selenic.com/hg

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone http://selenic.com/hg#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    # hg.clone() returns None on failure; translate that into the
    # command's boolean-ish exit status (True -> exit code 1)
    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=updaterev or not noupdate,
                      branch=opts.get('branch'),
                      shareopts=opts.get('shareopts'))

    return result is None
1609 1609
@command('^commit|ci',
    [('A', 'addremove', None,
      _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch head as closed')),
    ('', 'amend', None, _('amend the parent of the working directory')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    # hold both the working-directory lock and the store lock for the
    # whole operation; the actual work happens in _docommit()
    with repo.wlock(), repo.lock():
        return _docommit(ui, repo, *pats, **opts)
1685 1685
def _docommit(ui, repo, *pats, **opts):
    """Perform the actual work of :hg:`commit`.

    The caller (commit) holds both the wlock and the store lock for the
    whole of this function.  Returns 1 when nothing changed; otherwise
    falls through after reporting commit status.
    """
    if opts.get('interactive'):
        # --interactive hands the whole operation to the record machinery,
        # which re-invokes the top-level commit() per selected change set.
        opts.pop('interactive')
        cmdutil.dorecord(ui, repo, commit, None, False,
                         cmdutil.recordfilter, *pats, **opts)
        return

    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            # When amending, the commit being replaced must itself sit on a
            # head of this branch via one of its parents.
            if repo[None].parents()[0].p1().branch() != branch and \
                    repo[None].parents()[0].p2().branch() != branch:
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        if not old.mutable():
            raise error.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise error.Abort(_('cannot amend changeset with children'))

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
    else:
        def commitfunc(ui, repo, message, match, opts):
            # Temporarily override the new-commit phase (and propagate it to
            # subrepos via baseui) when --secret was given; always restore.
            backup = ui.backupconfig('phases', 'new-commit')
            baseui = repo.baseui
            basebackup = baseui.backupconfig('phases', 'new-commit')
            try:
                if opts.get('secret'):
                    ui.setconfig('phases', 'new-commit', 'secret', 'commit')
                    # Propagate to subrepos
                    baseui.setconfig('phases', 'new-commit', 'secret', 'commit')

                editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
                editor = cmdutil.getcommiteditor(editform=editform, **opts)
                return repo.commit(message, opts.get('user'), opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                ui.restoreconfig(backup)
                repo.baseui.restoreconfig(basebackup)


        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

    if not node:
        # Nothing was committed; explain why (missing files vs. no changes).
        stat = cmdutil.postcommitstatus(repo, pats, opts)
        if stat[3]:
            ui.status(_("nothing changed (%d missing files, see "
                        "'hg status')\n") % len(stat[3]))
        else:
            ui.status(_("nothing changed\n"))
        return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1783 1783
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))],
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        # Candidate config file paths for the requested scope.
        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.join('hgrc')]
        elif opts.get('global'):
            paths = scmutil.systemrcpath()
        else:
            paths = scmutil.userrcpath()

        for f in paths:
            if os.path.exists(f):
                break
        else:
            # No config file exists yet: seed the first candidate path with
            # a sample skeleton appropriate for the chosen scope.
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            # Use a context manager so the file handle is closed even if
            # the write fails (the previous open/close pair leaked it on
            # error).
            with open(f, "w") as fp:
                fp.write(samplehgrc)

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"))
        return

    for f in scmutil.rcpath():
        ui.debug('read config from: %s\n' % f)
    untrusted = bool(opts.get('untrusted'))
    if values:
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise error.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                    matched = True
                elif v == sectname:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
                    matched = True
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
            matched = True
    if matched:
        return 0
    return 1
1881 1881
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # Hold the working-directory lock for the whole copy; release() is the
    # same cleanup the context-manager form performs on exit.
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        release(wlock)
1905 1905
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, unaudited.
        index, rev1, rev2 = args
        opener = scmutil.opener(os.getcwd(), audit=False)
        r = revlog.revlog(opener, index)
        lookup = r.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here "
                                "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(anc), hex(anc)))
1924 1924
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only counts 'n' events so progress has a total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    # accumulated content for the .hg/localtags file, written at the end
    tags = []

    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction("builddag")

        # at: rev number of the most recently committed node (-1 = none yet)
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        # second parse pass: actually create the commits/tags/branches
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    # "mf" carries per-rev line edits so merges have content
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # "of" is rewritten wholesale by every revision
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # a brand-new file per revision; merges also carry over
                    # the "nf*" files of the second parent
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock)
2075 2075
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec only prints the bundle specification and stops there.
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
2093 2093
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup unbundler *gen*.

    With ``all`` set, every delta chunk of the changelog, manifest and
    each filelog is printed; otherwise only changelog node ids are shown.
    ``indent`` prefixes each output line (used when nested in bundle2
    output).  The chunks are consumed strictly in stream order, so the
    header/chunk calls below must not be reordered.
    """
    indent_string = ' ' * indent
    if all:
        ui.write("%sformat: id, p1, p2, cset, delta base, len(delta)\n"
                 % indent_string)

        def showchunks(named):
            # Drain delta chunks for one group; each chunk's delta base
            # chains off the previously seen node.
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelogs follow until an empty header terminates the stream
        while True:
            chunkdata = gen.filelogheader()
            if not chunkdata:
                break
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # terse mode: just the changelog node ids
        chunkdata = gen.changelogheader()
        chain = None
        while True:
            chunkdata = gen.deltachunk(chain)
            if not chunkdata:
                break
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
2140 2140
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type != 'changegroup':
            continue
        # Changegroup parts are expanded further; chunks inside a bundle2
        # part are uncompressed, hence the 'UN' engine.
        cgversion = part.params.get('version', '01')
        unbundler = changegroup.getunbundler(cgversion, part, 'UN')
        _debugchangegroup(ui, unbundler, all=all, indent=4, **opts)
2152 2152
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
2164 2164
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Close the bundle file when done; the previous code left the handle
    # open.  Matches the 'with hg.openpath(...)' pattern used by
    # debugbundle above.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
2171 2171
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and the reverse: every manifest entry must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Do not name this local 'error': that shadowed the imported error
        # module and made the Abort below fail with an AttributeError.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
2199 2199
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for name, entry in sorted(table.iteritems()):
        # keep only the primary name; drop the '^' shortlist marker
        primary = name.split('|')[0].strip('^')
        longopts = ', '.join(opt[1] for opt in entry[1])
        ui.write('%s: %s\n' % (primary, longopts))
2207 2207
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # Global options, plus the named command's own options if given.
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        flags = []
        for opttable in opttables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    matches, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        completions = [' '.join(names[0]) for names in matches.values()]
    else:
        completions = matches
    ui.write("%s\n" % "\n".join(sorted(completions)))
2235 2235
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index: emit its DAG, labeling requested revs.
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # yields ('n', (rev, parents)) nodes and ('l', (rev, label)) tags
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No index file: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # emits ('a', branch) annotations on branch changes, then the
            # node itself and any tag labels attached to it
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
2297 2297
@command('debugdata', debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m, the sole positional argument is the revision, not a file.
    if opts.get('changelog') or opts.get('manifest'):
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
2310 2310
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e widens the accepted input to the extended format list
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
2326 2326
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report common heads / subset facts.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # prune the common set down to the heads of its ancestor set
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog' is read from opts but no --serverlog flag is
    # declared in the option table above -- presumably a legacy/dead path;
    # confirm before relying on it.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    # replay recorded cgss/unb requests from a server log
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
2392 2392
@command('debugextensions', formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        extsource = extmod.__file__
        # 'testedwith' is an optional whitespace-separated version list
        exttestedwith = getattr(extmod, 'testedwith', None)
        if exttestedwith is not None:
            exttestedwith = exttestedwith.split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # default verbosity: annotate the name with test status
            fm.write('name', '%s', extname)
            if not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                if exttestedwith == ['internal'] or \
                   util.version() in exttestedwith:
                    fm.plain('\n')
                else:
                    lasttestedversion = exttestedwith[-1]
                    fm.plain(' (%s!)\n' % lasttestedversion)

        # the remaining details appear only in verbose mode
        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                 _('  tested with: %s\n'), ' '.join(exttestedwith or []))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                 _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
2431 2431
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    # verbose mode additionally dumps the parsed tree before evaluation
    if ui.verbose:
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
2444 2444
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a probe result exactly as before
        return flag and 'yes' or 'no'
    # the case-sensitivity probe needs an existing file; create a scratch one
    util.writefile('.debugfsinfo', '')
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    ui.write(('case-sensitive: %s\n') % yesno(util.checkcase('.debugfsinfo')))
    os.unlink('.debugfsinfo')
2455 2455
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle('debug', **kwargs)

    # map the user-facing compression name to an internal bundle type
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
2489 2489
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        includepat = getattr(ignore, 'includepat', None)
        if includepat is not None:
            ui.write("%s\n" % includepat)
        else:
            raise error.Abort(_("no ignore patterns found"))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether any parent directory is ignored
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write("%s is ignored\n" % f)
                else:
                    ui.write("%s is ignored because of containing folder %s\n"
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write("(ignore rule in %s, line %d: '%s')\n"
                         % (ignorefile, lineno, line))
            else:
                ui.write("%s is not ignored\n" % f)
2533 2533
@command('debugindex', debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    # only the two historical index dump layouts are supported
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # with generaldelta the base column shows the delta parent; otherwise it
    # shows the start of the delta chain -- label the column accordingly
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # --debug prints full-length hashes instead of the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # peek at the first entry only, to size the node id columns
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n" % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of damaged entries; show null parents instead
                # of aborting the whole dump
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2589 2589
@command('debugindexdot', debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in r:
        # emit one edge per (non-null) parent, pointing parent -> child
        parents = r.parents(r.node(rev))
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
2603 2603
@command('debugdeltachain',
    debugrevlogopts + formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    rev revision number
    chainid delta chain identifier (numbered by unique base)
    chainlen delta chain length to this revision
    prevrev previous revision in delta chain
    deltatype role of delta / how it was computed
    compsize compressed size of revision
    uncompsize uncompressed size of revision
    chainsize total size of compressed revisions in chain
    chainratio total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    lindist linear distance from base revision in delta chain to end
    of this revision
    extradist total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    extraratio extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.REVLOGGENERALDELTA

    def revinfo(rev):
        # gather per-revision delta statistics from the raw index entry;
        # fields used here: e[1] compressed size, e[2] uncompressed size,
        # e[3] delta base, e[5]/e[6] parent revisions (as implied by the
        # deltatype classification below)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta by which revision it was computed against
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, deltas are always against the previous
            # revision (or the revision is a full snapshot)
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # total compressed size of every revision in this delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    # chain ids are assigned in order of first appearance of each chain base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        # bytes in that span that belong to OTHER chains
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this revision is its own base
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
2704 2704
@command('debuginstall', [] + formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write 'contents' to a fresh temporary file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # running tally of detected problems; doubles as the exit code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    # condwrite only emits its message when the first argument is truthy
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             sys.executable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%s.%s.%s" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(os.__file__))

    # compiled modules
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(__file__))

    err = None
    try:
        from . import (
            base85,
            bdiff,
            mpatch,
            osutil,
        )
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'extensionserror', " %s\n", err)

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                # reuse 'p' as the "templates are OK" flag from here on
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    # only the command word matters; the editor setting may carry arguments
    cmdpath = util.findexe(shlex.split(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    # a missing default 'vi' is only warned about, not counted as a problem
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%s problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
2826 2826
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # one boolean per requested node, rendered as a string of 0s and 1s
    flags = peer.known([bin(i) for i in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in flags))
2839 2839
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as a thin alias so old completion scripts keep working
    debugnamecomplete(ui, repo, *args)
2844 2844
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the all-zero hash as 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump the raw merge state records for the given format version
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file records; fields are NUL-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    # v1 records carry flags but no "other" node
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: NUL-separated alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # up to three labels: local, other, and an optional base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # records whose type appears in 'order' sort first, in that order;
        # everything else sorts after, by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
2943 2943
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # branches get special treatment: historically only open branches were
    # listed, so first gather every namespace EXCEPT branches...
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    # ...then add only the branches that are still open
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
2963 2963
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # bug fix: this condition used to test force_lock twice, so running
    # with only -W/--force-wlock freed the lock file but then fell through
    # to the status report below (and its non-zero return value)
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """print one lock's status; return 1 if it is held, 0 if free"""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we could take the lock, so nobody else holds it; release it
            # immediately and report it as free below
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                # lock contents are "host:pid"; describe the holder in a
                # friendlier form, omitting the host when it is local
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # the lock file vanished between the failed acquisition and
                # the lstat: treat it as free

        ui.write("%-6s free\n" % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
3035 3035
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + commitopts2,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # parse a full hex node id, aborting on anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: remove the markers at the given indices and stop
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            # message fix: this string used to misspell "obsolescence"
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # message fix: this used to read "cannot used"
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            ind = i if opts.get('index') else None
            cmdutil.showmarker(ui, m, index=ind)
3143 3143
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # return (files, dirs) completions for 'path', keeping only dirstate
        # entries whose state character is in 'acceptable'
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        # completions outside the repository are meaningless
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make the spec repo-relative, with '/' separators like the dirstate
        spec = spec[len(rootdir):]
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, only complete up to the next separator
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options;
    # 'n'/'m' normal and merged, 'a' added, 'r' removed
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        # no filter options at all means accept every state
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
3208 3208
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace,
        # escaping non-printable characters
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))
    else:
        # update mode: conditionally set key from old to new
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
3229 3229
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions (debugging aid)

    With a single argument, B falls back to scmutil.revsingle's default
    revision.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # classify the relation between the two vectors; the four comparisons
    # are expected to be exhaustive, but 'rel' used to be left unbound when
    # none matched, which turned into a NameError at the final write --
    # default it defensively instead
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
3250 3250
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
    ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                            'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; see command doc for what
        # --minimal restricts this to
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files the manifest knows but the dirstate does not
            manifestonly = inmanifest - indirstate
            # dirstate-only files, excluding ones scheduled for add
            dsonly = indirstate - inmanifest
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3288 3288
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all the heavy lifting lives in the repair module
    repair.rebuildfncache(ui, repo)
3293 3293
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() yields (source path, source node) or a false value
        origin = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if not origin:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (rel, origin[0], hex(origin[1])))
3310 3310
@command('debugrevlog', debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: emit one raw index row per revision and return early.
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n")
        ts = 0  # cumulative raw (uncompressed) size
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # full revision: treat itself as its delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the set of current heads incrementally: parents of
            # this rev can no longer be heads, this rev becomes one
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ratio of raw data seen so far to stored bytes so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode version/flag bits from the revlog header word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for classifying each revision's delta base.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # [min, max, total] accumulators; totals are longs to avoid overflow
    # on Python 2 (this file is Python 2 code: 0L, xrange).
    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        # fold 'size' into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            # revlogv0 cannot report rawsize; skip for format 0
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            # stored as a delta; chain length is one more than its base's
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                # sub-classify prev-deltas by whether prev is also a parent
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # NOTE: Python 2 integer division — slot [2] becomes the *average*
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Width-parameterized format templates; filled in by dfmtstr/pcfmtstr.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format wide enough for 'max'
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # decimal-plus-percentage format wide enough for 'max'
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair; 100% when total is falsy
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                               numprev))
            ui.write(('    where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                               numprev))
            ui.write(('    other : ') + fmt2 % pcfmt(numoprev,
                                                     numprev))
        if gdelta:
            # these categories only occur with generaldelta revlogs
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
3496 3496
@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        # Show each transformation stage of the parse tree, but only
        # print a stage when it actually changed the tree.
        tree = revset.parse(expr, lookup=repo.__contains__)
        ui.note(revset.prettyformat(tree), "\n")
        newtree = revset.expandaliases(ui, tree)
        if newtree != tree:
            ui.note("* expanded:\n", revset.prettyformat(newtree), "\n")
        tree = newtree
        newtree = revset.foldconcat(tree)
        if newtree != tree:
            ui.note("* concatenated:\n", revset.prettyformat(newtree), "\n")
        if opts["optimize"]:
            # revset.optimize() is the public entry point and returns just
            # the optimized tree (the old internal _optimize() returned a
            # (weight, tree) pair).
            optimizedtree = revset.optimize(newtree)
            ui.note("* optimized:\n", revset.prettyformat(optimizedtree),
                    "\n")
    # Evaluate the expression for real (re-parses internally) and print
    # the resulting revisions.
    func = revset.match(ui, expr, repo)
    revs = func(repo)
    if ui.verbose:
        ui.note("* set:\n", revset.prettyformatset(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)
3525 3525
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes before taking the lock; the second
    # parent defaults to the null revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3543 3543
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry 'ent' is indexed as: [0] state char, [1] mode,
    # [2] size, [3] mtime (judging from how the fields are used below).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # -1 mtime means the timestamp was deliberately invalidated
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            # S_IFLNK bit set: entry is a symlink
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # Copy/rename records are tracked separately from the entry map.
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
3574 3574
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the substate (subrepo path -> (source, revision)) of the given
    # revision, one 'path'/'source'/'revision' triple per subrepo.
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])
3585 3585
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Default formatters: short forms for normal output.
    ctx2str = str
    node2str = short
    if ui.debug():
        # In debug mode, rebind both formatters to emit full 40-hex hashes.
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        # One output line per successors set; nodes separated by spaces.
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
3639 3639
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself
        # declares optionalrepo=True.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k:
                # empty key ('=VALUE') is as malformed as a missing '='
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the parse tree, and the alias-expanded tree when expansion
        # actually changed it.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note("* expanded:\n", templater.prettyformat(newtree), '\n')

    mapfile = None
    if revs is None:
        # Generic template: evaluate once with just the -D properties.
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, **props)))
    else:
        # Log template: render each requested changeset through the
        # standard changeset templater.
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()
3690 3690
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    # Optionally normalize path separators for display (ui.slash on
    # platforms where os.sep is not '/').
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and os.sep != '/':
        f = lambda fn: util.normpath(fn)
    # Column widths sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        # third column flags whether the match was an exact (literal) one
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
3707 3707
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only command-specific flags are
    # forwarded to the peer.
    for opt in remoteopts:
        del opts[opt[1]]
    # Keep only the options that were actually set to a truthy value.
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
3729 3729
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353 # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways to pick endpoints.
    if revs and change:
        raise error.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        # single changeset versus its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
3816 3816
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # Positional arguments and --rev values are merged; default to the
    # working directory parent when neither is given.
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    # pluralize the verbose status note
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs,
                   template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
3897 3897
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # Use a NUL terminator instead of newline for xargs -0 consumption.
    end = '\0' if opts.get('print0') else '\n'
    fmt = '%s' + end

    fm = ui.formatter('files', opts)
    m = scmutil.match(ctx, pats, opts)
    ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
    fm.end()
    return ret
3961 3961
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise error.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget returns (rejected, forgotten); exit 1 when anything
    # was rejected, 0 otherwise.
    rejected = cmdutil.forget(ui, repo, matcher, prefix="",
                              explicitonly=False)[0]
    return 1 if rejected else 0
3998 3998
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     # NOTE(review): the _('DATE') metavar on this boolean flag looks
     # misplaced (booleans take no value) -- confirm upstream.
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''
    # Hold the working-directory lock for the whole operation; the real
    # work happens in _dograft.
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
4075 4075
4076 4076 def _dograft(ui, repo, *revs, **opts):
4077 4077 if revs and opts['rev']:
4078 4078 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
4079 4079 'revision ordering!\n'))
4080 4080
4081 4081 revs = list(revs)
4082 4082 revs.extend(opts['rev'])
4083 4083
4084 4084 if not opts.get('user') and opts.get('currentuser'):
4085 4085 opts['user'] = ui.username()
4086 4086 if not opts.get('date') and opts.get('currentdate'):
4087 4087 opts['date'] = "%d %d" % util.makedate()
4088 4088
4089 4089 editor = cmdutil.getcommiteditor(editform='graft', **opts)
4090 4090
4091 4091 cont = False
4092 4092 if opts['continue']:
4093 4093 cont = True
4094 4094 if revs:
4095 4095 raise error.Abort(_("can't specify --continue and revisions"))
4096 4096 # read in unfinished revisions
4097 4097 try:
4098 4098 nodes = repo.vfs.read('graftstate').splitlines()
4099 4099 revs = [repo[node].rev() for node in nodes]
4100 4100 except IOError as inst:
4101 4101 if inst.errno != errno.ENOENT:
4102 4102 raise
4103 4103 cmdutil.wrongtooltocontinue(repo, _('graft'))
4104 4104 else:
4105 4105 cmdutil.checkunfinished(repo)
4106 4106 cmdutil.bailifchanged(repo)
4107 4107 if not revs:
4108 4108 raise error.Abort(_('no revisions specified'))
4109 4109 revs = scmutil.revrange(repo, revs)
4110 4110
4111 4111 skipped = set()
4112 4112 # check for merges
4113 4113 for rev in repo.revs('%ld and merge()', revs):
4114 4114 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
4115 4115 skipped.add(rev)
4116 4116 revs = [r for r in revs if r not in skipped]
4117 4117 if not revs:
4118 4118 return -1
4119 4119
4120 4120 # Don't check in the --continue case, in effect retaining --force across
4121 4121 # --continues. That's because without --force, any revisions we decided to
4122 4122 # skip would have been filtered out here, so they wouldn't have made their
4123 4123 # way to the graftstate. With --force, any revisions we would have otherwise
4124 4124 # skipped would not have been filtered out, and if they hadn't been applied
4125 4125 # already, they'd have been in the graftstate.
4126 4126 if not (cont or opts.get('force')):
4127 4127 # check for ancestors of dest branch
4128 4128 crev = repo['.'].rev()
4129 4129 ancestors = repo.changelog.ancestors([crev], inclusive=True)
4130 4130 # Cannot use x.remove(y) on smart set, this has to be a list.
4131 4131 # XXX make this lazy in the future
4132 4132 revs = list(revs)
4133 4133 # don't mutate while iterating, create a copy
4134 4134 for rev in list(revs):
4135 4135 if rev in ancestors:
4136 4136 ui.warn(_('skipping ancestor revision %d:%s\n') %
4137 4137 (rev, repo[rev]))
4138 4138 # XXX remove on list is slow
4139 4139 revs.remove(rev)
4140 4140 if not revs:
4141 4141 return -1
4142 4142
4143 4143 # analyze revs for earlier grafts
4144 4144 ids = {}
4145 4145 for ctx in repo.set("%ld", revs):
4146 4146 ids[ctx.hex()] = ctx.rev()
4147 4147 n = ctx.extra().get('source')
4148 4148 if n:
4149 4149 ids[n] = ctx.rev()
4150 4150
4151 4151 # check ancestors for earlier grafts
4152 4152 ui.debug('scanning for duplicate grafts\n')
4153 4153
4154 4154 for rev in repo.changelog.findmissingrevs(revs, [crev]):
4155 4155 ctx = repo[rev]
4156 4156 n = ctx.extra().get('source')
4157 4157 if n in ids:
4158 4158 try:
4159 4159 r = repo[n].rev()
4160 4160 except error.RepoLookupError:
4161 4161 r = None
4162 4162 if r in revs:
4163 4163 ui.warn(_('skipping revision %d:%s '
4164 4164 '(already grafted to %d:%s)\n')
4165 4165 % (r, repo[r], rev, ctx))
4166 4166 revs.remove(r)
4167 4167 elif ids[n] in revs:
4168 4168 if r is None:
4169 4169 ui.warn(_('skipping already grafted revision %d:%s '
4170 4170 '(%d:%s also has unknown origin %s)\n')
4171 4171 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
4172 4172 else:
4173 4173 ui.warn(_('skipping already grafted revision %d:%s '
4174 4174 '(%d:%s also has origin %d:%s)\n')
4175 4175 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
4176 4176 revs.remove(ids[n])
4177 4177 elif ctx.hex() in ids:
4178 4178 r = ids[ctx.hex()]
4179 4179 ui.warn(_('skipping already grafted revision %d:%s '
4180 4180 '(was grafted from %d:%s)\n') %
4181 4181 (r, repo[r], rev, ctx))
4182 4182 revs.remove(r)
4183 4183 if not revs:
4184 4184 return -1
4185 4185
4186 4186 for pos, ctx in enumerate(repo.set("%ld", revs)):
4187 4187 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
4188 4188 ctx.description().split('\n', 1)[0])
4189 4189 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
4190 4190 if names:
4191 4191 desc += ' (%s)' % ' '.join(names)
4192 4192 ui.status(_('grafting %s\n') % desc)
4193 4193 if opts.get('dry_run'):
4194 4194 continue
4195 4195
4196 4196 source = ctx.extra().get('source')
4197 4197 extra = {}
4198 4198 if source:
4199 4199 extra['source'] = source
4200 4200 extra['intermediate-source'] = ctx.hex()
4201 4201 else:
4202 4202 extra['source'] = ctx.hex()
4203 4203 user = ctx.user()
4204 4204 if opts.get('user'):
4205 4205 user = opts['user']
4206 4206 date = ctx.date()
4207 4207 if opts.get('date'):
4208 4208 date = opts['date']
4209 4209 message = ctx.description()
4210 4210 if opts.get('log'):
4211 4211 message += '\n(grafted from %s)' % ctx.hex()
4212 4212
4213 4213 # we don't merge the first commit when continuing
4214 4214 if not cont:
4215 4215 # perform the graft merge with p1(rev) as 'ancestor'
4216 4216 try:
4217 4217 # ui.forcemerge is an internal variable, do not document
4218 4218 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4219 4219 'graft')
4220 4220 stats = mergemod.graft(repo, ctx, ctx.p1(),
4221 4221 ['local', 'graft'])
4222 4222 finally:
4223 4223 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
4224 4224 # report any conflicts
4225 4225 if stats and stats[3] > 0:
4226 4226 # write out state for --continue
4227 4227 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
4228 4228 repo.vfs.write('graftstate', ''.join(nodelines))
4229 4229 extra = ''
4230 4230 if opts.get('user'):
4231 4231 extra += ' --user %s' % util.shellquote(opts['user'])
4232 4232 if opts.get('date'):
4233 4233 extra += ' --date %s' % util.shellquote(opts['date'])
4234 4234 if opts.get('log'):
4235 4235 extra += ' --log'
4236 4236 hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
4237 4237 raise error.Abort(
4238 4238 _("unresolved conflicts, can't continue"),
4239 4239 hint=hint)
4240 4240 else:
4241 4241 cont = False
4242 4242
4243 4243 # commit
4244 4244 node = repo.commit(text=message, user=user,
4245 4245 date=date, extra=extra, editor=editor)
4246 4246 if node is None:
4247 4247 ui.warn(
4248 4248 _('note: graft of %d:%s created no changes to commit\n') %
4249 4249 (ctx.rev(), ctx))
4250 4250
4251 4251 # remove state when we complete successfully
4252 4252 if not opts.get('dry_run'):
4253 4253 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
4254 4254
4255 4255 return 0
4256 4256
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    # multi-line matching; optionally case-insensitive
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        # NUL-separated fields/records for machine consumption
        sep = eol = '\0'

    # cache filelog lookups; the same file is typically visited at many
    # revisions during the walk below
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # generate (linenum, colstart, colend, linetext) for every match
        # of regexp in body; column offsets are relative to the line
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            # 'or' fallbacks handle a last line with no trailing newline
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matched line; hashable and comparable so that lists of
        # matches can be diffed by difflib (see difflinestates)
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            # equality deliberately ignores the match columns: identical
            # line text at two revisions compares equal
            return self.line == other.line

        def __iter__(self):
            # yield (text, colorlabel) segments, highlighting every
            # occurrence of regexp on the line
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    matches = {}  # rev -> filename -> [linestate]
    copies = {}   # rev -> filename -> copy source (shadows the module import)
    def grepbody(fn, rev, body):
        # record all matches of fn's content at rev into matches[rev][fn]
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # diff two lists of linestates, yielding ('+'/'-', linestate) for
        # lines whose match status changed between them (used by --all)
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        # print the matches of file fn at changeset ctx; pstates holds the
        # parent revision's matches for --all diffing.  Returns True if
        # anything was printed.
        rev = ctx.rev()
        if ui.quiet:
            datefunc = util.shortdate
        else:
            datefunc = util.datestr
        found = False
        @util.cachefunc
        def binary():
            # lazily decide whether the file content is binary
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one output line per file is enough
                break
        return found

    skip = {}      # files (and their copy sources) already reported
    revfiles = {}  # rev -> files to consider at that rev
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs() prefetch callback: collect match state for ctx
        # and its first parent so display() can compare them later
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                # already reported; propagate the skip across the rename
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # also grep the (possibly renamed-from) file in the parent
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all, stop after the first revision of this
                    # file (and its copy source) that printed something
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state as we go; histories can be long
        del matches[rev]
        del revfiles[rev]

    # shell convention: 0 when something matched, 1 otherwise
    return not found
4479 4479
@command('heads',
    [('r', 'rev', '',
      _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    # resolve the -r/--rev restriction; the command table supplies the key
    # with a '' default, which revsingle maps to the null/working state
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    # collect candidate head changesets
    if opts.get('topo'):
        headctxs = [repo[n] for n in repo.heads(start)]
    else:
        nodes = []
        for branch in repo.branchmap():
            nodes += repo.branchheads(branch, start, opts.get('closed'))
        headctxs = [repo[n] for n in nodes]

    if branchrevs:
        # keep only heads on the branches named by the REV arguments
        wantedbranches = set(repo[br].branch() for br in branchrevs)
        headctxs = [c for c in headctxs if c.branch() in wantedbranches]

    if opts.get('active') and branchrevs:
        # DEPRECATED -a: drop heads that are not DAG heads
        dagheads = repo.heads(start)
        headctxs = [c for c in headctxs if c.node() in dagheads]

    if branchrevs:
        # warn about requested branches that yielded no heads
        coveredbranches = set(c.branch() for c in headctxs)
        if wantedbranches - coveredbranches:
            headless = ', '.join(b for b in wantedbranches - coveredbranches)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not headctxs:
        return 1

    # newest first
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in sorted(headctxs, key=lambda c: -c.rev()):
        displayer.show(ctx)
    displayer.close()
4550 4550
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', None, _('show topics matching keyword')),
    ('s', 'system', [], _('show help for specific platform(s)')),
    ],
    _('[-ecks] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    # clamp the rendering width to the terminal, honoring ui.textwidth
    textwidth = ui.configint('ui', 'textwidth', 78)
    termwidth = ui.termwidth() - 2
    if textwidth <= 0 or termwidth < textwidth:
        textwidth = termwidth

    # decide which platform-conditional help sections to keep
    keep = opts.get('system') or []
    if not keep:
        if sys.platform.startswith('win'):
            keep.append('windows')
        elif sys.platform == 'OpenVMS':
            keep.append('vms')
        elif sys.platform == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(sys.platform.lower())
    if ui.verbose:
        keep.append('verbose')

    # "topic.section" or "topic.subtopic.section" addressing
    section = None
    subtopic = None
    if name and '.' in name:
        name, remainder = name.split('.', 1)
        section = remainder.lower()
        if '.' in section:
            subtopic, section = section.split('.', 1)
        else:
            subtopic = section

    text = help.help_(ui, name, subtopic=subtopic, **opts)

    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)

    # We could have been given a weird ".foo" section without a name
    # to look for, or we could have simply failed to found "foo.bar"
    # because bar isn't a section of foo
    if section and not (formatted and name):
        raise error.Abort(_("help section not found"))

    # re-render once we know whether verbose-only content was pruned, so
    # the "(some details hidden)" hints come out right
    if 'verbose' in pruned:
        keep.append('omitted')
    else:
        keep.append('notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    ui.write(formatted)
4617 4617
4618 4618
@command('identify|id',
    [('r', 'rev', '',
      _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    if not repo and not source:
        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))

    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # with no display flags given, print the default summary
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        # SOURCE may be a remote repository or a bundle; repo becomes None
        # when the peer has no local representation
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        # remote-only peer: we can resolve ids and bookmarks through the
        # wire protocol, but not local numbers, branches, or tags
        if num or branch or tags:
            raise error.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # remote bookmarks pointing at remoterev, sorted by name
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        ctx = scmutil.revsingle(repo, rev, None)

        if ctx.rev() is None:
            # no revision given: describe the working directory instead
            ctx = repo[None]
            parents = ctx.parents()
            taglist = []
            for p in parents:
                taglist.extend(p.tags())

            # '+' marks uncommitted changes (including dirty subrepos)
            changed = ""
            if default or id or num:
                if (any(repo.status())
                    or any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))
            taglist = ctx.tags()

        if default and not ui.quiet:
            # default summary: id [+] (branch) tags bookmarks
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(taglist)
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(taglist)

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
4768 4768
@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('abort if patch would apply lossily')),
    ('', 'prefix', '',
     _('apply patch to subdirectory'), _('DIR')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from there.

    Import first applies changes to the working directory (unless
    --bypass is specified), import will abort if there are outstanding
    changes.

    Use --bypass to apply and commit patches directly to the
    repository, without affecting the working directory. Without
    --exact, patches will be applied on top of the working directory
    parent revision.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to the commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This will guard against various ways that portable
    patch formats and mail systems might fail to transfer Mercurial
    data or metadata. See ':hg: bundle' for lossless transmission.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...).

    .. note::

       When no hunks apply cleanly, :hg:`import --partial` will create
       an empty changeset, importing only the patch metadata.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    It is possible to use external patch programs to perform the patch
    by setting the ``ui.patch`` configuration option. For the default
    internal tool, the fuzz can also be configured via ``patch.fuzz``.
    See :hg:`help config` for more information about configuration
    files and how to use these options.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      - use an external tool to apply a patch which is too fuzzy for
        the default internal tool::

          hg import --config ui.patch="patch --merge" fuzzy.patch

      - change the default fuzzing from 2 to a less strict 7::

          hg import --config ui.fuzz=7 fuzz.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    if not patch1:
        raise error.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --bypass means "apply to history only": the working directory is
    # left alone, so wd-related options are rejected below
    exact = opts.get('exact')
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise error.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise error.Abort(_('cannot use --similarity with --bypass'))
    if exact:
        if opts.get('edit'):
            raise error.Abort(_('cannot use --exact with --edit'))
        if opts.get('prefix'):
            raise error.Abort(_('cannot use --exact with --prefix'))

    base = opts["base"]
    # locking/rollback machinery: wlock guards the working directory,
    # lock+tr guard the store when committing, dsguard rolls back
    # dirstate changes for the --no-commit case
    wlock = dsguard = lock = tr = None
    msgs = []
    ret = 0


    try:
        wlock = repo.wlock()

        if update:
            cmdutil.checkunfinished(repo)
            if (exact or not opts.get('force')):
                cmdutil.bailifchanged(repo)

        if not opts.get('no_commit'):
            lock = repo.lock()
            tr = repo.transaction('import')
        else:
            dsguard = cmdutil.dirstateguard(repo, 'import')
        parents = repo[None].parents()
        for patchurl in patches:
            if patchurl == '-':
                ui.status(_('applying patch from stdin\n'))
                patchfile = ui.fin
                patchurl = 'stdin'      # for error message
            else:
                patchurl = os.path.join(base, patchurl)
                ui.status(_('applying %s\n') % patchurl)
                patchfile = hg.openpath(ui, patchurl)

            haspatch = False
            # a single source (e.g. an mbox) may carry several patches
            for hunk in patch.split(patchfile):
                (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                        parents, opts,
                                                        msgs, hg.clean)
                if msg:
                    haspatch = True
                    ui.note(msg + '\n')
                # next patch applies on top of what we just created
                if update or exact:
                    parents = repo[None].parents()
                else:
                    parents = [repo[node]]
                if rej:
                    # --partial left .rej files behind; report and stop
                    ui.write_err(_("patch applied partially\n"))
                    ui.write_err(_("(fix the .rej files and run "
                                   "`hg commit --amend`)\n"))
                    ret = 1
                    break

            if not haspatch:
                raise error.Abort(_('%s: no diffs found') % patchurl)

        if tr:
            tr.close()
        if msgs:
            repo.savecommitmessage('\n* * *\n'.join(msgs))
        if dsguard:
            dsguard.close()
        return ret
    finally:
        # tr.release() is a no-op after a successful tr.close(); on error
        # it rolls the store back before the locks are released
        if tr:
            tr.release()
        release(lock, dsguard, wlock)
4975 4975
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)

        def showchanges(remote, chlist, displayer):
            # render the incoming changesets as an ASCII revision graph
            revdag = cmdutil.graphrevs(remote, chlist, opts)
            cmdutil.displaygraph(ui, repo, revdag, displayer,
                                 graphmod.asciiedges)

        hg._incoming(showchanges, lambda: 1, ui, repo, source, opts,
                     buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise error.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        # bookmark comparison talks to the peer directly; no changeset
        # discovery is needed
        url, branches = hg.parseurl(ui.expandpath(source),
                                    opts.get('branch'))
        remote = hg.peer(repo, opts, url)
        if 'bookmarks' not in remote.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(url))
        return bookmarks.incoming(ui, repo, remote)

    # let subrepos resolve their own "default" relative to ours while
    # hg.incoming() runs
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
5070 5070
5071 5071
5072 5072 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
5073 5073 norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # hg.peer with create=True handles both local directories and
    # remote (e.g. ssh://) destinations uniformly.
    path = ui.expandpath(dest)
    hg.peer(ui, opts, path, create=True)
5088 5088
5089 5089 @command('locate',
5090 5090 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
5091 5091 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5092 5092 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
5093 5093 ] + walkopts,
5094 5094 _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    # with -0/--print0, terminate names with NUL for use with "xargs -0"
    sep = '\0' if opts.get('print0') else '\n'
    node = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ctx = repo[node]
    matcher = scmutil.match(ctx, pats, opts, default='relglob',
                            badfn=lambda x, y: False)

    found = False
    for abspath in ctx.matches(matcher):
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abspath), sep)
        else:
            # with patterns, show the path relative to the cwd
            ui.write(((pats and matcher.rel(abspath)) or abspath), sep)
        found = True

    return 0 if found else 1
5136 5136
5137 5137 @command('^log|history',
5138 5138 [('f', 'follow', None,
5139 5139 _('follow changeset history, or file history across copies and renames')),
5140 5140 ('', 'follow-first', None,
5141 5141 _('only follow the first parent of merge changesets (DEPRECATED)')),
5142 5142 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
5143 5143 ('C', 'copies', None, _('show copied files')),
5144 5144 ('k', 'keyword', [],
5145 5145 _('do case-insensitive search for a given text'), _('TEXT')),
5146 5146 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
5147 5147 ('', 'removed', None, _('include revisions where files were removed')),
5148 5148 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
5149 5149 ('u', 'user', [], _('revisions committed by user'), _('USER')),
5150 5150 ('', 'only-branch', [],
5151 5151 _('show only changesets within the given named branch (DEPRECATED)'),
5152 5152 _('BRANCH')),
5153 5153 ('b', 'branch', [],
5154 5154 _('show changesets within the given named branch'), _('BRANCH')),
5155 5155 ('P', 'prune', [],
5156 5156 _('do not display revision or any of its ancestors'), _('REV')),
5157 5157 ] + logopts + walkopts,
5158 5158 _('[OPTION]... [FILE]'),
5159 5159 inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying and ordering revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    # --follow combined with explicit -r: rewrite the revs into a
    # reverse-ancestry revset and drop the follow flag before delegating
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            # only consider copy data up to the highest requested revision
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        # revision 0 is skipped: it has no parent to have copied from
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # only revisions actually emitted by the (buffered) displayer
        # count towards --limit
        if displayer.flush(ctx):
            count += 1

    displayer.close()
5297 5297
5298 5298 @command('manifest',
5299 5299 [('r', 'rev', '', _('revision to display'), _('REV')),
5300 5300 ('', 'all', False, _("list files from all revisions"))]
5301 5301 + formatteropts,
5302 5302 _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        # walk the store's filelogs directly so that files deleted or
        # renamed in later revisions are still reported
        prefix, suffix = "data/", ".i"
        plen, slen = len(prefix), len(suffix)
        names = []
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                if (size != 0 and fn.endswith(suffix)
                    and fn.startswith(prefix)):
                    names.append(fn[plen:-slen])
        for name in names:
            fm.startitem()
            fm.write("path", '%s\n', name)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    node = node or rev

    # flag -> display character / permission bits for -v output
    flagchar = {'l': '@', 'x': '*', '': ''}
    flagmode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', flagmode[fl],
                     flagchar[fl])
        fm.write('path', '%s\n', f)
    fm.end()
5357 5357
5358 5358 @command('^merge',
5359 5359 [('f', 'force', None,
5360 5360 _('force a merge including outstanding changes (DEPRECATED)')),
5361 5361 ('r', 'rev', '', _('revision to merge'), _('REV')),
5362 5362 ('P', 'preview', None,
5363 5363 _('review revisions to merge (no merge is performed)'))
5364 5364 ] + mergetoolopts,
5365 5365 _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # the revision may come in positionally or via -r/--rev, but not both
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    node = node or opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()
    else:
        # no revision given: let destutil pick the merge destination
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        missing = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for n in missing:
            displayer.show(repo[n])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        return hg.merge(repo, node, force=force, mergeforce=force)
    finally:
        ui.setconfig('ui', 'forcemerge', '', 'merge')
5425 5425
5426 5426 @command('outgoing|out',
5427 5427 [('f', 'force', None, _('run even when the destination is unrelated')),
5428 5428 ('r', 'rev', [],
5429 5429 _('a changeset intended to be included in the destination'), _('REV')),
5430 5430 ('n', 'newest-first', None, _('show newest record first')),
5431 5431 ('B', 'bookmarks', False, _('compare bookmarks')),
5432 5432 ('b', 'branch', [], _('a specific branch you would like to push'),
5433 5433 _('BRANCH')),
5434 5434 ] + logopts + remoteopts + subrepoopts,
5435 5435 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # --graph: compute the outgoing set, then render it as an ASCII DAG
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        missing, other = hg._outgoing(ui, repo, dest, opts)
        if not missing:
            cmdutil.outgoinghooks(ui, repo, other, opts, missing)
            return

        revdag = cmdutil.graphrevs(repo, missing, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, missing)
        return 0

    # -B/--bookmarks: compare bookmarks only, nothing is pushed
    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.outgoing(ui, repo, other)

    # record the destination so subrepo operations use the same location
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
5500 5500
5501 5501 @command('parents',
5502 5502 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
5503 5503 ] + templateopts,
5504 5504 _('[-r REV] [FILE]'),
5505 5505 inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # collect the file's node in each parent where the file exists
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        # map each file node back to the changeset that introduced it
        p = [repo.filectx(file_, fileid=fn).node() for fn in filenodes]
    else:
        p = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
5556 5556
5557 5557 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    if search:
        pathitems = [item for item in ui.paths.iteritems()
                     if item[0] == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    # NOTE(review): a truthy formatter appears to mean non-plain
    # (machine-readable) output, which gets raw URLs instead of
    # password-scrubbed ones -- confirm against the formatter module
    hidepassword = str if fm else util.hidepassword
    namefmt = '%s\n' if ui.quiet else '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    return 0
5624 5624
5625 5625 @command('phase',
5626 5626 [('p', 'public', False, _('set changeset phase to public')),
5627 5627 ('d', 'draft', False, _('set changeset phase to draft')),
5628 5628 ('s', 'secret', False, _('set changeset phase to secret')),
5629 5629 ('f', 'force', False, _('allow to move boundary backward')),
5630 5630 ('r', 'rev', [], _('target revision'), _('REV')),
5631 5631 ],
5632 5632 _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    # search for a unique phase argument
    # exactly one of -p/-d/-s may be given; its index in phasenames is
    # the numeric target phase
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # snapshot phases of every revision before the move so the
            # number of actually-changed changesets can be reported below
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force also allows moving backward (retracting)
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # re-read phases after the transaction to report what changed
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        # requested nodes that did not reach the target phase were
        # rejected (would have needed --force)
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret
5716 5716
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes arguments below:

    :modheads: change of heads by pull/unbundle
    :optupdate: updating working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # re-raise with a "not updating:" prefix, preserving the hint
            raise error.UpdateAbort(_("not updating: %s") % str(inst),
                                    hint=inst.hint)
    if modheads <= 1:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
        return
    # multiple heads added: suggest the appropriate follow-up command
    currentbranchheads = len(repo.branchheads())
    if currentbranchheads == modheads:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    elif currentbranchheads > 1:
        ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                    "merge)\n"))
    else:
        ui.status(_("(run 'hg heads' to see heads)\n"))
5747 5747
5748 5748 @command('^pull',
5749 5749 [('u', 'update', None,
5750 5750 _('update to new branch head if changesets were pulled')),
5751 5751 ('f', 'force', None, _('run even when remote repository is unrelated')),
5752 5752 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5753 5753 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5754 5754 ('b', 'branch', [], _('a specific branch you would like to pull'),
5755 5755 _('BRANCH')),
5756 5756 ] + remoteopts,
5757 5757 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmark used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        # cgresult reports the changegroup outcome; postincoming below
        # turns it into user-facing hints and the final exit status
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

            # order below depends on implementation of
            # hg.addbranchrevs(). opts['bookmark'] is ignored,
            # because 'checkout' is determined without it.
            if opts.get('rev'):
                brev = opts['rev'][0]
            elif opts.get('branch'):
                brev = opts['branch'][0]
            else:
                brev = branches[0]
        # record the source so subrepo pulls use the matching location
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        # always release the peer connection, even on abort
        other.close()
    return ret
5852 5852
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    # Record which bookmarks are being pushed (read downstream via the
    # 'bookmarks.pushing' config), then fold each -B option into -r so
    # the corresponding changesets are included in the outgoing set.
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    # Resolve the destination: explicit DEST, else 'default-push',
    # else 'default' from the [paths] configuration.
    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                         hint=_('see the "path" section in "hg help config"'))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    # Expand the revset arguments to concrete nodes before pushing.
    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                             hint=_("use different revision arguments"))

    # _subtoppath lets subrepos derive their own push destination from
    # the top-level one; always cleaned up in the finally block.
    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                # NOTE(review): a zero result from a subrepo push aborts
                # the outer push with exit status 1 (``not 0`` is True);
                # the meaning of the subrepo return value is defined by
                # subrepo.push -- confirm there before changing this.
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    # Map the changegroup result to a process exit code: truthy
    # cgresult -> 0, falsy -> 1.
    result = not pushop.cgresult

    # Bookmark push problems are reported with the distinct exit code 2,
    # but never mask a changegroup result of 0 (success stays success
    # only when cgresult succeeded and bkresult is clean).
    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
5955 5955
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # Nothing to recover: report failure without running verify.
    if not repo.recover():
        return 1
    # A partial transaction was undone; re-check repository integrity
    # and propagate hg.verify()'s own status code.
    return hg.verify(repo)
5971 5971
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    after = opts.get('after')
    force = opts.get('force')
    # Without file patterns, --after is the only mode that can still
    # select work to do (missing files); anything else is a user error.
    if not (pats or after):
        raise error.Abort(_('no files specified'))

    # Delegate the per-file state handling (the table above) to
    # cmdutil.remove against a working-directory matcher.
    matcher = scmutil.match(repo[None], pats, opts)
    return cmdutil.remove(ui, repo, matcher, "", after, force,
                          opts.get('subrepos'))
6025 6025
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # rename is copy with rename=True: cmdutil.copy records the copy
    # and schedules the source for removal. Hold the working-directory
    # lock for the duration.
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
6049 6049
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # Unpack the mode flags in one go.
    # NOTE(review): the local 'all' shadows the builtin all(); harmless
    # here, but keep it in mind when editing this function.
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    # --list, --mark and --unmark are mutually exclusive modes.
    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    # --list: read-only report of the merge state, no wlock needed.
    if show:
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            # Map the one-letter merge-state code to a label suffix.
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        # Only meaningful while a merge is in progress (active merge
        # state, or a second dirstate parent).
        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        # Give an unfinished merge driver a chance to preprocess first.
        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        # First pass: mark/unmark, or preresolve; files whose merge
        # could not complete in preresolve are queued in tocomplete.
        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    # A missing source file is fine; anything else is real.
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    # Always clear the forced tool and persist merge state,
                    # even when preresolve raises.
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        # Second pass: finish the merges that preresolve left pending.
        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            # The given patterns matched nothing in the merge state;
            # suggest a 'path:'-prefixed retry when that would match.
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                             flags,
                             ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

        # Nudge users into finishing an unfinished operation
        unresolvedf = list(ms.unresolved())
        driverresolvedf = list(ms.driverresolved())
        if not unresolvedf and not driverresolvedf:
            ui.status(_('(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)
        elif not unresolvedf:
            ui.status(_('(no more unresolved files -- '
                        'run "hg resolve --all" to conclude)\n'))

    return ret
6269 6269
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    # --date is translated into a --rev; the two options are exclusive.
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    # Nothing selected at all (no patterns, no -I/-X, no --all/-i):
    # abort, picking the most helpful hint for the current situation.
    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        # The hint depends on whether the target differs from the
        # working-directory parent and whether the wdir is dirty.
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    # Actual revert work happens in cmdutil.revert.
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
6352 6352
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Honor the ui.rollback kill switch (defaults to enabled); when
    # enabled, delegate straight to the repository implementation.
    if ui.configbool('ui', 'rollback', True):
        return repo.rollback(dryrun=opts.get('dry_run'),
                             force=opts.get('force'))
    raise error.Abort(_('rollback is disabled because it is unsafe'),
                      hint=('see `hg help -v rollback` for information'))
6405 6405
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # Emit the absolute repository root, newline-terminated.
    ui.write("%s\n" % repo.root)
6415 6415
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver select mutually exclusive server modes.
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        # --stdio serves the ssh wire protocol over stdin/stdout and
        # therefore needs a concrete local repository.
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        sshserver.sshserver(ui, repo).serve_forever()

    # Choose the service implementation: the command server for
    # --cmdserver, otherwise the built-in hgweb HTTP server.
    if opts["cmdserver"]:
        factory = commandserver.createservice
    else:
        factory = hgweb.createservice
    service = factory(ui, repo, opts)
    return cmdutil.service(opts, initfn=service.init, runfn=service.run)
6483 6483
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # Determine the two comparison nodes: --change compares a revision
    # against its first parent; --rev supplies one or two revisions
    # (revpair also handles the default working-directory comparison).
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # With patterns, print paths relative to the cwd; otherwise
    # repo-relative.
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''

    # --print0 terminates entries with NUL for xargs -0 consumption.
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    # 'states' is ordered to line up with the 'MAR!?IC' codes and with
    # the tuple returned by repo.status() below.
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        # -A: everything; in quiet mode, skip unknown/ignored.
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # Default selection: -mard in quiet mode, -mardu otherwise.
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    # Pair each state name with its one-letter code and file list.
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                # With --copies, the copy source follows its file,
                # indented and labelled status.copied.
                if f in copy:
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()
6619 6619
@command('^summary|sum',
         [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, phase and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    # Read the merge state up front so the unresolved-file count can be
    # reported below; unsupported merge records degrade to a warning and a
    # zero count instead of aborting the whole summary.
    ms = None
    try:
        ms = mergemod.mergestate.read(repo)
    except error.UnsupportedMergeRecords as e:
        s = ' '.join(e.recordtypes)
        ui.warn(
            _('warning: merge state has unsupported record types: %s\n') % s)
        unresolved = 0
    else:
        unresolved = [f for f in ms if ms[f] == 'u']

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
                 label='log.changeset changeset.%s' % p.phasestr())
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            # nullrev parent: distinguish a truly empty repo from a repo
            # with history but nothing checked out
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        ui.write('\n')
        if p.description():
            # first line of the commit message only
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        # non-default branch names are shown even in quiet mode
        ui.write(m, label='log.branch')
    else:
        ui.status(m, label='log.branch')

    if marks:
        active = repo._activebookmark
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if active is not None:
            if active in marks:
                # '*' marks the active bookmark pointing at a parent
                ui.write(' *' + active, label=activebookmarklabel)
                marks.remove(active)
            else:
                # active bookmark does not point at a parent changeset
                ui.write(' [%s]' % active, label=activebookmarklabel)
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    status = repo.status(unknown=True)

    # split dirstate copy records into renames (source was removed) and
    # plain copies; drop the destinations from "added" so they are only
    # counted once
    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in status.removed:
            status.removed.remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in status.added:
            status.added.remove(d)

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]

    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
              (ui.label(_('%d added'), 'status.added'), status.added),
              (ui.label(_('%d removed'), 'status.removed'), status.removed),
              (ui.label(_('%d renamed'), 'status.copied'), renamed),
              (ui.label(_('%d copied'), 'status.copied'), copied),
              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
    t = []
    for l, s in labels:
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    # append at most one state annotation describing the working directory
    if repo.vfs.exists('graftstate'):
        t += _(' (graft in progress)')
    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (status.modified or status.added or status.removed or renamed or
              copied or subs):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if parents:
        pendingphase = max(p.phase() for p in parents)
    else:
        pendingphase = phases.public

    if pendingphase > phases.newcommitphase(ui):
        t += ' (%s)' % phases.phasenames[pendingphase]

    if cleanworkdir:
        # a clean working dir is only worth mentioning in non-quiet mode
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    t = []
    draft = len(repo.revs('draft()'))
    if draft:
        t.append(_('%d draft') % draft)
    secret = len(repo.revs('secret()'))
    if secret:
        t.append(_('%d secret') % secret)

    if draft or secret:
        ui.status(_('phases: %s\n') % ', '.join(t))

    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        for trouble in ("unstable", "divergent", "bumped"):
            numtrouble = len(repo.revs(trouble + "()"))
            # We write all the possibilities to ease translation
            troublemsg = {
               "unstable": _("unstable: %d changesets"),
               "divergent": _("divergent: %d changesets"),
               "bumped": _("bumped: %d changesets"),
            }
            if numtrouble > 0:
                ui.status(troublemsg[trouble] % numtrouble + "\n")

    cmdutil.summaryhooks(ui, repo)

    # --remote forces both directions; otherwise ask the registered remote
    # hooks whether they need incoming/outgoing discovery at all
    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            if i:
                needsincoming = True
            if o:
                needsoutgoing = True
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        # returns (source, sbranch, other-peer, commoninc, incoming-heads);
        # peer errors are only fatal when --remote was requested explicitly
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        # returns (dest, dbranch, other-peer, outgoing); reuses the incoming
        # peer and common-set when the push target matches the pull source
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
    else:
        dest = dbranch = dother = outgoing = None

    if opts.get('remote'):
        t = []
        if incoming:
            t.append(_('1 or more incoming'))
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        other = dother or sother
        if 'bookmarks' in other.listkeys('namespaces'):
            counts = bookmarks.summary(repo, other)
            if counts[0] > 0:
                t.append(_('%d incoming bookmarks') % counts[0])
            if counts[1] > 0:
                t.append(_('%d outgoing bookmarks') % counts[1])

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))

    cmdutil.summaryremotehooks(ui, repo, opts,
                               ((source, sbranch, sother, commoninc),
                                (dest, dbranch, dother, outgoing)))
@command('tag',
         [('f', 'force', None, _('force tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag'), _('REV')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('e', 'edit', None, _('invoke editor on commit messages')),
          ('m', 'message', '', _('use text as commit message'), _('TEXT')),
         ] + commitopts2,
         _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    wlock = lock = None
    try:
        # tagging may commit to .hgtags, so both the working-directory
        # lock and the store lock are needed
        wlock = repo.wlock()
        lock = repo.lock()
        rev_ = "."
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise error.Abort(_('tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise error.Abort(_('tag names cannot consist entirely of '
                                    'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise error.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # a tag may only be removed from the store it lives in
            if opts.get('local'):
                expectedtype = 'local'
            else:
                expectedtype = 'global'

            for n in names:
                if not repo.tagtype(n):
                    raise error.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise error.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise error.Abort(_("tag '%s' is not a local tag") % n)
            # removal is recorded by re-pointing the tag at the null revision
            rev_ = 'null'
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise error.Abort(_("tag '%s' already exists "
                                        "(use -f to force)") % n)
        if not opts.get('local'):
            # a global tag creates a commit, so refuse during an
            # uncommitted merge and, unless forced, off a branch head
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise error.Abort(_('not at a branch head (use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('remove'):
            editform = 'tag.remove'
        else:
            editform = 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform, **opts)

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise error.Abort(_("cannot tag null revision"))

        repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
                 editor=editor)
    finally:
        release(lock, wlock)
7008 7008
@command('tags', formatteropts, '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    When the -q/--quiet switch is used, only the tag name is printed.

    Returns 0 on success.
    """

    fm = ui.formatter('tags', opts)
    hexfunc = fm.hexfunc

    # walk tags from tip-most to oldest
    for tagname, tagnode in reversed(repo.tagslist()):
        # local tags get their own label and an extra "type" column value
        if repo.tagtype(tagname) == 'local':
            label = 'tags.local'
            tagtype = 'local'
        else:
            label = 'tags.normal'
            tagtype = ''

        fm.startitem()
        fm.write('tag', '%s', tagname, label=label)
        # pad the name to a 30-column field before the rev:node column
        fmt = " " * (30 - encoding.colwidth(tagname)) + ' %5d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt,
                     repo.changelog.rev(tagnode), hexfunc(tagnode),
                     label=label)
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
7041 7041
@command('tip',
         [('p', 'patch', None, _('show patch')),
          ('g', 'git', None, _('use git extended diff format')),
         ] + templateopts,
         _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    # render the single "tip" changeset with the standard log displayer
    shower = cmdutil.show_changeset(ui, repo, opts)
    shower.show(repo['tip'])
    shower.close()
7066 7066
@command('unbundle',
         [('u', 'update', None,
           _('update to new branch head if changesets were unbundled'))],
         _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    with repo.lock():
        for fname in fnames:
            f = hg.openpath(ui, fname)
            # readbundle sniffs the header and hands back either a
            # bundle2 unbundler, a stream clone, or a plain changegroup
            gen = exchange.readbundle(ui, f, fname)
            if isinstance(gen, bundle2.unbundle20):
                # bundle2 application must run inside a transaction so a
                # failed part can be rolled back
                tr = repo.transaction('unbundle')
                try:
                    op = bundle2.applybundle(repo, gen, tr, source='unbundle',
                                             url='bundle:' + fname)
                    tr.close()
                except error.BundleUnknownFeatureError as exc:
                    raise error.Abort(_('%s: unknown bundle feature, %s')
                                      % (fname, exc),
                                      hint=_("see https://mercurial-scm.org/"
                                             "wiki/BundleFeature for more "
                                             "information"))
                finally:
                    if tr:
                        tr.release()
                # collect the per-changegroup return codes recorded by the
                # bundle2 operation and fold them into a single result
                changes = [r.get('return', 0)
                           for r in op.records['changegroup']]
                modheads = changegroup.combineresults(changes)
            elif isinstance(gen, streamclone.streamcloneapplier):
                raise error.Abort(
                        _('packed bundles cannot be applied with '
                          '"hg unbundle"'),
                        hint=_('use "hg debugapplystreamclonebundle"'))
            else:
                # legacy changegroup (bundle10) path
                modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)

    return postincoming(ui, repo, modheads, opts.get('update'), None, None)
7112 7112
@command('^update|up|checkout|co',
         [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
          ('c', 'check', None, _('require clean working directory')),
          ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
          ('r', 'rev', '', _('revision'), _('REV'))
         ] + mergetoolopts,
         _('[-c] [-C] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the active bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent, the update is aborted. With the -c/--check
    option, the working directory is checked for uncommitted changes; if
    none are found, the working directory is updated to the specified
    changeset.

    .. container:: verbose

      The following rules apply when the working directory contains
      uncommitted changes:

      1. If neither -c/--check nor -C/--clean is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      3. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # the revision may arrive either as the positional NODE or as -r REV,
    # but not both
    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if rev is None or rev == '':
        rev = node

    if date and rev is not None:
        raise error.Abort(_("you can't specify a revision and a date"))

    if check and clean:
        raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))

    with repo.wlock():
        cmdutil.clearunfinished(repo)

        if date:
            rev = cmdutil.finddate(ui, repo, date)

        # if we defined a bookmark, we have to remember the original name
        brev = rev
        rev = scmutil.revsingle(repo, rev, rev).rev()

        if check:
            cmdutil.bailifchanged(repo, merge=False)

        # propagate --tool to the merge machinery for this update only
        repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

        return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
7199 7199
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see https://mercurial-scm.org/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # all the actual checking lives in hg.verify; pass its result through
    result = hg.verify(repo)
    return result
7218 7218
@command('version', [], norepo=True)
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "(see https://mercurial-scm.org for more information)\n"
        "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

    ui.note(_("\nEnabled extensions:\n\n"))
    if ui.verbose:
        # format names and versions into columns
        rows = []
        for name, module in extensions.extensions():
            if extensions.ismoduleinternal(module):
                origin = _("internal")
            else:
                origin = _("external")
            rows.append((name, origin, extensions.moduleversion(module)))
        if rows:
            namewidth = max(len(row[0]) for row in rows)
            for name, origin, ver in rows:
                ui.write(" %-*s %s %s\n" % (namewidth, name, origin, ver))
7250 7250
def loadcmdtable(ui, name, cmdtable):
    """Load command functions from specified cmdtable
    """
    # warn when an extension shadows a command already registered here
    clashes = [cmd for cmd in cmdtable if cmd in table]
    if clashes:
        ui.warn(_("extension '%s' overrides commands: %s\n")
                % (name, " ".join(clashes)))
    table.update(cmdtable)
@@ -1,3394 +1,3398
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import heapq
11 11 import re
12 12
13 13 from .i18n import _
14 14 from . import (
15 15 destutil,
16 16 encoding,
17 17 error,
18 18 hbisect,
19 19 match as matchmod,
20 20 node,
21 21 obsolete as obsmod,
22 22 parser,
23 23 pathutil,
24 24 phases,
25 25 registrar,
26 26 repoview,
27 27 util,
28 28 )
29 29
def _revancestors(repo, revs, followfirst):
    """Like revlog.ancestors(), but supports followfirst.

    Returns a descending generatorset of the ancestors (inclusive) of
    ``revs``. When ``followfirst`` is true only first parents are walked.
    """
    if followfirst:
        # slice parentrevs(...) down to the first parent only
        cut = 1
    else:
        cut = None
    cl = repo.changelog

    def iterate():
        # Walk from highest to lowest revision. heapq is a min-heap, so
        # revisions are pushed negated to pop the *largest* one first.
        revs.sort(reverse=True)
        irevs = iter(revs)
        h = []

        inputrev = next(irevs, None)
        if inputrev is not None:
            heapq.heappush(h, -inputrev)

        seen = set()
        while h:
            current = -heapq.heappop(h)
            # merge the next input rev into the traversal once the walk
            # has descended past the current one
            if current == inputrev:
                inputrev = next(irevs, None)
                if inputrev is not None:
                    heapq.heappush(h, -inputrev)
            if current not in seen:
                seen.add(current)
                yield current
                for parent in cl.parentrevs(current)[:cut]:
                    if parent != node.nullrev:
                        heapq.heappush(h, -parent)

    return generatorset(iterate(), iterasc=False)
62 62
def _revdescendants(repo, revs, followfirst):
    """Like revlog.descendants() but supports followfirst.

    Returns an ascending generatorset of the descendants (inclusive) of
    ``revs``. When ``followfirst`` is true, a revision counts as a
    descendant only through first-parent links.
    """
    if followfirst:
        # slice parentrevs(...) down to the first parent only
        cut = 1
    else:
        cut = None

    def iterate():
        cl = repo.changelog
        # XXX this should be 'parentset.min()' assuming 'parentset' is a
        # smartset (and if it is not, it should.)
        first = min(revs)
        nullrev = node.nullrev
        if first == nullrev:
            # Are there nodes with a null first parent and a non-null
            # second one? Maybe. Do we care? Probably not.
            for i in cl:
                yield i
        else:
            # single forward pass: a rev is a descendant iff one of its
            # (possibly cut) parents has already been marked as seen
            seen = set(revs)
            for i in cl.revs(first + 1):
                for x in cl.parentrevs(i)[:cut]:
                    if x != nullrev and x in seen:
                        seen.add(i)
                        yield i
                        break

    return generatorset(iterate(), iterasc=True)
91 91
def _reachablerootspure(repo, minroot, roots, heads, includepath):
    """return (heads(::<roots> and ::<heads>))

    If includepath is True, return (<roots>::<heads>).

    Pure-Python fallback used when the changelog lacks a native
    ``reachableroots``; ``minroot`` bounds how far the walk descends.
    """
    if not roots:
        return []
    parentrevs = repo.changelog.parentrevs
    roots = set(roots)
    visit = list(heads)
    reachable = set()
    seen = {}
    # prefetch all the things! (because python is slow)
    reached = reachable.add
    dovisit = visit.append
    nextvisit = visit.pop
    # open-code the post-order traversal due to the tiny size of
    # sys.getrecursionlimit()
    while visit:
        rev = nextvisit()
        if rev in roots:
            reached(rev)
            if not includepath:
                # only the roots themselves matter; no need to expand
                continue
        parents = parentrevs(rev)
        # record parents so the path-marking pass below can replay edges
        seen[rev] = parents
        for parent in parents:
            if parent >= minroot and parent not in seen:
                dovisit(parent)
    if not reachable:
        return baseset()
    if not includepath:
        return reachable
    # second pass (ascending rev order): mark every rev with a reachable
    # parent, which fills in the full <roots>::<heads> path set
    for rev in sorted(seen):
        for parent in seen[rev]:
            if parent in reachable:
                reached(rev)
    return reachable
129 129
def reachableroots(repo, roots, heads, includepath=False):
    """return (heads(::<roots> and ::<heads>))

    If includepath is True, return (<roots>::<heads>)."""
    if not roots:
        return baseset()
    lowest = roots.min()
    rootlist = list(roots)
    headlist = list(heads)
    try:
        # prefer the changelog's native implementation when available
        found = repo.changelog.reachableroots(lowest, headlist, rootlist,
                                              includepath)
    except AttributeError:
        found = _reachablerootspure(repo, lowest, rootlist, headlist,
                                    includepath)
    result = baseset(found)
    result.sort()
    return result
146 146
# Parser tables for the revset grammar, consumed by the shared
# parser.parser machinery.
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None),
    "##": (20, None, None, ("_concat", 20), None),
    "~": (18, None, None, ("ancestor", 18), None),
    "^": (18, None, None, ("parent", 18), ("parentpost", 18)),
    "-": (5, None, ("negate", 19), ("minus", 5), None),
    "::": (17, None, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    "..": (17, None, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    ":": (15, "rangeall", ("rangepre", 15), ("range", 15), ("rangepost", 15)),
    "not": (10, None, ("not", 10), None, None),
    "!": (10, None, ("not", 10), None, None),
    "and": (5, None, None, ("and", 5), None),
    "&": (5, None, None, ("and", 5), None),
    "%": (5, None, None, ("only", 5), ("onlypost", 5)),
    "or": (4, None, None, ("or", 4), None),
    "|": (4, None, None, ("or", 4), None),
    "+": (4, None, None, ("or", 4), None),
    "=": (3, None, None, ("keyvalue", 3), None),
    ",": (2, None, None, ("list", 2), None),
    ")": (0, None, None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "end": (0, None, None, None, None),
}

# words that are reserved operators and may not be used as symbols
keywords = set(['and', 'or', 'not'])

# default set of valid characters for the initial letter of symbols;
# ord(c) > 127 admits any non-ASCII byte (e.g. UTF-8 continuation bytes)
_syminitletters = set(c for c in [chr(i) for i in xrange(256)]
                      if c.isalnum() or c in '._@' or ord(c) > 127)

# default set of valid characters for non-initial letters of symbols
_symletters = set(c for c in [chr(i) for i in xrange(256)]
                  if c.isalnum() or c in '-._/@' or ord(c) > 127)
184 184
def tokenize(program, lookup=None, syminitletters=None, symletters=None):
    '''
    Parse a revset statement into a stream of tokens

    Yields (type, value, position) 3-tuples, ending with an ('end', None,
    pos) token.

    ``syminitletters`` is the set of valid characters for the initial
    letter of symbols.

    By default, character ``c`` is recognized as valid for initial
    letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``.

    ``symletters`` is the set of valid characters for non-initial
    letters of symbols.

    By default, character ``c`` is recognized as valid for non-initial
    letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.

    Check that @ is a valid unquoted token character (issue3686):
    >>> list(tokenize("@::"))
    [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]

    '''
    if syminitletters is None:
        syminitletters = _syminitletters
    if symletters is None:
        symletters = _symletters

    if program and lookup:
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        # (lookup() decides whether each half names a real revision)
        parts = program.split(':', 1)
        if all(lookup(sym) for sym in parts if sym):
            if parts[0]:
                yield ('symbol', parts[0], 0)
            if len(parts) > 1:
                s = len(parts[0])
                yield (':', None, s)
                if parts[1]:
                    yield ('symbol', parts[1], s + 1)
            yield ('end', None, len(program))
            return

    pos, l = 0, len(program)
    while pos < l:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
            yield ('::', None, pos)
            pos += 1 # skip ahead
        elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
            yield ('..', None, pos)
            pos += 1 # skip ahead
        elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
            yield ('##', None, pos)
            pos += 1 # skip ahead
        elif c in "():=,-|&+!~^%": # handle simple operators
            yield (c, None, pos)
        elif (c in '"\'' or c == 'r' and
              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
            if c == 'r':
                # raw string: keep backslashes verbatim
                pos += 1
                c = program[pos]
                decode = lambda x: x
            else:
                decode = parser.unescapestr
            pos += 1
            s = pos
            while pos < l: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', decode(program[s:pos]), s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        # gather up a symbol/keyword
        elif c in syminitletters:
            s = pos
            pos += 1
            while pos < l: # find end of symbol
                d = program[pos]
                if d not in symletters:
                    break
                if d == '.' and program[pos - 1] == '.': # special case for ..
                    pos -= 1
                    break
                pos += 1
            sym = program[s:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, s)
            elif '-' in sym:
                # some jerk gave us foo-bar-baz, try to check if it's a symbol
                if lookup and lookup(sym):
                    # looks like a real symbol
                    yield ('symbol', sym, s)
                else:
                    # looks like an expression
                    parts = sym.split('-')
                    for p in parts[:-1]:
                        if p: # possible consecutive -
                            yield ('symbol', p, s)
                        s += len(p)
                        yield ('-', None, pos)
                        s += 1
                    if parts[-1]: # possible trailing -
                        yield ('symbol', parts[-1], s)
            else:
                yield ('symbol', sym, s)
            # step back one: the bottom of the loop advances pos again
            pos -= 1
        else:
            raise error.ParseError(_("syntax error in revset '%s'") %
                                   program, pos)
        pos += 1
    yield ('end', None, pos)
302 302
303 303 # helpers
304 304
def getstring(x, err):
    """Return the payload of a 'string' or 'symbol' parse-tree node.

    Raises ParseError(err) for any other node (including None).
    """
    if not x:
        raise error.ParseError(err)
    if x[0] in ('string', 'symbol'):
        return x[1]
    raise error.ParseError(err)
309 309
def getlist(x):
    """Flatten parse-tree node x into a list of argument nodes.

    A 'list' node yields its children; None yields []; any other node
    yields a one-element list containing that node.
    """
    if not x:
        return []
    return list(x[1:]) if x[0] == 'list' else [x]
316 316
def getargs(x, min, max, err):
    """Return the arguments of x as a list, validating their count.

    The list must hold at least *min* entries and, when *max* is
    non-negative, at most *max* entries; otherwise ParseError(err)
    is raised.
    """
    args = getlist(x)
    if len(args) < min:
        raise error.ParseError(err)
    if max >= 0 and len(args) > max:
        raise error.ParseError(err)
    return args
322 322
def getargsdict(x, funcname, keys):
    """Map the arguments of x onto *keys* (a space-separated name list).

    *funcname* is only used to build readable error messages.
    """
    arglist = getlist(x)
    return parser.buildargsdict(arglist, funcname, keys.split(),
                                keyvaluenode='keyvalue', keynode='symbol')
326 326
def getset(repo, subset, x):
    """Evaluate parse tree x against subset and return a smartset."""
    if not x:
        raise error.ParseError(_("missing argument"))
    result = methods[x[0]](repo, subset, *x[1:])
    if util.safehasattr(result, 'isascending'):
        # already a smartset
        return result
    # A plain list leaked out of a predicate. Only 'func' nodes can reach
    # this point, because all non-func methods are internal and return
    # smartsets; warn the predicate's author and wrap the result.
    if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
        repo.ui.develwarn('revset "%s" use list instead of smartset, '
                          '(upgrade your code)' % x[1][1],
                          config='old-revset')
    return baseset(result)
340 340
341 341 def _getrevsource(repo, r):
342 342 extra = repo[r].extra()
343 343 for label in ('source', 'transplant_source', 'rebase_source'):
344 344 if label in extra:
345 345 try:
346 346 return repo[extra[label]].rev()
347 347 except error.RepoLookupError:
348 348 pass
349 349 return None
350 350
351 351 # operator methods
352 352
def stringset(repo, subset, x):
    """Resolve a bare revision identifier x against subset."""
    rev = repo[x].rev()
    if rev in subset:
        return baseset([rev])
    if rev == node.nullrev and isinstance(subset, fullreposet):
        # 'null' is implicitly part of the full repo set
        return baseset([rev])
    return baseset()
359 359
def rangeset(repo, subset, x, y):
    """Handle 'x:y': revisions between the first of x and the last of y,
    inclusive, in that direction."""
    xs = getset(repo, fullreposet(repo), x)
    ys = getset(repo, fullreposet(repo), y)
    if not xs or not ys:
        return baseset()
    lo, hi = xs.first(), ys.last()

    if lo == hi:
        span = baseset([lo])
    elif hi == node.wdirrev:
        # ascending range ending at the working directory
        span = spanset(repo, lo, len(repo)) + baseset([hi])
    elif lo == node.wdirrev:
        # descending range starting at the working directory
        span = baseset([lo]) + spanset(repo, len(repo) - 1, hi - 1)
    elif lo < hi:
        span = spanset(repo, lo, hi + 1)
    else:
        span = spanset(repo, lo, hi - 1)
    # XXX We should combine with subset first: 'subset & baseset(...)'. This
    # is necessary to ensure we preserve the order in subset. It has a
    # performance cost; carrying the sorting over when possible would be
    # more efficient.
    return span & subset
384 384
def dagrange(repo, subset, x, y):
    """Handle 'x::y': changesets that are both descendants of x and
    ancestors of y, endpoints included."""
    everything = fullreposet(repo)
    roots = getset(repo, everything, x)
    heads = getset(repo, everything, y)
    reachable = reachableroots(repo, roots, heads, includepath=True)
    # XXX We should combine with subset first: 'subset & baseset(...)'. This
    # is necessary to ensure we preserve the order in subset.
    return reachable & subset
392 392
def andset(repo, subset, x, y):
    """Handle 'x and y' by evaluating y within the result of x."""
    left = getset(repo, subset, x)
    return getset(repo, left, y)
395 395
def differenceset(repo, subset, x, y):
    """Handle 'x - y': members of x (within subset) not matched by y."""
    included = getset(repo, subset, x)
    excluded = getset(repo, subset, y)
    return included - excluded
398 398
def orset(repo, subset, *xs):
    """Handle 'x or y or ...' by splitting the operand list in half,
    recursing on each half, and unioning the two results."""
    assert xs
    if len(xs) == 1:
        return getset(repo, subset, xs[0])
    mid = len(xs) // 2
    left = orset(repo, subset, *xs[:mid])
    right = orset(repo, subset, *xs[mid:])
    return left + right
407 407
def notset(repo, subset, x):
    """Handle 'not x': members of subset not matched by x."""
    matched = getset(repo, subset, x)
    return subset - matched
410 410
def listset(repo, subset, *xs):
    # a bare 'x, y' list is only valid as function arguments; reject it here
    raise error.ParseError(_("can't use a list in this context"),
                           hint=_('see hg help "revsets.x or y"'))
414 414
def keyvaluepair(repo, subset, k, v):
    # 'key=value' pairs are only valid inside function argument lists
    raise error.ParseError(_("can't use a key-value pair in this context"))
417 417
def func(repo, subset, a, b):
    """Dispatch a function-call node to the matching registered symbol.

    Unknown names raise UnknownIdentifier, suggesting only documented
    (public) predicates.
    """
    if a[0] == 'symbol' and a[1] in symbols:
        return symbols[a[1]](repo, subset, b)

    # predicates without a docstring are private; don't advertise them
    documented = [name for (name, fn) in symbols.items()
                  if getattr(fn, '__doc__', None) is not None]
    raise error.UnknownIdentifier(a[1], documented)
426 426
# functions

# symbols are callables like:
#  fn(repo, subset, x)
#  with:
#   repo - current repository instance
#   subset - of revisions to be examined
#   x - argument in tree form
symbols = {}

# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
# functions that just return a lot of changesets (like all) don't count here
safesymbols = set()

# decorator used below to register each revset predicate; presumably it
# feeds `symbols` (and `safesymbols` for safe=True entries) when the
# registrar entries are loaded -- confirm in registrar.py
predicate = registrar.revsetpredicate()
443 443
@predicate('_destupdate')
def _destupdate(repo, subset, x):
    # experimental revset for update destination; deliberately left without
    # a docstring so it is not advertised in user-facing help
    #
    # fix: pass our own name to getargsdict -- it was 'limit' (a copy-paste
    # slip), which made argument-count errors blame the wrong function
    args = getargsdict(x, '_destupdate', 'clean check')
    return subset & baseset([destutil.destupdate(repo, **args)[0]])
449 449
@predicate('_destmerge')
def _destmerge(repo, subset, x):
    # experimental revset for merge destination; no docstring on purpose
    # (undocumented predicates are not advertised in help)
    sourceset = None
    if x is not None:
        sourceset = getset(repo, fullreposet(repo), x)
    dest = destutil.destmerge(repo, sourceset=sourceset)
    return subset & baseset([dest])
457 457
@predicate('adds(pattern)', safe=True)
def adds(repo, subset, x):
    """Changesets that add a file matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "adds" is a keyword
    pat = getstring(x, _("adds requires a pattern"))
    # field 1 selects the "added" entry of the status tuple in checkstatus()
    return checkstatus(repo, subset, pat, 1)
469 469
@predicate('ancestor(*changeset)', safe=True)
def ancestor(repo, subset, x):
    """A greatest common ancestor of the changesets.

    Accepts 0 or more changesets.
    Will return empty list when passed no args.
    Greatest common ancestor of a single changeset is that changeset.
    """
    # i18n: "ancestor" is a keyword
    l = getlist(x)
    rl = fullreposet(repo)
    anc = None

    # fold the pairwise GCA over every revision of every argument set
    for arg in l:
        for r in getset(repo, rl, arg):
            if anc is None:
                anc = repo[r]
            else:
                anc = anc.ancestor(repo[r])

    if anc is not None and anc.rev() in subset:
        return baseset([anc.rev()])
    return baseset()
494 494
def _ancestors(repo, subset, x, followfirst=False):
    # shared implementation of ancestors() and _firstancestors()
    heads = getset(repo, fullreposet(repo), x)
    if not heads:
        return baseset()
    ancs = _revancestors(repo, heads, followfirst)
    return subset & ancs
501 501
@predicate('ancestors(set)', safe=True)
def ancestors(repo, subset, x):
    """Changesets that are ancestors of a changeset in set.
    """
    # thin wrapper; the work happens in _ancestors()
    return _ancestors(repo, subset, x)
507 507
@predicate('_firstancestors', safe=True)
def _firstancestors(repo, subset, x):
    # ``_firstancestors(set)``
    # Like ``ancestors(set)`` but follows only the first parents.
    # No docstring on purpose: undocumented predicates are not advertised.
    return _ancestors(repo, subset, x, followfirst=True)
513 513
def ancestorspec(repo, subset, x, n):
    """``set~n``
    Changesets that are the Nth ancestor (first parents only) of a changeset
    in set.
    """
    try:
        steps = int(n[1])
    except (TypeError, ValueError):
        raise error.ParseError(_("~ expects a number"))
    firstparent = repo.changelog.parentrevs
    ancs = set()
    for rev in getset(repo, fullreposet(repo), x):
        # walk up the first-parent chain exactly `steps` times
        for _unused in range(steps):
            rev = firstparent(rev)[0]
        ancs.add(rev)
    return subset & ancs
530 530
@predicate('author(string)', safe=True)
def author(repo, subset, x):
    """Alias for ``user(string)``.
    """
    # i18n: "author" is a keyword
    needle = encoding.lower(getstring(x, _("author requires a string")))
    kind, pattern, matcher = _substringmatcher(needle)

    def matches(r):
        # match case-insensitively against the committer string
        return matcher(encoding.lower(repo[r].user()))

    return subset.filter(matches, condrepr=('<user %r>', needle))
540 540
@predicate('bisect(string)', safe=True)
def bisect(repo, subset, x):
    """Changesets marked in the specified bisect status:

    - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
    - ``goods``, ``bads``      : csets topologically good/bad
    - ``range``              : csets taking part in the bisection
    - ``pruned``             : csets that are goods, bads or skipped
    - ``untested``           : csets whose fate is yet unknown
    - ``ignored``            : csets ignored due to DAG topology
    - ``current``            : the cset currently being bisected
    """
    # i18n: "bisect" is a keyword
    status = getstring(x, _("bisect requires a string")).lower()
    marked = set(hbisect.get(repo, status))
    return subset & marked
557 557
# Backward-compatibility
# - no help entry so that we do not advertise it any more
@predicate('bisected', safe=True)
def bisected(repo, subset, x):
    # legacy spelling; simply forwards to bisect()
    return bisect(repo, subset, x)
563 563
@predicate('bookmark([name])', safe=True)
def bookmark(repo, subset, x):
    """The named bookmark or all bookmarks.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a bookmark that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "bookmark" is a keyword
    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
    if args:
        bm = getstring(args[0],
                       # i18n: "bookmark" is a keyword
                       _('the argument to bookmark must be a string'))
        kind, pattern, matcher = util.stringmatcher(bm)
        bms = set()
        if kind == 'literal':
            # exact name: direct lookup in the bookmark store
            bmrev = repo._bookmarks.get(pattern, None)
            if not bmrev:
                raise error.RepoLookupError(_("bookmark '%s' does not exist")
                                            % pattern)
            bms.add(repo[bmrev].rev())
        else:
            # pattern (e.g. 're:'): compare against every bookmark name
            matchrevs = set()
            for name, bmrev in repo._bookmarks.iteritems():
                if matcher(name):
                    matchrevs.add(bmrev)
            if not matchrevs:
                raise error.RepoLookupError(_("no bookmarks exist"
                                              " that match '%s'") % pattern)
            for bmrev in matchrevs:
                bms.add(repo[bmrev].rev())
    else:
        # no argument: every bookmarked revision
        bms = set([repo[r].rev()
                   for r in repo._bookmarks.values()])
    # never yield nullrev, even if a bookmark happens to point at it
    bms -= set([node.nullrev])
    return subset & bms
601 601
@predicate('branch(string or set)', safe=True)
def branch(repo, subset, x):
    """
    All changesets belonging to the given branch or the branches of the given
    changesets.

    If `string` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a branch that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # getbi(rev)[0] is the revision's branch name (from the rev-branch cache)
    getbi = repo.revbranchcache().branchinfo

    try:
        b = getstring(x, '')
    except error.ParseError:
        # not a string, but another revspec, e.g. tip()
        pass
    else:
        kind, pattern, matcher = util.stringmatcher(b)
        if kind == 'literal':
            # note: falls through to the revspec case if no branch with
            # this name exists and pattern kind is not specified explicitly
            if pattern in repo.branchmap():
                return subset.filter(lambda r: matcher(getbi(r)[0]),
                                     condrepr=('<branch %r>', b))
            if b.startswith('literal:'):
                # explicit 'literal:' prefix: no fallback allowed
                raise error.RepoLookupError(_("branch '%s' does not exist")
                                            % pattern)
        else:
            return subset.filter(lambda r: matcher(getbi(r)[0]),
                                 condrepr=('<branch %r>', b))

    # revspec case: collect the branches of the given changesets, then keep
    # the changesets themselves plus everything on those branches
    s = getset(repo, fullreposet(repo), x)
    b = set()
    for r in s:
        b.add(getbi(r)[0])
    c = s.__contains__
    return subset.filter(lambda r: c(r) or getbi(r)[0] in b,
                         condrepr=lambda: '<branch %r>' % sorted(b))
641 641
@predicate('bumped()', safe=True)
def bumped(repo, subset, x):
    """Mutable changesets marked as successors of public changesets.

    Only non-public and non-obsolete changesets can be `bumped`.
    """
    # i18n: "bumped" is a keyword
    getargs(x, 0, 0, _("bumped takes no arguments"))
    return subset & obsmod.getrevs(repo, 'bumped')
652 652
@predicate('bundle()', safe=True)
def bundle(repo, subset, x):
    """Changesets in the bundle.

    Bundle must be specified by the -R option."""

    # only bundlerepo changelogs carry the 'bundlerevs' attribute
    cl = repo.changelog
    if not util.safehasattr(cl, 'bundlerevs'):
        raise error.Abort(_("no bundle provided - specify with -R"))
    return subset & cl.bundlerevs
664 664
def checkstatus(repo, subset, pat, field):
    """Filter subset to revisions whose status tuple at index *field*
    contains a file matching *pat* (adds() passes field=1 for added files;
    the index maps into repo.status()'s result tuple).
    """
    # fileset patterns ('set:...') may match differently per changeset, so
    # the matcher cannot be reused across revisions in that case
    hasset = matchmod.patkind(pat) == 'set'

    # one-element list so the closure below can rebind the cached matcher
    mcache = [None]
    def matches(x):
        c = repo[x]
        if not mcache[0] or hasset:
            mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
        m = mcache[0]
        fname = None
        # single literal file: cheap membership tests instead of m(f) scans
        if not m.anypats() and len(m.files()) == 1:
            fname = m.files()[0]
        if fname is not None:
            if fname not in c.files():
                return False
        else:
            for f in c.files():
                if m(f):
                    break
            else:
                return False
        files = repo.status(c.p1().node(), c.node())[field]
        if fname is not None:
            if fname in files:
                return True
        else:
            for f in files:
                if m(f):
                    return True
        # falls through returning None (falsy) when nothing matched

    return subset.filter(matches, condrepr=('<status[%r] %r>', field, pat))
696 696
def _children(repo, narrow, parentset):
    """Return the members of narrow that have a parent in parentset."""
    if not parentset:
        return baseset()
    found = set()
    parentrevs = repo.changelog.parentrevs
    minrev = parentset.min()
    for rev in narrow:
        if rev <= minrev:
            # a child is always numbered above its lowest possible parent
            continue
        if any(p in parentset for p in parentrevs(rev)):
            found.add(rev)
    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
    # This does not break because of other fullreposet misbehavior.
    return baseset(found)
712 712
@predicate('children(set)', safe=True)
def children(repo, subset, x):
    """Child changesets of changesets in set.
    """
    parents = getset(repo, fullreposet(repo), x)
    return subset & _children(repo, subset, parents)
720 720
@predicate('closed()', safe=True)
def closed(repo, subset, x):
    """Changeset is closed.
    """
    # i18n: "closed" is a keyword
    getargs(x, 0, 0, _("closed takes no arguments"))

    def isclosed(r):
        return repo[r].closesbranch()

    return subset.filter(isclosed, condrepr='<branch closed>')
729 729
@predicate('contains(pattern)')
def contains(repo, subset, x):
    """The revision's manifest contains a file matching pattern (but might not
    modify it). See :hg:`help patterns` for information about file patterns.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))

    if not matchmod.patkind(pat):
        # plain path: a manifest membership test is enough
        def matches(rev):
            path = pathutil.canonpath(repo.root, repo.getcwd(), pat)
            return path in repo[rev]
    else:
        # real pattern: scan the whole manifest of each revision
        def matches(rev):
            ctx = repo[rev]
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
            return any(m(f) for f in ctx.manifest())

    return subset.filter(matches, condrepr=('<contains %r>', pat))
756 756
@predicate('converted([id])', safe=True)
def converted(repo, subset, x):
    """Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """

    # There is exactly no chance of resolving the revision, so do a simple
    # string compare and hope for the best

    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))

    def matches(r):
        source = repo[r].extra().get('convert_revision', None)
        if source is None:
            return False
        return rev is None or source.startswith(rev)

    return subset.filter(matches, condrepr=('<converted %r>', rev))
779 779
@predicate('date(interval)', safe=True)
def date(repo, subset, x):
    """Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    ds = getstring(x, _("date requires a string"))
    inrange = util.matchdate(ds)

    def matches(rev):
        # date()[0] is the commit timestamp
        return inrange(repo[rev].date()[0])

    return subset.filter(matches, condrepr=('<date %r>', ds))
789 789
@predicate('desc(string)', safe=True)
def desc(repo, subset, x):
    """Search commit message for string. The match is case-insensitive.
    """
    # i18n: "desc" is a keyword
    needle = encoding.lower(getstring(x, _("desc requires a string")))

    def matches(rev):
        return needle in encoding.lower(repo[rev].description())

    return subset.filter(matches, condrepr=('<desc %r>', needle))
802 802
def _descendants(repo, subset, x, followfirst=False):
    """Shared implementation of descendants() and _firstdescendants():
    members of subset that are in the root set or descend from it.
    """
    roots = getset(repo, fullreposet(repo), x)
    if not roots:
        return baseset()
    s = _revdescendants(repo, roots, followfirst)

    # Both sets need to be ascending in order to lazily return the union
    # in the correct order.
    base = subset & roots
    desc = subset & s
    result = base + desc
    if subset.isascending():
        result.sort()
    elif subset.isdescending():
        result.sort(reverse=True)
    else:
        # arbitrary subset order: re-intersect so subset's order wins
        result = subset & result
    return result
821 821
@predicate('descendants(set)', safe=True)
def descendants(repo, subset, x):
    """Changesets which are descendants of changesets in set.
    """
    # thin wrapper; the work happens in _descendants()
    return _descendants(repo, subset, x)
827 827
@predicate('_firstdescendants', safe=True)
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    # No docstring on purpose: undocumented predicates are not advertised.
    return _descendants(repo, subset, x, followfirst=True)
833 833
@predicate('destination([set])', safe=True)
def destination(repo, subset, x):
    """Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source.  Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        sources = getset(repo, fullreposet(repo), x)
    else:
        sources = fullreposet(repo)

    dests = set()

    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the arg set.
    # Even if the immediate src of r is not in the arg set, src's source (or
    # further back) may be.  Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        # lineage: revs from r back toward the source, collected lazily
        lineage = None

        while src is not None:
            if lineage is None:
                lineage = list()

            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set.  Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset.  Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in sources or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return subset.filter(dests.__contains__,
                         condrepr=lambda: '<destination %r>' % sorted(dests))
878 878
@predicate('divergent()', safe=True)
def divergent(repo, subset, x):
    """
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    return subset & obsmod.getrevs(repo, 'divergent')
888 888
@predicate('extinct()', safe=True)
def extinct(repo, subset, x):
    """Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    return subset & obsmod.getrevs(repo, 'extinct')
897 897
@predicate('extra(label, [value])', safe=True)
def extra(repo, subset, x):
    """Changesets with the given label in the extra metadata, with the given
    optional value.

    If `value` starts with `re:`, the remainder of the value is treated as
    a regular expression. To match a value that actually starts with `re:`,
    use the prefix `literal:`.
    """
    args = getargsdict(x, 'extra', 'label value')
    if 'label' not in args:
        # i18n: "extra" is a keyword
        raise error.ParseError(_('extra takes at least 1 argument'))
    # i18n: "extra" is a keyword
    label = getstring(args['label'], _('first argument to extra must be '
                                       'a string'))
    value = None
    matcher = None

    if 'value' in args:
        # i18n: "extra" is a keyword
        value = getstring(args['value'], _('second argument to extra must be '
                                           'a string'))
        kind, value, matcher = util.stringmatcher(value)

    def matches(r):
        extrafields = repo[r].extra()
        if label not in extrafields:
            return False
        # with no value given, mere presence of the label is enough
        return value is None or matcher(extrafields[label])

    return subset.filter(matches, condrepr=('<extra[%r] %r>', label, value))
928 928
@predicate('filelog(pattern)', safe=True)
def filelog(repo, subset, x):
    """Changesets connected to the specified filelog.

    For performance reasons, visits only revisions mentioned in the file-level
    filelog, rather than filtering through all changesets (much faster, but
    doesn't include deletes or duplicate changes). For a slower, more accurate
    result, use ``file()``.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.

    If some linkrev points to revisions filtered by the current repoview, we'll
    work around it to return a non-filtered value.
    """

    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    s = set()
    cl = repo.changelog

    if not matchmod.patkind(pat):
        # plain path: a single file
        f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
        files = [f]
    else:
        # pattern: every working-directory file that matches
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
        files = (f for f in repo[None] if m(f))

    for f in files:
        fl = repo.file(f)
        # known: filenode -> changelog rev, learned during the scan below
        known = {}
        # scanpos: position to resume the workaround scan from; set to None
        # while a scan is in progress or has run off the end of the changelog
        scanpos = 0
        for fr in list(fl):
            fn = fl.node(fr)
            if fn in known:
                s.add(known[fn])
                continue

            lr = fl.linkrev(fr)
            if lr in cl:
                s.add(lr)
            elif scanpos is not None:
                # lowest matching changeset is filtered, scan further
                # ahead in changelog
                start = max(lr, scanpos) + 1
                scanpos = None
                for r in cl.revs(start):
                    # minimize parsing of non-matching entries
                    if f in cl.revision(r) and f in cl.readfiles(r):
                        try:
                            # try to use manifest delta fastpath
                            n = repo[r].filenode(f)
                            if n not in known:
                                if n == fn:
                                    s.add(r)
                                    scanpos = r
                                    break
                                else:
                                    known[n] = r
                        except error.ManifestLookupError:
                            # deletion in changelog
                            continue

    return subset & s
994 994
@predicate('first(set, [n])', safe=True)
def first(repo, subset, x):
    """An alias for limit().
    """
    # registered separately only to provide the alias name
    return limit(repo, subset, x)
1000 1000
def _follow(repo, subset, x, name, followfirst=False):
    """Shared implementation of follow() and _followfirst().

    With a pattern argument, follows the history (including copies) of the
    matching files in the working directory's first parent; without one,
    follows the ancestors of '.' itself. *name* is only used in error
    messages.
    """
    l = getargs(x, 0, 1, _("%s takes no arguments or a pattern") % name)
    c = repo['.']
    if l:
        x = getstring(l[0], _("%s expected a pattern") % name)
        matcher = matchmod.match(repo.root, repo.getcwd(), [x],
                                 ctx=repo[None], default='path')

        files = c.manifest().walk(matcher)

        s = set()
        for fname in files:
            fctx = c[fname]
            # all ancestors of the file's current revision...
            s = s.union(set(c.rev() for c in fctx.ancestors(followfirst)))
            # include the revision responsible for the most recent version
            s.add(fctx.introrev())
    else:
        s = _revancestors(repo, baseset([c.rev()]), followfirst)

    return subset & s
1021 1021
@predicate('follow([pattern])', safe=True)
def follow(repo, subset, x):
    """
    An alias for ``::.`` (ancestors of the working directory's first parent).
    If pattern is specified, the histories of files matching given
    pattern is followed, including copies.
    """
    # thin wrapper; the work happens in _follow()
    return _follow(repo, subset, x, 'follow')
1030 1030
@predicate('_followfirst', safe=True)
def _followfirst(repo, subset, x):
    # ``followfirst([pattern])``
    # Like ``follow([pattern])`` but follows only the first parent of
    # every revisions or files revisions.
    # No docstring on purpose: undocumented predicates are not advertised.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)
1037 1037
@predicate('all()', safe=True)
def getall(repo, subset, x):
    """All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    # intersecting with spanset drops "null" if subset contains it
    return subset & spanset(repo)
1045 1045
@predicate('grep(regex)')
def grep(repo, subset, x):
    """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
    to ensure special escape characters are handled correctly. Unlike
    ``keyword(string)``, the match is case-sensitive.
    """
    try:
        # i18n: "grep" is a keyword
        rx = re.compile(getstring(x, _("grep requires a string")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s') % e)

    def matches(rev):
        ctx = repo[rev]
        # search file names, the committer and the description
        fields = ctx.files() + [ctx.user(), ctx.description()]
        return any(rx.search(field) for field in fields)

    return subset.filter(matches, condrepr=('<grep %r>', rx.pattern))
1066 1066
@predicate('_matchfiles', safe=True)
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    l = getargs(x, 1, -1, "_matchfiles requires at least one argument")
    pats, inc, exc = [], [], []
    rev, default = None, None
    for arg in l:
        s = getstring(arg, "_matchfiles requires string arguments")
        # each argument is "<two-char prefix><value>"
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                raise error.ParseError('_matchfiles expected at most one '
                                       'revision')
            if value != '': # empty means working directory; leave rev as None
                rev = value
        elif prefix == 'd:':
            if default is not None:
                raise error.ParseError('_matchfiles expected at most one '
                                       'default mode')
            default = value
        else:
            raise error.ParseError('invalid _matchfiles prefix: %s' % prefix)
    if not default:
        default = 'glob'

    m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                       exclude=exc, ctx=repo[rev], default=default)

    # This directly read the changelog data as creating changectx for all
    # revisions is quite expensive.
    getfiles = repo.changelog.readfiles
    wdirrev = node.wdirrev
    def matches(x):
        if x == wdirrev:
            # the working directory has no changelog entry; ask the
            # context for its file list instead
            files = repo[x].files()
        else:
            files = getfiles(x)
        for f in files:
            if m(f):
                return True
        return False

    return subset.filter(matches,
                         condrepr=('<matchfiles patterns=%r, include=%r '
                                   'exclude=%r, default=%r, rev=%r>',
                                   pats, inc, exc, default, rev))
1130 1130
@predicate('file(pattern)', safe=True)
def hasfile(repo, subset, x):
    """Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    # reuse the generic _matchfiles machinery with a single 'p:' pattern
    arg = ('string', 'p:' + pat)
    return _matchfiles(repo, subset, arg)
1143 1143
@predicate('head()', safe=True)
def head(repo, subset, x):
    """Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    cl = repo.changelog
    hs = set()
    # collect the head revisions of every named branch
    for _branch, nodes in repo.branchmap().iteritems():
        for h in nodes:
            hs.add(cl.rev(h))
    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
    # This does not break because of other fullreposet misbehavior.
    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
    # necessary to ensure we preserve the order in subset.
    return baseset(hs) & subset
1159 1159
@predicate('heads(set)', safe=True)
def heads(repo, subset, x):
    """Members of set with no children in set.
    """
    members = getset(repo, subset, x)
    parentset = parents(repo, subset, x)
    # drop every member that is the parent of some other member
    return members - parentset
1167 1167
@predicate('hidden()', safe=True)
def hidden(repo, subset, x):
    """Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    # revisions filtered out of the 'visible' view are the hidden ones
    return subset & repoview.filterrevs(repo, 'visible')
1176 1176
@predicate('keyword(string)', safe=True)
def keyword(repo, subset, x):
    """Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        ctx = repo[r]
        # check the changed files, then author, then description
        for text in ctx.files() + [ctx.user(), ctx.description()]:
            if kw in encoding.lower(text):
                return True
        return False

    return subset.filter(matches, condrepr=('<keyword %r>', kw))
1191 1191
@predicate('limit(set[, n[, offset]])', safe=True)
def limit(repo, subset, x):
    """First n members of set, defaulting to 1, starting from offset.
    """
    args = getargsdict(x, 'limit', 'set n offset')
    if 'set' not in args:
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit requires one to three arguments"))
    try:
        lim, ofs = 1, 0
        if 'n' in args:
            # i18n: "limit" is a keyword
            lim = int(getstring(args['n'], _("limit requires a number")))
        if 'offset' in args:
            # i18n: "limit" is a keyword
            ofs = int(getstring(args['offset'], _("limit requires a number")))
        if ofs < 0:
            raise error.ParseError(_("negative offset"))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    ss = getset(repo, fullreposet(repo), args['set'])
    result = []
    it = iter(ss)
    # skip the first 'ofs' revisions of the set
    for _unused in xrange(ofs):
        if next(it, None) is None:
            break
    # collect up to 'lim' following revisions that are also in subset
    for _unused in xrange(lim):
        y = next(it, None)
        if y is None:
            break
        if y in subset:
            result.append(y)
    return baseset(result, datarepr=('<limit n=%d, offset=%d, %r, %r>',
                                     lim, ofs, subset, ss))
1228 1228
@predicate('last(set, [n])', safe=True)
def last(repo, subset, x):
    """Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    ss = getset(repo, fullreposet(repo), l[0])
    # walk the set from the end by reversing it in place
    ss.reverse()
    result = []
    it = iter(ss)
    for _unused in xrange(lim):
        y = next(it, None)
        if y is None:
            break
        if y in subset:
            result.append(y)
    return baseset(result, datarepr=('<last n=%d, %r, %r>', lim, subset, ss))
1254 1254
@predicate('max(set)', safe=True)
def maxrev(repo, subset, x):
    """Changeset with highest revision number in set.
    """
    revs = getset(repo, fullreposet(repo), x)
    try:
        biggest = revs.max()
        if biggest in subset:
            return baseset([biggest], datarepr=('<max %r, %r>', subset, revs))
    except ValueError:
        # revs.max() throws a ValueError when the collection is empty.
        # Same as python's max().
        pass
    # empty input, or the maximum is not part of subset
    return baseset(datarepr=('<max %r, %r>', subset, revs))
1269 1269
@predicate('merge()', safe=True)
def merge(repo, subset, x):
    """Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    parentrevs = repo.changelog.parentrevs

    def ismerge(r):
        # a merge has a valid (non -1) second parent
        return parentrevs(r)[1] != -1

    return subset.filter(ismerge, condrepr='<merge>')
1279 1279
@predicate('branchpoint()', safe=True)
def branchpoint(repo, subset, x):
    """Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return baseset()
    # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
    # (and if it is not, it should.)
    baserev = min(subset)
    # childcount[r - baserev] counts how often r appears as a parent,
    # i.e. how many children r has
    childcount = [0] * (len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                childcount[p - baserev] += 1
    return subset.filter(lambda r: childcount[r - baserev] > 1,
                         condrepr='<branchpoint>')
1299 1299
@predicate('min(set)', safe=True)
def minrev(repo, subset, x):
    """Changeset with lowest revision number in set.
    """
    revs = getset(repo, fullreposet(repo), x)
    try:
        lowest = revs.min()
        if lowest in subset:
            return baseset([lowest], datarepr=('<min %r, %r>', subset, revs))
    except ValueError:
        # revs.min() throws a ValueError when the collection is empty.
        # Same as python's min().
        pass
    # empty input, or the minimum is not part of subset
    return baseset(datarepr=('<min %r, %r>', subset, revs))
1314 1314
@predicate('modifies(pattern)', safe=True)
def modifies(repo, subset, x):
    """Changesets modifying files matched by pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "modifies" is a keyword
    pat = getstring(x, _("modifies requires a pattern"))
    # status field index 0 selects the 'modified' file list
    return checkstatus(repo, subset, pat, 0)
1326 1326
@predicate('named(namespace)')
def named(repo, subset, x):
    """The changesets in a given namespace.

    If `namespace` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a namespace that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "named" is a keyword
    args = getargs(x, 1, 1, _('named requires a namespace argument'))

    ns = getstring(args[0],
                   # i18n: "named" is a keyword
                   _('the argument to named must be a string'))
    kind, pattern, matcher = util.stringmatcher(ns)
    namespaces = set()
    if kind == 'literal':
        if pattern not in repo.names:
            raise error.RepoLookupError(_("namespace '%s' does not exist")
                                        % ns)
        namespaces.add(repo.names[pattern])
    else:
        # regex (or other matcher) kind: collect every namespace whose
        # name matches; note 'ns' is rebound from the query string to the
        # namespace objects here
        for name, ns in repo.names.iteritems():
            if matcher(name):
                namespaces.add(ns)
        if not namespaces:
            raise error.RepoLookupError(_("no namespace exists"
                                          " that match '%s'") % pattern)

    # gather the revisions of every non-deprecated name in the selected
    # namespaces
    names = set()
    for ns in namespaces:
        for name in ns.listnames(repo):
            if name not in ns.deprecated:
                names.update(repo[n].rev() for n in ns.nodes(repo, name))

    # never include the null revision
    names -= set([node.nullrev])
    return subset & names
1364 1364
@predicate('id(string)', safe=True)
def node_(repo, subset, x):
    """Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    l = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    n = getstring(l[0], _("id requires a string"))
    rn = None
    if len(n) == 40:
        # full 40-char hex nodeid: resolve it directly
        try:
            rn = repo.changelog.rev(node.bin(n))
        except (LookupError, TypeError):
            rn = None
    else:
        # shorter prefix: use the changelog's partial-match machinery
        pm = repo.changelog._partialmatch(n)
        if pm is not None:
            rn = repo.changelog.rev(pm)

    if rn is None:
        return baseset()
    return baseset([rn]) & subset
1388 1388
@predicate('obsolete()', safe=True)
def obsolete(repo, subset, x):
    """Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    # obsmod maintains precomputed revision sets keyed by name
    return subset & obsmod.getrevs(repo, 'obsolete')
1396 1396
@predicate('only(set, [set])', safe=True)
def only(repo, subset, x):
    """Changesets that are ancestors of the first set that are not ancestors
    of any other head in the repo. If a second set is specified, the result
    is ancestors of the first set that are not ancestors of the second set
    (i.e. ::<set1> - ::<set2>).
    """
    cl = repo.changelog
    # i18n: "only" is a keyword
    args = getargs(x, 1, 2, _('only takes one or two arguments'))
    include = getset(repo, fullreposet(repo), args[0])
    if len(args) == 1:
        if not include:
            # empty first set: nothing can be an ancestor of it
            return baseset()

        # single-argument form: exclude every repo head that is neither in
        # the included set nor a descendant of it
        # (PEP 8: 'rev not in x', not 'not rev in x')
        descendants = set(_revdescendants(repo, include, False))
        exclude = [rev for rev in cl.headrevs()
                   if rev not in descendants and rev not in include]
    else:
        exclude = getset(repo, fullreposet(repo), args[1])

    # ::include - ::exclude, computed by the changelog
    results = set(cl.findmissingrevs(common=exclude, heads=include))
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & results
1422 1422
@predicate('origin([set])', safe=True)
def origin(repo, subset, x):
    """
    Changesets that were specified as a source for the grafts, transplants or
    rebases that created the given revisions. Omitting the optional set is the
    same as passing all(). If a changeset created by these operations is itself
    specified as a source for one of these operations, only the source changeset
    for the first operation is selected.
    """
    if x is not None:
        dests = getset(repo, fullreposet(repo), x)
    else:
        dests = fullreposet(repo)

    def _firstsrc(rev):
        # walk the graft/transplant/rebase source chain back to its start
        src = _getrevsource(repo, rev)
        if src is None:
            return None

        while True:
            prev = _getrevsource(repo, src)
            if prev is None:
                return src
            src = prev

    # None marks destinations with no recorded source; drop it
    o = set(_firstsrc(r) for r in dests) - set([None])
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & o
1454 1454
@predicate('outgoing([path])', safe=True)
def outgoing(repo, subset, x):
    """Changesets not found in the specified destination repository, or the
    default push location.
    """
    # Avoid cycles.
    from . import (
        discovery,
        hg,
    )
    # i18n: "outgoing" is a keyword
    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
    # i18n: "outgoing" is a keyword
    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
    # resolve the destination through the configured paths; 'default-push'
    # wins over 'default' when no explicit path was given
    dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # buffer UI output so discovery chatter does not leak to the user
    repo.ui.pushbuffer()
    outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
    repo.ui.popbuffer()
    cl = repo.changelog
    o = set([cl.rev(r) for r in outgoing.missing])
    return subset & o
1481 1481
@predicate('p1([set])', safe=True)
def p1(repo, subset, x):
    """First parent of changesets in set, or the working directory.
    """
    if x is None:
        # no argument: first parent of the working directory
        p = repo[x].p1().rev()
        if p >= 0:
            return subset & baseset([p])
        return baseset()

    cl = repo.changelog
    ps = set(cl.parentrevs(r)[0]
             for r in getset(repo, fullreposet(repo), x))
    ps.discard(node.nullrev)
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & ps
1500 1500
@predicate('p2([set])', safe=True)
def p2(repo, subset, x):
    """Second parent of changesets in set, or the working directory.
    """
    if x is None:
        # no argument: second parent of the working directory, if any
        ps = repo[x].parents()
        try:
            p = ps[1].rev()
            if p >= 0:
                return subset & baseset([p])
            return baseset()
        except IndexError:
            # no second parent at all
            return baseset()

    cl = repo.changelog
    ps = set(cl.parentrevs(r)[1]
             for r in getset(repo, fullreposet(repo), x))
    ps.discard(node.nullrev)
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & ps
1523 1523
@predicate('parents([set])', safe=True)
def parents(repo, subset, x):
    """
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
        ps = set(p.rev() for p in repo[x].parents())
    else:
        ps = set()
        cl = repo.changelog
        parentrevs = cl.parentrevs
        for r in getset(repo, fullreposet(repo), x):
            if r == node.wdirrev:
                # the working directory has no changelog entry; ask its
                # context for parents instead
                ps.update(p.rev() for p in repo[r].parents())
            else:
                ps.update(parentrevs(r))
    ps.discard(node.nullrev)
    return subset & ps
1543 1543
def _phase(repo, subset, target):
    """helper to select all rev in phase <target>"""
    repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
    if repo._phasecache._phasesets:
        # fast path: the per-phase revision sets are available
        revs = repo._phasecache._phasesets[target] - repo.changelog.filteredrevs
        revs = baseset(revs)
        revs.sort() # set are non ordered, so we enforce ascending
        return subset & revs
    else:
        # slow path: query the phase of each candidate revision
        phase = repo._phasecache.phase
        return subset.filter(lambda r: phase(repo, r) == target,
                             condrepr=('<phase %r>', target),
                             cache=False)
1557 1557
@predicate('draft()', safe=True)
def draft(repo, subset, x):
    """Changeset in draft phase."""
    # i18n: "draft" is a keyword
    getargs(x, 0, 0, _("draft takes no arguments"))
    # delegate to the shared phase-selection helper
    return _phase(repo, subset, phases.draft)
1565 1565
@predicate('secret()', safe=True)
def secret(repo, subset, x):
    """Changeset in secret phase."""
    # i18n: "secret" is a keyword
    getargs(x, 0, 0, _("secret takes no arguments"))
    # delegate to the shared phase-selection helper
    return _phase(repo, subset, phases.secret)
1573 1573
def parentspec(repo, subset, x, n):
    """``set^0``
    The set.
    ``set^1`` (or ``set^``), ``set^2``
    First or second parent, respectively, of all changesets in set.
    """
    try:
        n = int(n[1])
        if n not in (0, 1, 2):
            raise ValueError
    except (TypeError, ValueError):
        raise error.ParseError(_("^ expects a number 0, 1, or 2"))
    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        if n == 0:
            # ^0 selects the revision itself
            ps.add(r)
        elif n == 1:
            ps.add(cl.parentrevs(r)[0])
        else:
            # n == 2: only revisions with a second parent contribute
            parents = cl.parentrevs(r)
            if len(parents) > 1:
                ps.add(parents[1])
    return subset & ps
1598 1598
@predicate('present(set)', safe=True)
def present(repo, subset, x):
    """An empty set, if any revision in set isn't found; otherwise,
    all revisions in set.

    If any of specified revisions is not present in the local repository,
    the query is normally aborted. But this predicate allows the query
    to continue even in such cases.
    """
    try:
        return getset(repo, subset, x)
    except error.RepoLookupError:
        # deliberately swallow the lookup failure: unknown revisions
        # simply yield an empty result instead of aborting the query
        return baseset()
1612 1612
# for internal use
@predicate('_notpublic', safe=True)
def _notpublic(repo, subset, x):
    getargs(x, 0, 0, "_notpublic takes no arguments")
    repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
    if repo._phasecache._phasesets:
        # fast path: union every phase set except index 0 (public)
        s = set()
        for u in repo._phasecache._phasesets[1:]:
            s.update(u)
        s = baseset(s - repo.changelog.filteredrevs)
        s.sort()
        return subset & s
    else:
        # slow path: keep revisions whose phase differs from public
        phase = repo._phasecache.phase
        target = phases.public
        condition = lambda r: phase(repo, r) != target
        return subset.filter(condition, condrepr=('<phase %r>', target),
                             cache=False)
1631 1631
@predicate('public()', safe=True)
def public(repo, subset, x):
    """Changeset in public phase."""
    # i18n: "public" is a keyword
    getargs(x, 0, 0, _("public takes no arguments"))
    phase = repo._phasecache.phase
    target = phases.public
    # filter lazily rather than going through _phase(): keeps the
    # original per-revision check and disabled caching
    return subset.filter(lambda r: phase(repo, r) == target,
                         condrepr=('<phase %r>', target),
                         cache=False)
1642 1642
@predicate('remote([id [,path]])', safe=True)
def remote(repo, subset, x):
    """Local revision that corresponds to the given identifier in a
    remote repository, if present. Here, the '.' identifier is a
    synonym for the current local branch.
    """

    from . import hg # avoid start-up nasties
    # i18n: "remote" is a keyword
    l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))

    q = '.'
    if len(l) > 0:
        # i18n: "remote" is a keyword
        q = getstring(l[0], _("remote requires a string id"))
    if q == '.':
        # '.' maps to the branch of the working directory's parent
        q = repo['.'].branch()

    dest = ''
    if len(l) > 1:
        # i18n: "remote" is a keyword
        dest = getstring(l[1], _("remote requires a repository path"))
    dest = repo.ui.expandpath(dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # resolve the identifier on the remote, then map the node back to a
    # local revision if we have it
    n = other.lookup(q)
    if n in repo:
        r = repo[n].rev()
        if r in subset:
            return baseset([r])
    return baseset()
1677 1677
@predicate('removes(pattern)', safe=True)
def removes(repo, subset, x):
    """Changesets which remove files matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "removes" is a keyword
    pat = getstring(x, _("removes requires a pattern"))
    # status field index 2 selects the 'removed' file list
    return checkstatus(repo, subset, pat, 2)
1689 1689
@predicate('rev(number)', safe=True)
def rev(repo, subset, x):
    """Revision with the given numeric identifier.
    """
    # i18n: "rev" is a keyword
    l = getargs(x, 1, 1, _("rev requires one argument"))
    try:
        # i18n: "rev" is a keyword
        r = int(getstring(l[0], _("rev requires a number")))
    except (TypeError, ValueError):
        # i18n: "rev" is a keyword
        raise error.ParseError(_("rev expects a number"))
    # the null revision is accepted even though the changelog does not
    # contain it
    if r != node.nullrev and r not in repo.changelog:
        return baseset()
    return subset & baseset([r])
1705 1705
@predicate('matching(revision [, field])', safe=True)
def matching(repo, subset, x):
    """Changesets in which a given set of fields match the set of fields in the
    selected revision or set.

    To match more than one field pass the list of fields to match separated
    by spaces (e.g. ``author description``).

    Valid fields are most regular revision fields and some special fields.

    Regular revision fields are ``description``, ``author``, ``branch``,
    ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
    and ``diff``.
    Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
    contents of the revision. Two revisions matching their ``diff`` will
    also match their ``files``.

    Special fields are ``summary`` and ``metadata``:
    ``summary`` matches the first line of the description.
    ``metadata`` is equivalent to matching ``description user date``
    (i.e. it matches the main metadata fields).

    ``metadata`` is the default field which is used when no fields are
    specified. You can match more than one field at a time.
    """
    # i18n: "matching" is a keyword
    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))

    revs = getset(repo, fullreposet(repo), l[0])

    fieldlist = ['metadata']
    if len(l) > 1:
        fieldlist = getstring(l[1],
                              # i18n: "matching" is a keyword
                              _("matching requires a string "
                                "as its second argument")).split()

    # Make sure that there are no repeated fields,
    # expand the 'special' 'metadata' field type
    # and check the 'files' whenever we check the 'diff'
    fields = []
    for field in fieldlist:
        if field == 'metadata':
            fields += ['user', 'description', 'date']
        elif field == 'diff':
            # a revision matching the diff must also match the files
            # since matching the diff is very costly, make sure to
            # also match the files first
            fields += ['files', 'diff']
        else:
            if field == 'author':
                # 'author' is an alias for 'user'
                field = 'user'
            fields.append(field)
    fields = set(fields)
    if 'summary' in fields and 'description' in fields:
        # If a revision matches its description it also matches its summary
        fields.discard('summary')

    # We may want to match more than one field
    # Not all fields take the same amount of time to be matched
    # Sort the selected fields in order of increasing matching cost
    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
                  'files', 'description', 'substate', 'diff']
    def fieldkeyfunc(f):
        try:
            return fieldorder.index(f)
        except ValueError:
            # assume an unknown field is very costly
            return len(fieldorder)
    fields = list(fields)
    fields.sort(key=fieldkeyfunc)

    # Each field will be matched with its own "getfield" function
    # which will be added to the getfieldfuncs array of functions
    getfieldfuncs = []
    _funcs = {
        'user': lambda r: repo[r].user(),
        'branch': lambda r: repo[r].branch(),
        'date': lambda r: repo[r].date(),
        'description': lambda r: repo[r].description(),
        'files': lambda r: repo[r].files(),
        'parents': lambda r: repo[r].parents(),
        'phase': lambda r: repo[r].phase(),
        'substate': lambda r: repo[r].substate,
        'summary': lambda r: repo[r].description().splitlines()[0],
        'diff': lambda r: list(repo[r].diff(git=True),)
    }
    for info in fields:
        getfield = _funcs.get(info, None)
        if getfield is None:
            raise error.ParseError(
                # i18n: "matching" is a keyword
                _("unexpected field name passed to matching: %s") % info)
        getfieldfuncs.append(getfield)
    # convert the getfield array of functions into a "getinfo" function
    # which returns an array of field values (or a single value if there
    # is only one field to match)
    getinfo = lambda r: [f(r) for f in getfieldfuncs]

    def matches(x):
        # x matches if ALL selected fields equal those of at least one of
        # the reference revisions
        for rev in revs:
            target = getinfo(rev)
            match = True
            for n, f in enumerate(getfieldfuncs):
                if target[n] != f(x):
                    match = False
            if match:
                return True
        return False

    return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs))
1817 1817
@predicate('reverse(set)', safe=True)
def reverse(repo, subset, x):
    """Reverse order of set.
    """
    # flip the resolved set in place and hand it back
    revs = getset(repo, subset, x)
    revs.reverse()
    return revs
1825 1825
@predicate('roots(set)', safe=True)
def roots(repo, subset, x):
    """Changesets in set with no parent changeset in set.
    """
    s = getset(repo, fullreposet(repo), x)
    parentrevs = repo.changelog.parentrevs

    def isroot(r):
        # a root has no valid (>= 0) parent inside the set itself
        for p in parentrevs(r):
            if 0 <= p and p in s:
                return False
        return True

    return subset & s.filter(isroot, condrepr='<roots>')
1838 1838
@predicate('sort(set[, [-]key...])', safe=True)
def sort(repo, subset, x):
    """Sort set by keys. The default sort order is ascending, specify a key
    as ``-key`` to sort in descending order.

    The keys can be:

    - ``rev`` for the revision number,
    - ``branch`` for the branch name,
    - ``desc`` for the commit message (description),
    - ``user`` for user name (``author`` can be used as an alias),
    - ``date`` for the commit date
    """
    # i18n: "sort" is a keyword
    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
    keys = "rev"
    if len(l) == 2:
        # i18n: "sort" is a keyword
        keys = getstring(l[1], _("sort spec must be a string"))

    s = l[0]
    keys = keys.split()
    revs = getset(repo, subset, s)
    # fast paths: plain revision-number ordering needs no changectx objects
    if keys == ["rev"]:
        revs.sort()
        return revs
    elif keys == ["-rev"]:
        revs.sort(reverse=True)
        return revs
    # sort() is guaranteed to be stable, so sorting by the keys from last
    # to first implements a multi-key sort
    ctxs = [repo[r] for r in revs]
    for k in reversed(keys):
        if k == 'rev':
            ctxs.sort(key=lambda c: c.rev())
        elif k == '-rev':
            ctxs.sort(key=lambda c: c.rev(), reverse=True)
        elif k == 'branch':
            ctxs.sort(key=lambda c: c.branch())
        elif k == '-branch':
            ctxs.sort(key=lambda c: c.branch(), reverse=True)
        elif k == 'desc':
            ctxs.sort(key=lambda c: c.description())
        elif k == '-desc':
            ctxs.sort(key=lambda c: c.description(), reverse=True)
        elif k in ('user', 'author'):
            # tuple membership, not the old "k in 'user author'" substring
            # test which wrongly accepted bogus keys such as 'r' or 'se'
            ctxs.sort(key=lambda c: c.user())
        elif k in ('-user', '-author'):
            ctxs.sort(key=lambda c: c.user(), reverse=True)
        elif k == 'date':
            ctxs.sort(key=lambda c: c.date()[0])
        elif k == '-date':
            ctxs.sort(key=lambda c: c.date()[0], reverse=True)
        else:
            raise error.ParseError(_("unknown sort key %r") % k)
    return baseset([c.rev() for c in ctxs])
1894 1894
@predicate('subrepo([pattern])')
def subrepo(repo, subset, x):
    """Changesets that add, modify or remove the given subrepo. If no subrepo
    pattern is named, any subrepo changes are returned.
    """
    # i18n: "subrepo" is a keyword
    args = getargs(x, 0, 1, _('subrepo takes at most one argument'))
    pat = None
    if len(args) != 0:
        pat = getstring(args[0], _("subrepo requires a pattern"))

    # only changesets touching .hgsubstate can change subrepo state
    m = matchmod.exact(repo.root, repo.root, ['.hgsubstate'])

    def submatches(names):
        # yield subrepo names matching the user pattern; only called when
        # pat is not None
        k, p, m = util.stringmatcher(pat)
        for name in names:
            if m(name):
                yield name

    def matches(x):
        c = repo[x]
        # status restricted to .hgsubstate, between x and its first parent
        s = repo.status(c.p1().node(), c.node(), match=m)

        if pat is None:
            return s.added or s.modified or s.removed

        if s.added:
            return any(submatches(c.substate.keys()))

        if s.modified:
            # consider subrepos present in either the parent or the revision
            subs = set(c.p1().substate.keys())
            subs.update(c.substate.keys())

            for path in submatches(subs):
                # changed if the recorded (source, node, kind) state differs
                if c.p1().substate.get(path) != c.substate.get(path):
                    return True

        if s.removed:
            return any(submatches(c.p1().substate.keys()))

        return False

    return subset.filter(matches, condrepr=('<subrepo %r>', pat))
1938 1938
def _substringmatcher(pattern):
    """Like util.stringmatcher(), but literal patterns match as substrings."""
    kind, pattern, matcher = util.stringmatcher(pattern)
    if kind == 'literal':
        def matcher(s):
            return pattern in s
    return kind, pattern, matcher
1944 1944
@predicate('tag([name])', safe=True)
def tag(repo, subset, x):
    """The specified tag by name, or all tagged revisions if no name is given.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a tag that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "tag" is a keyword
    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
    cl = repo.changelog
    if not args:
        # no pattern: every tagged revision, excluding the floating 'tip' tag
        s = set(cl.rev(n) for t, n in repo.tagslist() if t != 'tip')
    else:
        pattern = getstring(args[0],
                            # i18n: "tag" is a keyword
                            _('the argument to tag must be a string'))
        kind, pattern, matcher = util.stringmatcher(pattern)
        if kind == 'literal':
            # avoid resolving all tags
            tn = repo._tagscache.tags.get(pattern, None)
            if tn is None:
                raise error.RepoLookupError(_("tag '%s' does not exist")
                                            % pattern)
            s = set([repo[tn].rev()])
        else:
            s = set(cl.rev(n) for t, n in repo.tagslist() if matcher(t))
    return subset & s
1973 1973
@predicate('tagged', safe=True)
def tagged(repo, subset, x):
    # synonym for tag() with no arguments; no docstring on purpose so it
    # stays out of the generated predicate help
    return tag(repo, subset, x)
1977 1977
@predicate('unstable()', safe=True)
def unstable(repo, subset, x):
    """Non-obsolete changesets with obsolete ancestors.
    """
    # i18n: "unstable" is a keyword
    getargs(x, 0, 0, _("unstable takes no arguments"))
    return subset & obsmod.getrevs(repo, 'unstable')
1986 1986
1987 1987
@predicate('user(string)', safe=True)
def user(repo, subset, x):
    """User name contains string. The match is case-insensitive.

    If `string` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a user that actually contains `re:`, use
    the prefix `literal:`.
    """
    # alias of author(); kept as a distinct predicate name
    return author(repo, subset, x)
1997 1997
# experimental
@predicate('wdir', safe=True)
def wdir(repo, subset, x):
    # the working-directory pseudo-revision; no docstring on purpose so the
    # experimental predicate stays out of the generated help
    # i18n: "wdir" is a keyword
    getargs(x, 0, 0, _("wdir takes no arguments"))
    if isinstance(subset, fullreposet) or node.wdirrev in subset:
        return baseset([node.wdirrev])
    return baseset()
2006 2006
# for internal use
@predicate('_list', safe=True)
def _list(repo, subset, x):
    # expand a \0-separated list of revision specs (numbers or symbols)
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    # remove duplicates here. it's difficult for caller to deduplicate sets
    # because different symbols can point to the same rev.
    cl = repo.changelog
    ls = []
    seen = set()
    for t in s.split('\0'):
        try:
            # fast path for integer revision
            r = int(t)
            if str(r) != t or r not in cl:
                raise ValueError
            revs = [r]
        except ValueError:
            # not a plain integer revision; resolve as a symbol/string
            revs = stringset(repo, subset, t)

        for r in revs:
            if r in seen:
                continue
            # nullrev is accepted for a fullreposet even though the set
            # does not enumerate it
            if (r in subset
                or r == node.nullrev and isinstance(subset, fullreposet)):
                ls.append(r)
            seen.add(r)
    return baseset(ls)
2036 2036
# for internal use
@predicate('_intlist', safe=True)
def _intlist(repo, subset, x):
    # expand a \0-separated list of integer revisions, keeping list order
    # and dropping revisions absent from the subset
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    revs = [int(t) for t in s.split('\0')]
    return baseset([r for r in revs if r in subset])
2046 2046
# for internal use
@predicate('_hexlist', safe=True)
def _hexlist(repo, subset, x):
    # expand a \0-separated list of full hex nodes, keeping list order
    # and dropping revisions absent from the subset
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    torev = repo.changelog.rev
    revs = [torev(node.bin(h)) for h in s.split('\0')]
    return baseset([r for r in revs if r in subset])
2057 2057
# mapping of parse-tree node type -> evaluation function; getset()
# dispatches on this table when evaluating an optimized tree
methods = {
    "range": rangeset,
    "dagrange": dagrange,
    "string": stringset,
    "symbol": stringset,
    "and": andset,
    "or": orset,
    "not": notset,
    "difference": differenceset,
    "list": listset,
    "keyvalue": keyvaluepair,
    "func": func,
    "ancestor": ancestorspec,
    "parent": parentspec,
    "parentpost": p1,
}
2074 2074
def _matchonly(revs, bases):
    """Detect the ``ancestors(a) and not ancestors(b)`` pattern

    If ``revs`` is ``ancestors(a)`` and ``bases`` is ``not ancestors(b)``,
    return the argument list for an equivalent ``only(a, b)`` call;
    otherwise return None (implicitly).

    >>> f = lambda *args: _matchonly(*map(parse, args))
    >>> f('ancestors(A)', 'not ancestors(B)')
    ('list', ('symbol', 'A'), ('symbol', 'B'))
    """
    if (revs is not None
        and revs[0] == 'func'
        and getstring(revs[1], _('not a symbol')) == 'ancestors'
        and bases is not None
        and bases[0] == 'not'
        and bases[1][0] == 'func'
        and getstring(bases[1][1], _('not a symbol')) == 'ancestors'):
        return ('list', revs[2], bases[1][2])
2089 2089
def _optimize(x, small):
    """Recursively optimize parse tree ``x``, returning ``(weight, tree)``

    ``weight`` is a heuristic estimate of how expensive the subtree is to
    evaluate (used to reorder 'and' operands so the cheaper side runs
    first); ``tree`` is the rewritten subtree.  ``small`` grants
    single-revision subtrees a weight bonus.
    """
    if x is None:
        return 0, x

    smallbonus = 1
    if small:
        smallbonus = .5

    op = x[0]
    # desugaring: rewrite syntactic shorthands into their canonical form
    # and recurse on the result
    if op == 'minus':
        return _optimize(('and', x[1], ('not', x[2])), small)
    elif op == 'only':
        t = ('func', ('symbol', 'only'), ('list', x[1], x[2]))
        return _optimize(t, small)
    elif op == 'onlypost':
        return _optimize(('func', ('symbol', 'only'), x[1]), small)
    elif op == 'dagrangepre':
        return _optimize(('func', ('symbol', 'ancestors'), x[1]), small)
    elif op == 'dagrangepost':
        return _optimize(('func', ('symbol', 'descendants'), x[1]), small)
    elif op == 'rangeall':
        return _optimize(('range', ('string', '0'), ('string', 'tip')), small)
    elif op == 'rangepre':
        return _optimize(('range', ('string', '0'), x[1]), small)
    elif op == 'rangepost':
        return _optimize(('range', x[1], ('string', 'tip')), small)
    elif op == 'negate':
        s = getstring(x[1], _("can't negate that"))
        return _optimize(('string', '-' + s), small)
    elif op in 'string symbol negate':
        # NOTE: substring membership test; op values come from the parser
        # so only exact token names occur in practice
        return smallbonus, x # single revisions are small
    elif op == 'and':
        wa, ta = _optimize(x[1], True)
        wb, tb = _optimize(x[2], True)
        w = min(wa, wb)

        # (::x and not ::y)/(not ::y and ::x) have a fast path
        tm = _matchonly(ta, tb) or _matchonly(tb, ta)
        if tm:
            return w, ('func', ('symbol', 'only'), tm)

        if tb is not None and tb[0] == 'not':
            return wa, ('difference', ta, tb[1])

        # evaluate the cheaper operand first
        if wa > wb:
            return w, (op, tb, ta)
        return w, (op, ta, tb)
    elif op == 'or':
        # fast path for machine-generated expression, that is likely to have
        # lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()'
        ws, ts, ss = [], [], []
        def flushss():
            if not ss:
                return
            if len(ss) == 1:
                w, t = ss[0]
            else:
                s = '\0'.join(t[1] for w, t in ss)
                y = ('func', ('symbol', '_list'), ('string', s))
                w, t = _optimize(y, False)
            ws.append(w)
            ts.append(t)
            del ss[:]
        for y in x[1:]:
            w, t = _optimize(y, False)
            if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
                ss.append((w, t))
                continue
            flushss()
            ws.append(w)
            ts.append(t)
        flushss()
        if len(ts) == 1:
            return ws[0], ts[0] # 'or' operation is fully optimized out
        # we can't reorder trees by weight because it would change the order.
        # ("sort(a + b)" == "sort(b + a)", but "a + b" != "b + a")
        # ts = tuple(t for w, t in sorted(zip(ws, ts), key=lambda wt: wt[0]))
        return max(ws), (op,) + tuple(ts)
    elif op == 'not':
        # Optimize not public() to _notpublic() because we have a fast version
        if x[1] == ('func', ('symbol', 'public'), None):
            newsym = ('func', ('symbol', '_notpublic'), None)
            o = _optimize(newsym, not small)
            return o[0], o[1]
        else:
            o = _optimize(x[1], not small)
            return o[0], (op, o[1])
    elif op == 'parentpost':
        o = _optimize(x[1], small)
        return o[0], (op, o[1])
    elif op == 'group':
        # parentheses carry no semantics of their own; drop the node
        return _optimize(x[1], small)
    elif op in 'dagrange range parent ancestorspec':
        if op == 'parent':
            # x^:y means (x^) : y, not x ^ (:y)
            post = ('parentpost', x[1])
            if x[2][0] == 'dagrangepre':
                return _optimize(('dagrange', post, x[2][1]), small)
            elif x[2][0] == 'rangepre':
                return _optimize(('range', post, x[2][1]), small)

        wa, ta = _optimize(x[1], small)
        wb, tb = _optimize(x[2], small)
        return wa + wb, (op, ta, tb)
    elif op == 'list':
        ws, ts = zip(*(_optimize(y, small) for y in x[1:]))
        return sum(ws), (op,) + ts
    elif op == 'func':
        f = getstring(x[1], _("not a symbol"))
        wa, ta = _optimize(x[2], small)
        # rough cost classes for known predicates
        if f in ("author branch closed date desc file grep keyword "
                 "outgoing user"):
            w = 10 # slow
        elif f in "modifies adds removes":
            w = 30 # slower
        elif f == "contains":
            w = 100 # very slow
        elif f == "ancestor":
            w = 1 * smallbonus
        elif f in "reverse limit first _intlist":
            w = 0
        elif f in "sort":
            w = 10 # assume most sorts look at changelog
        else:
            w = 1
        return w + wa, (op, x[1], ta)
    return 1, x
2217 2217
def optimize(tree):
    """Optimize an evaluatable parse tree and return the rewritten tree."""
    return _optimize(tree, small=True)[1]
2221
# the set of valid characters for the initial letter of symbols in
# alias declarations and definitions; '$' is allowed for backward
# compatibility with positional alias arguments, and bytes > 127 to
# permit non-ASCII names
_aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)]
                           if c.isalnum() or c in '._@$' or ord(c) > 127)
2222 2226
def _parsewith(spec, lookup=None, syminitletters=None):
    """Generate a parse tree of given spec with given tokenizing options

    ``lookup`` is an optional callable used by the tokenizer to test
    whether a name exists in the repository; ``syminitletters``
    optionally widens the set of characters allowed to start a symbol.

    >>> _parsewith('foo($1)', syminitletters=_aliassyminitletters)
    ('func', ('symbol', 'foo'), ('symbol', '$1'))
    >>> _parsewith('$1')
    Traceback (most recent call last):
      ...
    ParseError: ("syntax error in revset '$1'", 0)
    >>> _parsewith('foo bar')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 4)
    """
    p = parser.parser(elements)
    tree, pos = p.parse(tokenize(spec, lookup=lookup,
                                 syminitletters=syminitletters))
    # trailing garbage the parser did not consume is a syntax error
    if pos != len(spec):
        raise error.ParseError(_('invalid token'), pos)
    return parser.simplifyinfixops(tree, ('list', 'or'))
2243 2247
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of revset aliases"""
    _section = _('revset alias')

    @staticmethod
    def _parse(spec):
        """Parse alias declaration/definition ``spec``

        This allows symbol names to use also ``$`` as an initial letter
        (for backward compatibility), and callers of this function should
        examine whether ``$`` is used also for unexpected symbols or not.
        """
        return _parsewith(spec, syminitletters=_aliassyminitletters)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args-list) if tree is a function call; else None."""
        if tree[0] == 'func' and tree[1][0] == 'symbol':
            return tree[1][1], getlist(tree[2])
2262 2266
def expandaliases(ui, tree, showwarning=None):
    """Expand [revsetalias] definitions anywhere in ``tree``

    If ``showwarning`` is given, it is called once for each alias that
    failed to parse and was never actually referenced.
    """
    aliases = _aliasrules.buildmap(ui.configitems('revsetalias'))
    tree = _aliasrules.expand(aliases, tree)
    if showwarning:
        # warn about problematic (but not referred) aliases
        for name, alias in sorted(aliases.iteritems()):
            if alias.error and not alias.warned:
                showwarning(_('warning: %s\n') % (alias.error))
                alias.warned = True
    return tree
2273 2277
def foldconcat(tree):
    """Fold elements to be concatenated by `##`
    """
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return tree
    if tree[0] != '_concat':
        # not a concatenation node: fold each child independently
        return tuple(foldconcat(t) for t in tree)
    # flatten (possibly nested) _concat nodes left-to-right and join the
    # leaf texts into a single string node
    pieces = []
    stack = [tree]
    while stack:
        elem = stack.pop()
        if elem[0] == '_concat':
            stack.extend(reversed(elem[1:]))
        elif elem[0] in ('string', 'symbol'):
            pieces.append(elem[1])
        else:
            msg = _("\"##\" can't concatenate \"%s\" element") % (elem[0])
            raise error.ParseError(msg)
    return ('string', ''.join(pieces))
2294 2298
def parse(spec, lookup=None):
    """Parse ``spec`` into a simplified parse tree (default tokenizing)."""
    return _parsewith(spec, lookup=lookup)
2297 2301
def posttreebuilthook(tree, repo):
    """Extension hook point called after the tree has been optimized.

    The default implementation does nothing.
    """
    # hook for extensions to execute code on the optimized tree
    pass
2301 2305
def match(ui, spec, repo=None):
    """Create a matcher for a single revision spec."""
    if not spec:
        raise error.ParseError(_("empty query"))
    lookup = repo.__contains__ if repo else None
    tree = parse(spec, lookup)
    return _makematcher(ui, tree, repo)
2310 2314
def matchany(ui, specs, repo=None):
    """Create a matcher that will include any revisions matching one of the
    given specs"""
    if not specs:
        # no specs at all: match nothing
        def mfunc(repo, subset=None):
            return baseset()
        return mfunc
    if not all(specs):
        raise error.ParseError(_("empty query"))
    lookup = repo.__contains__ if repo else None
    if len(specs) == 1:
        tree = parse(specs[0], lookup)
    else:
        # combine all specs under a single top-level 'or' node
        tree = ('or',) + tuple(parse(s, lookup) for s in specs)
    return _makematcher(ui, tree, repo)
2328 2332
def _makematcher(ui, tree, repo):
    """Compile a parsed revset tree into ``mfunc(repo, subset=None)``

    The tree is alias-expanded, concat-folded and optimized before being
    handed to the matcher closure.
    """
    if ui:
        tree = expandaliases(ui, tree, showwarning=ui.warn)
    tree = foldconcat(tree)
    tree = optimize(tree)
    posttreebuilthook(tree, repo)
    def mfunc(repo, subset=None):
        if subset is None:
            subset = fullreposet(repo)
        # a smartset exposes isascending(); plain iterables get wrapped
        if util.safehasattr(subset, 'isascending'):
            result = getset(repo, subset, tree)
        else:
            result = getset(repo, baseset(subset), tree)
        return result
    return mfunc
2344 2348
def formatspec(expr, *args):
    '''
    This is a convenience function for using revsets internally, and
    escapes arguments appropriately. Aliases are intentionally ignored
    so that intended expression behavior isn't accidentally subverted.

    Supported arguments:

    %r = revset expression, parenthesized
    %d = int(arg), no quoting
    %s = string(arg), escaped and single-quoted
    %b = arg.branch(), escaped and single-quoted
    %n = hex(arg), single-quoted
    %% = a literal '%'

    Prefixing the type with 'l' specifies a parenthesized list of that type.

    >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
    '(10 or 11):: and ((this()) or (that()))'
    >>> formatspec('%d:: and not %d::', 10, 20)
    '10:: and not 20::'
    >>> formatspec('%ld or %ld', [], [1])
    "_list('') or 1"
    >>> formatspec('keyword(%s)', 'foo\\xe9')
    "keyword('foo\\\\xe9')"
    >>> b = lambda: 'default'
    >>> b.branch = b
    >>> formatspec('branch(%b)', b)
    "branch('default')"
    >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
    "root(_list('a\\x00b\\x00c\\x00d'))"
    '''

    def quote(s):
        # repr() escapes and single-quotes arbitrary byte strings
        return repr(str(s))

    def argtype(c, arg):
        # render one argument according to its format character
        if c == 'd':
            return str(int(arg))
        elif c == 's':
            return quote(arg)
        elif c == 'r':
            parse(arg) # make sure syntax errors are confined
            return '(%s)' % arg
        elif c == 'n':
            return quote(node.hex(arg))
        elif c == 'b':
            return quote(arg.branch())

    def listexp(s, t):
        # render a list of arguments of type t
        l = len(s)
        if l == 0:
            return "_list('')"
        elif l == 1:
            return argtype(t, s[0])
        elif t == 'd':
            return "_intlist('%s')" % "\0".join(str(int(a)) for a in s)
        elif t == 's':
            return "_list('%s')" % "\0".join(s)
        elif t == 'n':
            return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s)
        elif t == 'b':
            return "_list('%s')" % "\0".join(a.branch() for a in s)

        # no compact representation for this type: split the list in two
        # and combine the halves with 'or'
        m = l // 2
        return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))

    ret = ''
    pos = 0
    arg = 0
    # scan expr, copying literal text and substituting % escapes
    while pos < len(expr):
        c = expr[pos]
        if c == '%':
            pos += 1
            d = expr[pos]
            if d == '%':
                ret += d
            elif d in 'dsnbr':
                ret += argtype(d, args[arg])
                arg += 1
            elif d == 'l':
                # a list of some type
                pos += 1
                d = expr[pos]
                ret += listexp(list(args[arg]), d)
                arg += 1
            else:
                raise error.Abort('unexpected revspec format character %s' % d)
        else:
            ret += c
        pos += 1

    return ret
2438 2442
def prettyformat(tree):
    """Return an indented multi-line representation of a parse tree."""
    return parser.prettyformat(tree, ('string', 'symbol'))
2441 2445
def depth(tree):
    """Return the nesting depth of a parse tree (0 for a leaf)."""
    if not isinstance(tree, tuple):
        return 0
    return 1 + max(depth(subtree) for subtree in tree)
2447 2451
def funcsused(tree):
    """Return the set of function names referenced anywhere in a parse tree."""
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return set()
    funcs = set()
    for subtree in tree[1:]:
        funcs.update(funcsused(subtree))
    if tree[0] == 'func':
        funcs.add(tree[1][1])
    return funcs
2458 2462
2459 2463 def _formatsetrepr(r):
2460 2464 """Format an optional printable representation of a set
2461 2465
2462 2466 ======== =================================
2463 2467 type(r) example
2464 2468 ======== =================================
2465 2469 tuple ('<not %r>', other)
2466 2470 str '<branch closed>'
2467 2471 callable lambda: '<branch %r>' % sorted(b)
2468 2472 object other
2469 2473 ======== =================================
2470 2474 """
2471 2475 if r is None:
2472 2476 return ''
2473 2477 elif isinstance(r, tuple):
2474 2478 return r[0] % r[1:]
2475 2479 elif isinstance(r, str):
2476 2480 return r
2477 2481 elif callable(r):
2478 2482 return r()
2479 2483 else:
2480 2484 return repr(r)
2481 2485
class abstractsmartset(object):
    """Interface shared by all smartset classes

    A smartset is an ordered set of revision numbers supporting fast
    membership tests, min/max, ordered iteration and the &, + and -
    set operations.
    """

    def __nonzero__(self):
        """True if the smartset is not empty"""
        raise NotImplementedError()

    def __contains__(self, rev):
        """provide fast membership testing"""
        raise NotImplementedError()

    def __iter__(self):
        """iterate the set in the order it is supposed to be iterated"""
        raise NotImplementedError()

    # Attributes containing a function to perform a fast iteration in a given
    # direction. A smartset can have none, one, or both defined.
    #
    # Default value is None instead of a function returning None to avoid
    # initializing an iterator just for testing if a fast method exists.
    fastasc = None
    fastdesc = None

    def isascending(self):
        """True if the set will iterate in ascending order"""
        raise NotImplementedError()

    def isdescending(self):
        """True if the set will iterate in descending order"""
        raise NotImplementedError()

    @util.cachefunc
    def min(self):
        """return the minimum element in the set"""
        if self.fastasc is not None:
            # first element of an ascending iterator is the minimum
            for r in self.fastasc():
                return r
            raise ValueError('arg is an empty sequence')
        return min(self)

    @util.cachefunc
    def max(self):
        """return the maximum element in the set"""
        if self.fastdesc is not None:
            # first element of a descending iterator is the maximum
            for r in self.fastdesc():
                return r
            raise ValueError('arg is an empty sequence')
        return max(self)

    def first(self):
        """return the first element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def last(self):
        """return the last element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def __len__(self):
        """return the length of the smartsets

        This can be expensive on smartset that could be lazy otherwise."""
        raise NotImplementedError()

    def reverse(self):
        """reverse the expected iteration order"""
        raise NotImplementedError()

    def sort(self, reverse=True):
        """get the set to iterate in an ascending or descending order"""
        raise NotImplementedError()

    def __and__(self, other):
        """Returns a new object with the intersection of the two collections.

        This is part of the mandatory API for smartset."""
        if isinstance(other, fullreposet):
            # intersecting with the whole repo is a no-op
            return self
        return self.filter(other.__contains__, condrepr=other, cache=False)

    def __add__(self, other):
        """Returns a new object with the union of the two collections.

        This is part of the mandatory API for smartset."""
        return addset(self, other)

    def __sub__(self, other):
        """Returns a new object with the substraction of the two collections.

        This is part of the mandatory API for smartset."""
        c = other.__contains__
        return self.filter(lambda r: not c(r), condrepr=('<not %r>', other),
                           cache=False)

    def filter(self, condition, condrepr=None, cache=True):
        """Returns this smartset filtered by condition as a new smartset.

        `condition` is a callable which takes a revision number and returns a
        boolean. Optional `condrepr` provides a printable representation of
        the given `condition`.

        This is part of the mandatory API for smartset."""
        # builtin cannot be cached. but do not needs to
        if cache and util.safehasattr(condition, 'func_code'):
            condition = util.cachefunc(condition)
        return filteredset(self, condition, condrepr)
2590 2594
class baseset(abstractsmartset):
    """Basic data structure that represents a revset and contains the basic
    operation that it should be able to perform.

    Every method in this class should be implemented by any smartset class.
    """
    def __init__(self, data=(), datarepr=None):
        """
        datarepr: a tuple of (format, obj, ...), a function or an object that
        provides a printable representation of the given data.
        """
        # _ascending: None = original insertion order, True/False = sorted
        self._ascending = None
        if not isinstance(data, list):
            if isinstance(data, set):
                self._set = data
                # set has no order we pick one for stability purpose
                self._ascending = True
            data = list(data)
        self._list = data
        self._datarepr = datarepr

    @util.propertycache
    def _set(self):
        # lazily-built set for fast membership tests
        return set(self._list)

    @util.propertycache
    def _asclist(self):
        # lazily-built ascending copy of the data
        asclist = self._list[:]
        asclist.sort()
        return asclist

    def __iter__(self):
        if self._ascending is None:
            return iter(self._list)
        elif self._ascending:
            return iter(self._asclist)
        else:
            return reversed(self._asclist)

    def fastasc(self):
        return iter(self._asclist)

    def fastdesc(self):
        return reversed(self._asclist)

    @util.propertycache
    def __contains__(self):
        # bind directly to the underlying set's __contains__ for speed
        return self._set.__contains__

    def __nonzero__(self):
        return bool(self._list)

    def sort(self, reverse=False):
        self._ascending = not bool(reverse)

    def reverse(self):
        if self._ascending is None:
            self._list.reverse()
        else:
            self._ascending = not self._ascending

    def __len__(self):
        return len(self._list)

    def isascending(self):
        """Returns True if the collection is ascending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        return self._ascending is not None and self._ascending

    def isdescending(self):
        """Returns True if the collection is descending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        return self._ascending is not None and not self._ascending

    def first(self):
        if self:
            if self._ascending is None:
                return self._list[0]
            elif self._ascending:
                return self._asclist[0]
            else:
                return self._asclist[-1]
        return None

    def last(self):
        if self:
            if self._ascending is None:
                return self._list[-1]
            elif self._ascending:
                return self._asclist[-1]
            else:
                return self._asclist[0]
        return None

    def __repr__(self):
        d = {None: '', False: '-', True: '+'}[self._ascending]
        s = _formatsetrepr(self._datarepr)
        if not s:
            l = self._list
            # if _list has been built from a set, it might have a different
            # order from one python implementation to another.
            # We fallback to the sorted version for a stable output.
            if self._ascending is not None:
                l = self._asclist
            s = repr(l)
        return '<%s%s %s>' % (type(self).__name__, d, s)
2703 2707
class filteredset(abstractsmartset):
    """Duck type for baseset class which iterates lazily over the revisions in
    the subset and contains a function which tests for membership in the
    revset
    """
    def __init__(self, subset, condition=lambda x: True, condrepr=None):
        """
        condition: a function that decide whether a revision in the subset
        belongs to the revset or not.
        condrepr: a tuple of (format, obj, ...), a function or an object that
        provides a printable representation of the given condition.
        """
        self._subset = subset
        self._condition = condition
        self._condrepr = condrepr

    def __contains__(self, x):
        return x in self._subset and self._condition(x)

    def __iter__(self):
        return self._iterfilter(self._subset)

    def _iterfilter(self, it):
        # yield only revisions of ``it`` passing the condition
        cond = self._condition
        for x in it:
            if cond(x):
                yield x

    @property
    def fastasc(self):
        # fast iteration is available only if the subset provides it
        it = self._subset.fastasc
        if it is None:
            return None
        return lambda: self._iterfilter(it())

    @property
    def fastdesc(self):
        it = self._subset.fastdesc
        if it is None:
            return None
        return lambda: self._iterfilter(it())

    def __nonzero__(self):
        # any single yielded element proves the set is non-empty
        fast = self.fastasc
        if fast is None:
            fast = self.fastdesc
        if fast is not None:
            it = fast()
        else:
            it = self

        for r in it:
            return True
        return False

    def __len__(self):
        # Basic implementation to be changed in future patches.
        # until this gets improved, we use generator expression
        # here, since list compr is free to call __len__ again
        # causing infinite recursion
        l = baseset(r for r in self)
        return len(l)

    def sort(self, reverse=False):
        self._subset.sort(reverse=reverse)

    def reverse(self):
        self._subset.reverse()

    def isascending(self):
        return self._subset.isascending()

    def isdescending(self):
        return self._subset.isdescending()

    def first(self):
        for x in self:
            return x
        return None

    def last(self):
        it = None
        if self.isascending():
            it = self.fastdesc
        elif self.isdescending():
            it = self.fastasc
        if it is not None:
            for x in it():
                return x
            return None #empty case
        else:
            # no fast reverse iterator: exhaust the set and keep the last
            x = None
            for x in self:
                pass
            return x

    def __repr__(self):
        xs = [repr(self._subset)]
        s = _formatsetrepr(self._condrepr)
        if s:
            xs.append(s)
        return '<%s %s>' % (type(self).__name__, ', '.join(xs))
2806 2810
2807 2811 def _iterordered(ascending, iter1, iter2):
2808 2812 """produce an ordered iteration from two iterators with the same order
2809 2813
2810 2814 The ascending is used to indicated the iteration direction.
2811 2815 """
2812 2816 choice = max
2813 2817 if ascending:
2814 2818 choice = min
2815 2819
2816 2820 val1 = None
2817 2821 val2 = None
2818 2822 try:
2819 2823 # Consume both iterators in an ordered way until one is empty
2820 2824 while True:
2821 2825 if val1 is None:
2822 2826 val1 = iter1.next()
2823 2827 if val2 is None:
2824 2828 val2 = iter2.next()
2825 2829 next = choice(val1, val2)
2826 2830 yield next
2827 2831 if val1 == next:
2828 2832 val1 = None
2829 2833 if val2 == next:
2830 2834 val2 = None
2831 2835 except StopIteration:
2832 2836 # Flush any remaining values and consume the other one
2833 2837 it = iter2
2834 2838 if val1 is not None:
2835 2839 yield val1
2836 2840 it = iter1
2837 2841 elif val2 is not None:
2838 2842 # might have been equality and both are empty
2839 2843 yield val2
2840 2844 for val in it:
2841 2845 yield val
2842 2846
class addset(abstractsmartset):
    """Represent the addition of two sets

    Wrapper structure for lazily adding two structures without losing much
    performance on the __contains__ method

    If the ascending attribute is set, that means the two structures are
    ordered in either an ascending or descending way. Therefore, we can add
    them maintaining the order by iterating over both at the same time

    >>> xs = baseset([0, 3, 2])
    >>> ys = baseset([5, 2, 4])

    >>> rs = addset(xs, ys)
    >>> bool(rs), 0 in rs, 1 in rs, 5 in rs, rs.first(), rs.last()
    (True, True, False, True, 0, 4)
    >>> rs = addset(xs, baseset([]))
    >>> bool(rs), 0 in rs, 1 in rs, rs.first(), rs.last()
    (True, True, False, 0, 2)
    >>> rs = addset(baseset([]), baseset([]))
    >>> bool(rs), 0 in rs, rs.first(), rs.last()
    (False, False, None, None)

    iterate unsorted:
    >>> rs = addset(xs, ys)
    >>> # (use generator because pypy could call len())
    >>> list(x for x in rs) # without _genlist
    [0, 3, 2, 5, 4]
    >>> assert not rs._genlist
    >>> len(rs)
    5
    >>> [x for x in rs] # with _genlist
    [0, 3, 2, 5, 4]
    >>> assert rs._genlist

    iterate ascending:
    >>> rs = addset(xs, ys, ascending=True)
    >>> # (use generator because pypy could call len())
    >>> list(x for x in rs), list(x for x in rs.fastasc()) # without _asclist
    ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5])
    >>> assert not rs._asclist
    >>> len(rs)
    5
    >>> [x for x in rs], [x for x in rs.fastasc()]
    ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5])
    >>> assert rs._asclist

    iterate descending:
    >>> rs = addset(xs, ys, ascending=False)
    >>> # (use generator because pypy could call len())
    >>> list(x for x in rs), list(x for x in rs.fastdesc()) # without _asclist
    ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0])
    >>> assert not rs._asclist
    >>> len(rs)
    5
    >>> [x for x in rs], [x for x in rs.fastdesc()]
    ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0])
    >>> assert rs._asclist

    iterate ascending without fastasc:
    >>> rs = addset(xs, generatorset(ys), ascending=True)
    >>> assert rs.fastasc is None
    >>> [x for x in rs]
    [0, 2, 3, 4, 5]

    iterate descending without fastdesc:
    >>> rs = addset(generatorset(xs), ys, ascending=False)
    >>> assert rs.fastdesc is None
    >>> [x for x in rs]
    [5, 4, 3, 2, 0]
    """
    def __init__(self, revs1, revs2, ascending=None):
        self._r1 = revs1
        self._r2 = revs2
        # NOTE(review): _iter is never read within this class — confirm it is
        # unused before removing
        self._iter = None
        self._ascending = ascending
        self._genlist = None
        self._asclist = None

    def __len__(self):
        # forces full materialization via the _list property cache
        return len(self._list)

    def __nonzero__(self):
        return bool(self._r1) or bool(self._r2)

    @util.propertycache
    def _list(self):
        if not self._genlist:
            self._genlist = baseset(iter(self))
        return self._genlist

    def __iter__(self):
        """Iterate over both collections without repeating elements

        If the ascending attribute is not set, iterate over the first one and
        then over the second one checking for membership on the first one so we
        dont yield any duplicates.

        If the ascending attribute is set, iterate over both collections at the
        same time, yielding only one value at a time in the given order.
        """
        if self._ascending is None:
            if self._genlist:
                return iter(self._genlist)
            def arbitraryordergen():
                for r in self._r1:
                    yield r
                inr1 = self._r1.__contains__
                for r in self._r2:
                    if not inr1(r):
                        yield r
            return arbitraryordergen()
        # try to use our own fast iterator if it exists
        self._trysetasclist()
        if self._ascending:
            attr = 'fastasc'
        else:
            attr = 'fastdesc'
        it = getattr(self, attr)
        if it is not None:
            return it()
        # maybe half of the component supports fast
        # get iterator for _r1
        iter1 = getattr(self._r1, attr)
        if iter1 is None:
            # let's avoid side effect (not sure it matters)
            iter1 = iter(sorted(self._r1, reverse=not self._ascending))
        else:
            iter1 = iter1()
        # get iterator for _r2
        iter2 = getattr(self._r2, attr)
        if iter2 is None:
            # let's avoid side effect (not sure it matters)
            iter2 = iter(sorted(self._r2, reverse=not self._ascending))
        else:
            iter2 = iter2()
        return _iterordered(self._ascending, iter1, iter2)

    def _trysetasclist(self):
        """populate the _asclist attribute if possible and necessary"""
        if self._genlist is not None and self._asclist is None:
            self._asclist = sorted(self._genlist)

    @property
    def fastasc(self):
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__iter__
        # a fast merged iterator is only possible when both halves have one
        iter1 = self._r1.fastasc
        iter2 = self._r2.fastasc
        if None in (iter1, iter2):
            return None
        return lambda: _iterordered(True, iter1(), iter2())

    @property
    def fastdesc(self):
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__reversed__
        iter1 = self._r1.fastdesc
        iter2 = self._r2.fastdesc
        if None in (iter1, iter2):
            return None
        return lambda: _iterordered(False, iter1(), iter2())

    def __contains__(self, x):
        return x in self._r1 or x in self._r2

    def sort(self, reverse=False):
        """Sort the added set

        For this we use the cached list with all the generated values and if we
        know they are ascending or descending we can sort them in a smart way.
        """
        self._ascending = not reverse

    def isascending(self):
        return self._ascending is not None and self._ascending

    def isdescending(self):
        return self._ascending is not None and not self._ascending

    def reverse(self):
        if self._ascending is None:
            self._list.reverse()
        else:
            self._ascending = not self._ascending

    def first(self):
        for x in self:
            return x
        return None

    def last(self):
        # reuse first() on the reversed order, then restore the direction
        self.reverse()
        val = self.first()
        self.reverse()
        return val

    def __repr__(self):
        d = {None: '', False: '-', True: '+'}[self._ascending]
        return '<%s%s %r, %r>' % (type(self).__name__, d, self._r1, self._r2)
3045 3049
class generatorset(abstractsmartset):
    """Wrap a generator for lazy iteration

    Wrapper structure for generators that provides lazy membership and can
    be iterated more than once.
    When asked for membership it generates values until either it finds the
    requested one or has gone through all the elements in the generator
    """
    def __init__(self, gen, iterasc=None):
        """
        gen: a generator producing the values for the generatorset.
        iterasc: None if the generator order is unknown, True if it yields
        ascending revisions, False if descending.
        """
        self._gen = gen
        self._asclist = None
        self._cache = {}
        self._genlist = []
        self._finished = False
        self._ascending = True
        if iterasc is not None:
            # a known generator order lets us expose it as a "fast" iterator
            # and use an early-exit membership test
            if iterasc:
                self.fastasc = self._iterator
                self.__contains__ = self._asccontains
            else:
                self.fastdesc = self._iterator
                self.__contains__ = self._desccontains

    def __nonzero__(self):
        # Do not use 'for r in self' because it will enforce the iteration
        # order (default ascending), possibly unrolling a whole descending
        # iterator.
        if self._genlist:
            return True
        for r in self._consumegen():
            return True
        return False

    def __contains__(self, x):
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True

        self._cache[x] = False
        return False

    def _asccontains(self, x):
        """version of contains optimised for ascending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l > x:
                # ascending order: x can no longer appear
                break

        self._cache[x] = False
        return False

    def _desccontains(self, x):
        """version of contains optimised for descending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l < x:
                # descending order: x can no longer appear
                break

        self._cache[x] = False
        return False

    def __iter__(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is not None:
            return it()
        # we need to consume the iterator
        for x in self._consumegen():
            pass
        # recall the same code; _consumegen has now set fastasc/fastdesc
        return iter(self)

    def _iterator(self):
        if self._finished:
            return iter(self._genlist)

        # We have to use this complex iteration strategy to allow multiple
        # iterations at the same time. We need to be able to catch revision
        # removed from _consumegen and added to genlist in another instance.
        #
        # Getting rid of it would provide an about 15% speed up on this
        # iteration.
        genlist = self._genlist
        # use the next() builtin instead of the py2-only .next method so the
        # code is portable; the explicit StopIteration handling keeps the
        # inner generator PEP 479-safe
        consume = self._consumegen()
        _len = len # cache global lookup
        def gen():
            i = 0
            while True:
                if i < _len(genlist):
                    yield genlist[i]
                else:
                    try:
                        yield next(consume)
                    except StopIteration:
                        return
                i += 1
        return gen()

    def _consumegen(self):
        cache = self._cache
        genlist = self._genlist.append
        for item in self._gen:
            cache[item] = True
            genlist(item)
            yield item
        if not self._finished:
            # everything is known now: expose the sorted list directly
            self._finished = True
            asc = self._genlist[:]
            asc.sort()
            self._asclist = asc
            self.fastasc = asc.__iter__
            self.fastdesc = asc.__reversed__

    def __len__(self):
        for x in self._consumegen():
            pass
        return len(self._genlist)

    def sort(self, reverse=False):
        self._ascending = not reverse

    def reverse(self):
        self._ascending = not self._ascending

    def isascending(self):
        return self._ascending

    def isdescending(self):
        return not self._ascending

    def first(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is None:
            # we need to consume all and try again
            for x in self._consumegen():
                pass
            return self.first()
        return next(it(), None)

    def last(self):
        if self._ascending:
            it = self.fastdesc
        else:
            it = self.fastasc
        if it is None:
            # we need to consume all and try again; the original returned
            # self.first() here, which yielded the wrong end of the set
            for x in self._consumegen():
                pass
            return self.last()
        return next(it(), None)

    def __repr__(self):
        d = {False: '-', True: '+'}[self._ascending]
        return '<%s%s>' % (type(self).__name__, d)
3219 3223
class spanset(abstractsmartset):
    """Duck type for baseset class which represents a range of revisions and
    can work lazily and without having all the range in memory

    Note that spanset(x, y) behave almost like xrange(x, y) except for two
    notable points:
    - when x < y it will be automatically descending,
    - revision filtered with this repoview will be skipped.

    """
    def __init__(self, repo, start=0, end=None):
        """
        start: first revision included the set
        (default to 0)
        end: first revision excluded (last+1)
        (default to len(repo)

        Spanset will be descending if `end` < `start`.
        """
        if end is None:
            end = len(repo)
        self._ascending = start <= end
        if not self._ascending:
            # normalize so that _start <= _end; the direction is carried by
            # _ascending instead of by the bound order
            start, end = end + 1, start +1
        self._start = start
        self._end = end
        self._hiddenrevs = repo.changelog.filteredrevs

    def sort(self, reverse=False):
        self._ascending = not reverse

    def reverse(self):
        self._ascending = not self._ascending

    def _iterfilter(self, iterrange):
        # skip revisions hidden by the current repoview
        s = self._hiddenrevs
        for r in iterrange:
            if r not in s:
                yield r

    def __iter__(self):
        if self._ascending:
            return self.fastasc()
        else:
            return self.fastdesc()

    def fastasc(self):
        iterrange = xrange(self._start, self._end)
        if self._hiddenrevs:
            return self._iterfilter(iterrange)
        return iter(iterrange)

    def fastdesc(self):
        iterrange = xrange(self._end - 1, self._start - 1, -1)
        if self._hiddenrevs:
            return self._iterfilter(iterrange)
        return iter(iterrange)

    def __contains__(self, rev):
        hidden = self._hiddenrevs
        return ((self._start <= rev < self._end)
                and not (hidden and rev in hidden))

    def __nonzero__(self):
        for r in self:
            return True
        return False

    def __len__(self):
        if not self._hiddenrevs:
            return abs(self._end - self._start)
        else:
            # subtract the hidden revisions that fall inside the span
            count = 0
            start = self._start
            end = self._end
            for rev in self._hiddenrevs:
                # NOTE(review): __init__ normalizes start <= end, so the
                # first arm looks unreachable — confirm before simplifying
                if (end < rev <= start) or (start <= rev < end):
                    count += 1
            return abs(self._end - self._start) - count

    def isascending(self):
        return self._ascending

    def isdescending(self):
        return not self._ascending

    def first(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        for x in it():
            return x
        return None

    def last(self):
        # the last element is the first of the opposite-direction iterator
        if self._ascending:
            it = self.fastdesc
        else:
            it = self.fastasc
        for x in it():
            return x
        return None

    def __repr__(self):
        d = {False: '-', True: '+'}[self._ascending]
        return '<%s%s %d:%d>' % (type(self).__name__, d,
                                 self._start, self._end - 1)
3328 3332
class fullreposet(spanset):
    """a set containing all revisions in the repo

    This class exists to host special optimization and magic to handle virtual
    revisions such as "null".
    """

    def __init__(self, repo):
        super(fullreposet, self).__init__(repo)

    def __and__(self, other):
        """As self contains the whole repo, all of the other set should also be
        in self. Therefore `self & other = other`.

        This boldly assumes the other contains valid revs only.
        """
        # other is not a smartset, make it so
        if not util.safehasattr(other, 'isascending'):
            # filter out hidden revision
            # (this boldly assumes all smartset are pure)
            #
            # `other` was used with "&", let's assume this is a set like
            # object.
            other = baseset(other - self._hiddenrevs)

        # XXX As fullreposet is also used as bootstrap, this is wrong.
        #
        # With a giveme312() revset returning [3,1,2], this makes
        #   'hg log -r "giveme312()"' -> 1, 2, 3 (wrong)
        # We cannot just drop it because other usage still need to sort it:
        #   'hg log -r "all() and giveme312()"' -> 1, 2, 3 (right)
        #
        # There is also some faulty revset implementations that rely on it
        # (eg: children as of its state in e8075329c5fb)
        #
        # When we fix the two points above we can move this into the if clause
        other.sort(reverse=self.isdescending())
        return other
3367 3371
def prettyformatset(revs):
    """Indent the repr() of a smartset so its nesting is readable.

    Each '<' opens a deeper nesting level; every segment starting at a '<'
    is placed on its own line, indented one space per still-open bracket
    preceding it.
    """
    segments = []
    rs = repr(revs)
    total = len(rs)
    start = 0
    while start < total:
        stop = rs.find('<', start + 1)
        if stop < 0:
            stop = total
        depth = rs.count('<', 0, start) - rs.count('>', 0, start)
        assert depth >= 0
        segments.append(' ' * depth + rs[start:stop].rstrip())
        start = stop
    return '\n'.join(segments)
3381 3385
def loadpredicate(ui, extname, registrarobj):
    """Load revset predicates from specified registrarobj
    """
    # `ui` and `extname` are unused here; the signature mirrors the other
    # extension loader hooks
    for name, func in registrarobj._table.iteritems():
        symbols[name] = func
        if func._safe:
            # predicates flagged safe are additionally exposed via safesymbols
            safesymbols.add(name)
3389 3393
# load built-in predicates explicitly to populate symbols and safesymbols
loadpredicate(None, None, predicate)

# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
@@ -1,1378 +1,1378
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import contextlib
11 11 import errno
12 12 import glob
13 13 import os
14 14 import re
15 15 import shutil
16 16 import stat
17 17 import tempfile
18 18 import threading
19 19
20 20 from .i18n import _
21 21 from .node import wdirrev
22 22 from . import (
23 23 encoding,
24 24 error,
25 25 match as matchmod,
26 26 osutil,
27 27 pathutil,
28 28 phases,
29 29 revset,
30 30 similar,
31 31 util,
32 32 )
33 33
34 34 if os.name == 'nt':
35 35 from . import scmwindows as scmplatform
36 36 else:
37 37 from . import scmposix as scmplatform
38 38
39 39 systemrcpath = scmplatform.systemrcpath
40 40 userrcpath = scmplatform.userrcpath
41 41
class status(tuple):
    '''Tuple subclass exposing one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        items = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, items)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
94 94
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Every subpath present in ctx1 is read from ctx1 (it wins over ctx2).
    # Subpaths only present in ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but the change is not yet committed (in ctx1).
    # (The former implementation built a dict from both substates and pruned
    # it; this is the same result without the py2-only iteritems() call.)
    onlyin2 = set(ctx2.substate) - set(ctx1.substate)

    for subpath in sorted(ctx1.substate):
        yield subpath, ctx1.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in onlyin2:
        yield subpath, ctx2.nullsub(subpath, ctx1)
119 119
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # collect excluded nodes that are secret (and not extinct) so the user
    # can be told why nothing was exchanged
    secretlist = []
    if excluded:
        for n in excluded:
            if n not in repo:
                # discovery should not have included the filtered revision,
                # we have to explicitly exclude it until discovery is cleanup.
                continue
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
140 140
def checknewlabel(repo, lbl, kind):
    """Abort if `lbl` is not acceptable as a new label name.

    The "kind" parameter is deliberately not used in ui output because it
    makes strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
154 154
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for c in ('\r', '\n'):
        if c in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
159 159
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
171 171
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames', 'warn')
    lowered = raw.lower()
    asbool = util.parsebool(raw)
    # Windows always aborts: non-portable names cannot be created there
    abort = os.name == 'nt' or lowered == 'abort'
    warn = bool(asbool) or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
184 184
class casecollisionauditor(object):
    # Detects filenames that collide on case-insensitive filesystems.
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked filename in a single encoding.lower call
        # by joining them on '\0' (a byte that cannot appear in a filename)
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # warn (or abort) if `f` case-folds onto an already-known file
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
208 208
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    # only revisions at or below maxrev participate, so the digest stays
    # stable for a cache covering revisions up to maxrev
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = util.sha1()
        for rev in revs:
            # ';' separator keeps adjacent rev numbers unambiguous
            s.update('%s;' % rev)
        key = s.digest()
    return key
232 232
233 233 class abstractvfs(object):
234 234 """Abstract base class; cannot be instantiated"""
235 235
    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        # abstract base: concrete vfs classes must define their own __init__
        raise NotImplementedError('attempted instantiating ' + str(type(self)))
239 239
240 240 def tryread(self, path):
241 241 '''gracefully return an empty string for missing files'''
242 242 try:
243 243 return self.read(path)
244 244 except IOError as inst:
245 245 if inst.errno != errno.ENOENT:
246 246 raise
247 247 return ""
248 248
249 249 def tryreadlines(self, path, mode='rb'):
250 250 '''gracefully return an empty array for missing files'''
251 251 try:
252 252 return self.readlines(path, mode=mode)
253 253 except IOError as inst:
254 254 if inst.errno != errno.ENOENT:
255 255 raise
256 256 return []
257 257
    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False, backgroundclose=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind the instance attribute so subsequent calls go straight to
        # __call__ without re-entering this wrapper
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed,
                             backgroundclose=backgroundclose)
269 269
    def read(self, path):
        # return the whole content of `path`; the with-block closes the file
        with self(path, 'rb') as fp:
            return fp.read()
273 273
    def readlines(self, path, mode='rb'):
        # return the content of `path` as a list of lines
        with self(path, mode=mode) as fp:
            return fp.readlines()
277 277
    def write(self, path, data, backgroundclose=False):
        # replace the content of `path` with `data`
        with self(path, 'wb', backgroundclose=backgroundclose) as fp:
            return fp.write(data)
281 281
    def writelines(self, path, data, mode='wb', notindexed=False):
        # write an iterable of lines; no line separators are added
        with self(path, mode=mode, notindexed=notindexed) as fp:
            return fp.writelines(data)
285 285
    def append(self, path, data):
        # append `data` to `path`, creating the file if it does not exist
        with self(path, 'ab') as fp:
            return fp.write(data)
289 289
    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed by
        overriding in a subclass."""
        return os.path.basename(path)
295 295
    def chmod(self, path, mode):
        # change the permission bits of `path` (relative to the vfs root)
        return os.chmod(self.join(path), mode)
298 298
299 299 def dirname(self, path):
300 300 """return dirname element of a path (as os.path.dirname would do)
301 301
302 302 This exists to allow handling of strange encoding if needed."""
303 303 return os.path.dirname(path)
304 304
305 305 def exists(self, path=None):
306 306 return os.path.exists(self.join(path))
307 307
308 308 def fstat(self, fp):
309 309 return util.fstat(fp)
310 310
311 311 def isdir(self, path=None):
312 312 return os.path.isdir(self.join(path))
313 313
314 314 def isfile(self, path=None):
315 315 return os.path.isfile(self.join(path))
316 316
317 317 def islink(self, path=None):
318 318 return os.path.islink(self.join(path))
319 319
320 320 def isfileorlink(self, path=None):
321 321 '''return whether path is a regular file or a symlink
322 322
323 323 Unlike isfile, this doesn't follow symlinks.'''
324 324 try:
325 325 st = self.lstat(path)
326 326 except OSError:
327 327 return False
328 328 mode = st.st_mode
329 329 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
330 330
331 331 def reljoin(self, *paths):
332 332 """join various elements of a path together (as os.path.join would do)
333 333
334 334 The vfs base is not injected so that path stay relative. This exists
335 335 to allow handling of strange encoding if needed."""
336 336 return os.path.join(*paths)
337 337
338 338 def split(self, path):
339 339 """split top-most element of a path (as os.path.split would do)
340 340
341 341 This exists to allow handling of strange encoding if needed."""
342 342 return os.path.split(path)
343 343
344 344 def lexists(self, path=None):
345 345 return os.path.lexists(self.join(path))
346 346
347 347 def lstat(self, path=None):
348 348 return os.lstat(self.join(path))
349 349
350 350 def listdir(self, path=None):
351 351 return os.listdir(self.join(path))
352 352
353 353 def makedir(self, path=None, notindexed=True):
354 354 return util.makedir(self.join(path), notindexed)
355 355
356 356 def makedirs(self, path=None, mode=None):
357 357 return util.makedirs(self.join(path), mode)
358 358
359 359 def makelock(self, info, path):
360 360 return util.makelock(info, self.join(path))
361 361
362 362 def mkdir(self, path=None):
363 363 return os.mkdir(self.join(path))
364 364
365 365 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
366 366 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
367 367 dir=self.join(dir), text=text)
368 368 dname, fname = util.split(name)
369 369 if dir:
370 370 return fd, os.path.join(dir, fname)
371 371 else:
372 372 return fd, fname
373 373
374 374 def readdir(self, path=None, stat=None, skip=None):
375 375 return osutil.listdir(self.join(path), stat, skip)
376 376
377 377 def readlock(self, path):
378 378 return util.readlock(self.join(path))
379 379
380 380 def rename(self, src, dst):
381 381 return util.rename(self.join(src), self.join(dst))
382 382
383 383 def readlink(self, path):
384 384 return os.readlink(self.join(path))
385 385
386 386 def removedirs(self, path=None):
387 387 """Remove a leaf directory and all empty intermediate ones
388 388 """
389 389 return util.removedirs(self.join(path))
390 390
391 391 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
392 392 """Remove a directory tree recursively
393 393
394 394 If ``forcibly``, this tries to remove READ-ONLY files, too.
395 395 """
396 396 if forcibly:
397 397 def onerror(function, path, excinfo):
398 398 if function is not os.remove:
399 399 raise
400 400 # read-only files cannot be unlinked under Windows
401 401 s = os.stat(path)
402 402 if (s.st_mode & stat.S_IWRITE) != 0:
403 403 raise
404 404 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
405 405 os.remove(path)
406 406 else:
407 407 onerror = None
408 408 return shutil.rmtree(self.join(path),
409 409 ignore_errors=ignore_errors, onerror=onerror)
410 410
411 411 def setflags(self, path, l, x):
412 412 return util.setflags(self.join(path), l, x)
413 413
414 414 def stat(self, path=None):
415 415 return os.stat(self.join(path))
416 416
417 417 def unlink(self, path=None):
418 418 return util.unlink(self.join(path))
419 419
420 420 def unlinkpath(self, path=None, ignoremissing=False):
421 421 return util.unlinkpath(self.join(path), ignoremissing)
422 422
423 423 def utime(self, path=None, t=None):
424 424 return os.utime(self.join(path), t)
425 425
426 426 def walk(self, path=None, onerror=None):
427 427 """Yield (dirpath, dirs, files) tuple for each directories under path
428 428
429 429 ``dirpath`` is relative one from the root of this vfs. This
430 430 uses ``os.sep`` as path separator, even you specify POSIX
431 431 style ``path``.
432 432
433 433 "The root of this vfs" is represented as empty ``dirpath``.
434 434 """
435 435 root = os.path.normpath(self.join(None))
436 436 # when dirpath == root, dirpath[prefixlen:] becomes empty
437 437 # because len(dirpath) < prefixlen.
438 438 prefixlen = len(pathutil.normasprefix(root))
439 439 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
440 440 yield (dirpath[prefixlen:], dirs, files)
441 441
442 442 @contextlib.contextmanager
443 443 def backgroundclosing(self, ui, expectedcount=-1):
444 444 """Allow files to be closed asynchronously.
445 445
446 446 When this context manager is active, ``backgroundclose`` can be passed
447 447 to ``__call__``/``open`` to result in the file possibly being closed
448 448 asynchronously, on a background thread.
449 449 """
450 450 # This is an arbitrary restriction and could be changed if we ever
451 451 # have a use case.
452 452 vfs = getattr(self, 'vfs', self)
453 453 if getattr(vfs, '_backgroundfilecloser', None):
454 454 raise error.Abort('can only have 1 active background file closer')
455 455
456 456 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
457 457 try:
458 458 vfs._backgroundfilecloser = bfc
459 459 yield bfc
460 460 finally:
461 461 vfs._backgroundfilecloser = None
462 462
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # base: root directory all relative paths are resolved against
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        # setting mustaudit installs/clears the path auditor (see setter)
        self.mustaudit = audit
        # createmode: mode bits applied to newly created files (None = default)
        self.createmode = None
        # _trustnlink: lazily determined; whether st_nlink is reliable here
        self._trustnlink = None

    @property
    def mustaudit(self):
        return self._audit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            # audit rejects paths escaping the repo or hitting .hg internals
            self.audit = pathutil.pathauditor(self.base)
        else:
            # util.always accepts any path (auditing disabled)
            self.audit = util.always

    @util.propertycache
    def _cansymlink(self):
        # whether the filesystem under base supports symlinks (cached)
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # whether the filesystem under base honors the exec bit (cached)
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply createmode to a newly created file, if both are applicable
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False, backgroundclose=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.

        If ``backgroundclose`` is passed, the file may be closed asynchronously.
        It can only be used if the ``self.backgroundclosing()`` context manager
        is active. This should only be specified if the following criteria hold:

        1. There is a potential for writing thousands of files. Unless you
           are writing thousands of files, the performance benefits of
           asynchronously closing files is not realized.
        2. Files are opened exactly once for the ``backgroundclosing``
           active duration and are therefore free of race conditions between
           closing a file on a background thread and reopening it. (If the
           file were opened multiple times, there could be unflushed data
           because the original file handle hasn't been flushed/closed yet.)
        '''
        if self._audit:
            # reject OS-problematic filenames (e.g. Windows reserved names)
            r = util.checkosfilename(path)
            if r:
                raise error.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink tracks the hardlink count of the target: -1 = unknown,
        # 0 = freshly created, >1 = shared and must be broken before write
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.makedirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        # truncating write: drop the old file so hardlinked
                        # copies are not clobbered
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        with util.posixfile(f):
                            nlink = util.nlinks(f)
                            if nlink < 1:
                                nlink = 2 # force mktempcopy (issue1922)
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    # file does not exist yet: create parent dirs now
                    nlink = 0
                    util.makedirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break the hardlink by replacing the file with a copy
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # file was newly created above; apply createmode
            self._fixfilemode(f)

        if backgroundclose:
            if not self._backgroundfilecloser:
                raise error.Abort('backgroundclose can only be used when a '
                                  'backgroundclosing context manager is active')

            # wrap so close() is handed off to the background closer
            fp = delayclosedfile(fp, self._backgroundfilecloser)

        return fp

    def symlink(self, src, dst):
        # create a symlink at ``dst`` pointing to ``src``; falls back to a
        # plain file containing ``src`` when symlinks are unsupported
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.makedirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path, *insidef):
        # resolve a vfs-relative path to an absolute one; empty/None path
        # yields the base itself
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base

opener = vfs
604 604
class auditvfs(object):
    # Mixin for wrapper vfs classes: holds the wrapped vfs and forwards
    # the ``mustaudit`` flag to it so toggling auditing on the wrapper
    # affects the underlying vfs.
    def __init__(self, vfs):
        # vfs: the wrapped vfs instance all operations delegate to
        self.vfs = vfs

    @property
    def mustaudit(self):
        return self.vfs.mustaudit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self.vfs.mustaudit = onoff
616 616
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        # filter: callable mapping a vfs-relative path to the real one
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # rewrite the path before delegating the open to the wrapped vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        relative = self.vfs.reljoin(path, *insidef)
        return self.vfs.join(self._filter(relative))

filteropener = filtervfs
634 634
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through
        if mode in ('r', 'rb'):
            return self.vfs(path, mode, *args, **kw)
        raise error.Abort('this vfs is read only')

    def join(self, path, *insidef):
        return self.vfs.join(path, *insidef)
648 648
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only re-raise walk errors for the top-level path itself
        if err.filename == path:
            raise err
    # samestat is not available on all platforms; without it we cannot
    # safely detect symlink cycles, so symlink following is disabled
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; return False if an
            # equivalent directory (same inode/device) was already seen
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                # mutate dirs in place so os.walk (topdown) stops here
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                # skip directories already visited (symlink cycle guard)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse manually through the symlink, sharing
                        # seen_dirs so cycles stay detected
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
696 696
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default config snippets ship under <datapath>/default.d/*.rc
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    # then system-wide and per-user configuration, in precedence order
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
709 709
# module-level cache for rcpath(); computed once per process
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            # empty entries are skipped; an entirely empty HGRCPATH thus
            # yields an empty search path
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            _rcpath.extend(os.path.join(p, f)
                           for f, kind in osutil.listdir(p)
                           if f.endswith('.rc'))
        else:
            _rcpath.append(p)
    return _rcpath
735 735
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    if rev is not None:
        return rev
    # the working directory has no integer revision; use the sentinel
    return wdirrev
742 742
def revsingle(repo, revspec, default='.'):
    """Resolve ``revspec`` to a single changectx (last of the set)."""
    # empty spec (but not literal 0) falls back to the default revision
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
751 751
def _pairspec(revspec):
    """Return True if ``revspec`` parses to a top-level range expression.

    Used by revpair() to decide whether a single spec must always yield a
    pair of revisions (e.g. "x:y") even when both ends are equal.
    """
    tree = revset.parse(revspec)
    # NOTE: the source carried both the pre- and post-refactor call of
    # revset.optimize (diff residue); the current API takes the tree only.
    tree = revset.optimize(tree) # fix up "x^:y" -> "(x^):y"
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
756 756
def revpair(repo, revs):
    """Resolve ``revs`` to a (node1, node2) pair; node2 may be None."""
    if not revs:
        # no specs: first parent of the working directory, open-ended
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints cheaply when the set's order is known
    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
786 786
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    # integer specs are wrapped as rev(N) so matchany only sees strings
    allspecs = [revset.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in revs]
    matcher = revset.matchany(repo.ui, allspecs, repo)
    return matcher(repo)
796 796
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        # in debug mode, always show the (possibly null) second parent too
        return [parents[0], repo['null']]
    if parents[0].rev() < intrev(ctx.rev()) - 1:
        return parents
    # parent immediately precedes ctx: nothing interesting to report
    return []
812 812
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind (e.g. "re:", "glob:"): pass through
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # glob matched nothing: keep the literal pattern
            ret.append(kindpat)
    return ret
831 831
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        # 'm' is bound below; the matcher only invokes badfn afterwards,
        # so the late binding in this closure is safe.
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
856 856
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
861 861
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
865 865
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
869 869
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        # default: backup next to the file itself
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured backup dir
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath + ".orig"
889 889
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    # Add new files and forget missing ones, detecting renames by
    # similarity; returns 1 if anything was rejected, else 0.
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # True if the matcher names the subrepo itself or any file below it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    # recurse into matching subrepos first, narrowing the matcher for each
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only count explicitly named files as rejections
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what would be added/removed (verbose mode reports even
    # explicitly named files)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
953 953
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # the lambda closes over 'rejected', which is bound on the next line
    # before the matcher can ever invoke badfn
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # unlike addremove(), only report individual files in verbose mode
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
982 982
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # classify each walked file by its dirstate entry ('?' unknown,
    # 'r' removed, 'a' added) combined with whether it exists on disk
    # (st is the stat result, or None/False when missing)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and path passes audit: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk: candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk: candidate to re-track
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1011 1011
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
1026 1026
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    # all dirstate mutations happen under the working-copy lock
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
1036 1036
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src is itself a copy, track provenance from the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dst in state 'm' (merged) or 'n' (normal) needs no change;
        # otherwise re-mark it for lookup on next status
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only added, not committed: no copy data can be
            # recorded, warn the user
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                # dst untracked or marked removed: just add it
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1055 1055
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a malformed entry means the file itself is corrupt
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1074 1074
def writerequires(opener, requirements):
    """Write ``requirements`` to .hg/requires, one sorted entry per line."""
    with opener('requires', 'w') as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
1079 1079
class filecachesubentry(object):
    # Tracks the stat information of one file backing a filecache entry.
    # cachestat is None when the file is missing or unstatted;
    # _cacheable is None until we have enough information to decide.
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat only if caching this file can possibly work
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            # remember the new stat so repeated calls settle to False
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat or None if the file does not exist;
        # other stat errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1134 1134
class filecacheentry(object):
    """Aggregate of per-file cache sub-entries for a set of paths."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1151 1151
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths: relative names of the files whose changes invalidate
        # the cached value
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: capture the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # fast path: value already materialized on the instance
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # cache on the instance so the fast path above triggers next time
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the instance-level copy; _filecache keeps the stat entry
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1227 1227
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run ``cmd`` via ``repo.ui.system`` while advertising ``lock`` for
    inheritance through the ``envvar`` environment variable.

    Raises LockInheritanceContractViolation if ``lock`` is not currently
    held. Returns the subprocess exit code.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    # Mutate the caller-supplied environ in place (callers may rely on
    # seeing the locker token); only allocate when none was given.
    environ = {} if environ is None else environ
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1237 1237
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(
        repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1246 1246
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta

    True when either format.generaldelta is explicitly enabled or
    format.usegeneraldelta (on by default) is enabled.
    """
    # experimental config: format.generaldelta
    generaldelta = ui.configbool('format', 'generaldelta', False)
    if generaldelta:
        return generaldelta
    return ui.configbool('format', 'usegeneraldelta', True)
1253 1253
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised

    Reflects the explicit format.generaldelta setting only (off by default).
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta', False)
    return enabled
1259 1259
class delayclosedfile(object):
    """Proxy for a file object whose close is delayed.

    All attribute access is forwarded to the wrapped file handle; only
    close() (and context-manager exit) is redirected to the closer, which
    is expected to close the handle later (see backgroundfilecloser).

    Do not instantiate outside of the vfs layer.
    """

    def __init__(self, fh, closer):
        # Use object.__setattr__ to store our own state, because our
        # __setattr__ below forwards everything to the wrapped handle.
        object.__setattr__(self, '_origfh', fh)
        object.__setattr__(self, '_closer', closer)

    def __getattr__(self, attr):
        # Delegate all unknown attribute reads to the wrapped file handle.
        return getattr(self._origfh, attr)

    def __setattr__(self, attr, value):
        return setattr(self._origfh, attr, value)

    def __delattr__(self, attr):
        return delattr(self._origfh, attr)

    def __enter__(self):
        return self._origfh.__enter__()

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Hand the handle to the closer instead of closing it here;
        # exceptions propagate normally (nothing is returned).
        self._closer.close(self._origfh)

    def close(self):
        self._closer.close(self._origfh)
1287 1287
class backgroundfilecloser(object):
    """Coordinates background closing of file handles on multiple threads.

    Used as a context manager; file handles handed to close() are closed
    by worker threads so the caller doesn't block on slow filesystem
    close operations.
    """
    def __init__(self, ui, expectedcount=-1):
        # expectedcount: number of files the caller expects to close;
        # -1 means unknown. Background threads are skipped for small counts.
        self._running = False
        self._entered = False
        self._threads = []
        # Exception raised on a worker thread, stashed for re-raise on the
        # caller's thread in close().
        self._threadexception = None

        # Only Windows/NTFS has slow file closing. So only enable by default
        # on that platform. But allow to be enabled elsewhere for testing.
        defaultenabled = os.name == 'nt'
        enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)

        if not enabled:
            return

        # There is overhead to starting and stopping the background threads.
        # Don't do background processing unless the file count is large enough
        # to justify it.
        minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
                                    2048)
        # FUTURE dynamically start background threads after minfilecount closes.
        # (We don't currently have any callers that don't know their file count)
        if expectedcount > 0 and expectedcount < minfilecount:
            return

        # Windows defaults to a limit of 512 open files. A buffer of 128
        # should give us enough headway.
        maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
        threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)

        ui.debug('starting %d threads for background file closing\n' %
                 threadcount)

        self._queue = util.queue(maxsize=maxqueue)
        self._running = True

        for i in range(threadcount):
            t = threading.Thread(target=self._worker, name='backgroundcloser')
            self._threads.append(t)
            t.start()

    def __enter__(self):
        self._entered = True
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Signal workers to stop once their queue drains.
        self._running = False

        # Wait for threads to finish closing so open files don't linger for
        # longer than lifetime of context manager.
        # NOTE(review): an exception stashed by a worker after the final
        # close() call appears to be silently dropped here — confirm whether
        # callers need it re-raised on exit.
        for t in self._threads:
            t.join()

    def _worker(self):
        """Main routine for worker thread.

        Polls the queue with a short timeout so the thread notices when
        _running is cleared and can exit.
        """
        while True:
            try:
                fh = self._queue.get(block=True, timeout=0.100)
                # Need to catch or the thread will terminate and
                # we could orphan file descriptors.
                try:
                    fh.close()
                except Exception as e:
                    # Stash so can re-raise from main thread later.
                    self._threadexception = e
            except util.empty:
                if not self._running:
                    break

    def close(self, fh):
        """Schedule a file for closing.

        Raises error.Abort when called outside the context manager, and
        re-raises any exception previously caught on a worker thread.
        May block when the queue is full.
        """
        if not self._entered:
            raise error.Abort('can only call close() when context manager '
                              'active')

        # If a background thread encountered an exception, raise now so we fail
        # fast. Otherwise we may potentially go on for minutes until the error
        # is acted on.
        if self._threadexception:
            e = self._threadexception
            self._threadexception = None
            raise e

        # If we're not actively running, close synchronously.
        if not self._running:
            fh.close()
            return

        self._queue.put(fh, block=True, timeout=None)
1378 1378
General Comments 0
You need to be logged in to leave comments. Login now