##// END OF EJS Templates
dirstate: fix a potential traceback when in `copy` and `rename`...
marmoute -
r51225:9fc0d244 stable
parent child Browse files
Show More
@@ -1,8013 +1,8013 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import os
10 10 import re
11 11 import sys
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 nullrev,
18 18 short,
19 19 wdirrev,
20 20 )
21 21 from .pycompat import open
22 22 from . import (
23 23 archival,
24 24 bookmarks,
25 25 bundle2,
26 26 bundlecaches,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 discovery,
33 33 encoding,
34 34 error,
35 35 exchange,
36 36 extensions,
37 37 filemerge,
38 38 formatter,
39 39 graphmod,
40 40 grep as grepmod,
41 41 hbisect,
42 42 help,
43 43 hg,
44 44 logcmdutil,
45 45 merge as mergemod,
46 46 mergestate as mergestatemod,
47 47 narrowspec,
48 48 obsolete,
49 49 obsutil,
50 50 patch,
51 51 phases,
52 52 pycompat,
53 53 rcutil,
54 54 registrar,
55 55 requirements,
56 56 revsetlang,
57 57 rewriteutil,
58 58 scmutil,
59 59 server,
60 60 shelve as shelvemod,
61 61 state as statemod,
62 62 streamclone,
63 63 tags as tagsmod,
64 64 ui as uimod,
65 65 util,
66 66 verify as verifymod,
67 67 vfs as vfsmod,
68 68 wireprotoserver,
69 69 )
70 70 from .utils import (
71 71 dateutil,
72 72 stringutil,
73 73 urlutil,
74 74 )
75 75
# Dispatch table mapping command name -> command metadata, seeded with the
# debug commands so that both share a single table.
table = {}
table.update(debugcommandsmod.command._table)

# Decorator used below to register each command into ``table``.
command = registrar.command(table)
INTENT_READONLY = registrar.INTENT_READONLY
81 81
82 82 # common command options
83 83
# Options accepted by every command in addition to its own option table.
# Each entry is (short flag, long flag, default value, help text[, value
# placeholder shown in help output]).
globalopts = [
    (
        b'R',
        b'repository',
        b'',
        _(b'repository root directory or name of overlay bundle file'),
        _(b'REPO'),
    ),
    (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
    (
        b'y',
        b'noninteractive',
        None,
        _(
            b'do not prompt, automatically pick the first choice for all prompts'
        ),
    ),
    (b'q', b'quiet', None, _(b'suppress output')),
    (b'v', b'verbose', None, _(b'enable additional output')),
    (
        b'',
        b'color',
        b'',
        # i18n: 'always', 'auto', 'never', and 'debug' are keywords
        # and should not be translated
        _(b"when to colorize (boolean, always, auto, never, or debug)"),
        _(b'TYPE'),
    ),
    (
        b'',
        b'config',
        [],
        _(b'set/override config option (use \'section.name=value\')'),
        _(b'CONFIG'),
    ),
    (b'', b'debug', None, _(b'enable debugging output')),
    (b'', b'debugger', None, _(b'start debugger')),
    (
        b'',
        b'encoding',
        encoding.encoding,
        _(b'set the charset encoding'),
        _(b'ENCODE'),
    ),
    (
        b'',
        b'encodingmode',
        encoding.encodingmode,
        _(b'set the charset encoding mode'),
        _(b'MODE'),
    ),
    (b'', b'traceback', None, _(b'always print a traceback on exception')),
    (b'', b'time', None, _(b'time how long the command takes')),
    (b'', b'profile', None, _(b'print command execution profile')),
    (b'', b'version', None, _(b'output version information and exit')),
    (b'h', b'help', None, _(b'display help and exit')),
    (b'', b'hidden', False, _(b'consider hidden changesets')),
    (
        b'',
        b'pager',
        b'auto',
        _(b"when to paginate (boolean, always, auto, or never)"),
        _(b'TYPE'),
    ),
]
149 149
# Re-export the shared option lists defined in cmdutil so the command
# tables below can reference them by short name.
dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
commitopts = cmdutil.commitopts
commitopts2 = cmdutil.commitopts2
commitopts3 = cmdutil.commitopts3
formatteropts = cmdutil.formatteropts
templateopts = cmdutil.templateopts
logopts = cmdutil.logopts
diffopts = cmdutil.diffopts
diffwsopts = cmdutil.diffwsopts
diffopts2 = cmdutil.diffopts2
mergetoolopts = cmdutil.mergetoolopts
similarityopts = cmdutil.similarityopts
subrepoopts = cmdutil.subrepoopts
debugrevlogopts = cmdutil.debugrevlogopts
166 166
167 167 # Commands start here, listed alphabetically
168 168
169 169
@command(
    b'abort',
    dryrunopts,
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
    helpbasic=True,
)
def abort(ui, repo, **opts):
    """abort an unfinished operation (EXPERIMENTAL)

    Aborts a multistep operation like graft, histedit, rebase, merge,
    and unshelve if they are in an unfinished state.

    use --dry-run/-n to dry run the command.
    """
    state = cmdutil.getunfinishedstate(repo)
    if not state:
        raise error.StateError(_(b'no operation in progress'))
    if not state.abortfunc:
        # The operation is in progress but has no registered abort support.
        msg = _(b"%s in progress but does not support 'hg abort'")
        raise error.InputError(msg % (state._opname), hint=state.hint())
    if opts.get('dry_run'):
        ui.status(_(b'%s in progress, will be aborted\n') % (state._opname))
        return
    return state.abortfunc(ui, repo)
202 202
203 203
@command(
    b'add',
    walkopts + subrepoopts + dryrunopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """
    with repo.wlock(), repo.dirstate.changing_files(repo):
        matcher = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        # cmdutil.add returns the list of files it could not add.
        rejected = cmdutil.add(ui, repo, matcher, b"", uipathfn, False, **opts)
        return 1 if rejected else 0
260 260
261 261
@command(
    b'addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    inferrepo=True,
)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

       - A number of files (bar.c and foo.c) are new,
         while foobar.c has been removed (without using :hg:`remove`)
         from the repository::

           $ ls
           bar.c foo.c
           $ hg status
           ! foobar.c
           ? bar.c
           ? foo.c
           $ hg addremove
           adding bar.c
           adding foo.c
           removing foobar.c
           $ hg status
           A bar.c
           A foo.c
           R foobar.c

       - A file foobar.c was moved to foo.c without using :hg:`rename`.
         Afterwards, it was edited slightly::

           $ ls
           foo.c
           $ hg status
           ! foobar.c
           ? foo.c
           $ hg addremove --similarity 90
           removing foobar.c
           adding foo.c
           recording removal of foobar.c as rename to foo.c (94% similar)
           $ hg status -C
           A foo.c
             foobar.c
           R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    # Default to exact-match rename detection when -s is absent or empty.
    opts[b'similarity'] = opts.get(b'similarity') or b'100'
    with repo.wlock(), repo.dirstate.changing_files(repo):
        m = scmutil.match(repo[None], pats, opts)
        relative = scmutil.anypats(pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
        return scmutil.addremove(repo, m, b"", uipathfn, opts)
339 339
340 340
@command(
    b'annotate|blame',
    [
        (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')),
        (
            b'',
            b'follow',
            None,
            _(b'follow copies/renames and list the filename (DEPRECATED)'),
        ),
        (b'', b'no-follow', None, _(b"don't follow copies and renames")),
        (b'a', b'text', None, _(b'treat all files as text')),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'f', b'file', None, _(b'list the filename')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
        (b'n', b'number', None, _(b'list the revision number (default)')),
        (b'c', b'changeset', None, _(b'list the changeset')),
        (
            b'l',
            b'line-number',
            None,
            _(b'show line number at the first appearance'),
        ),
        (
            b'',
            b'skip',
            [],
            _(b'revset to not display (EXPERIMENTAL)'),
            _(b'REV'),
        ),
    ]
    + diffwsopts
    + walkopts
    + formatteropts,
    _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    helpbasic=True,
    inferrepo=True,
)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :lines: List of lines with annotation data.
      :path: String. Repository-absolute path of the specified file.

      And each entry of ``{lines}`` provides the following sub-keywords in
      addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.

      :line: String. Line content.
      :lineno: Integer. Line number at that revision.
      :path: String. Repository-absolute path of the file at that revision.

      See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.InputError(
            _(b'at least one filename or pattern is required')
        )

    if opts.get(b'follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts[b'file'] = True

    # If no display column was requested, fall back to showing the
    # revision number (the documented default).
    if (
        not opts.get(b'user')
        and not opts.get(b'changeset')
        and not opts.get(b'date')
        and not opts.get(b'file')
    ):
        opts[b'number'] = True

    # -l alone is ambiguous: line numbers only make sense next to a
    # revision or changeset column.
    linenumber = opts.get(b'line_number') is not None
    if (
        linenumber
        and (not opts.get(b'changeset'))
        and (not opts.get(b'number'))
    ):
        raise error.InputError(_(b'at least one of -n/-c is required for -l'))

    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)

    ui.pager(b'annotate')
    rootfm = ui.formatter(b'annotate', opts)
    # With --debug show full node hashes; otherwise truncate to 12 digits.
    if ui.debugflag:
        shorthex = pycompat.identity
    else:

        def shorthex(h):
            return h[:12]

    if ui.quiet:
        datefunc = dateutil.shortdate
    else:
        datefunc = dateutil.datestr
    if ctx.rev() is None:
        # Annotating the working directory: lines attributed to wdir get a
        # "+" marker (or a space) appended to rev/node columns.
        if opts.get(b'changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d' % ctx.p1().rev()
                else:
                    return b'%d' % rev

        else:

            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d+' % ctx.p1().rev()
                else:
                    return b'%d ' % rev

        def formathex(h):
            if h == repo.nodeconstants.wdirhex:
                return b'%s+' % shorthex(hex(ctx.p1().node()))
            else:
                return b'%s ' % shorthex(h)

    else:
        formatrev = b'%d'.__mod__
        formathex = shorthex

    # One entry per displayable column:
    # (field name, separator, value extractor, plain-format function).
    opmap = [
        (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser),
        (b'rev', b' ', lambda x: scmutil.intrev(x.fctx), formatrev),
        (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
        (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
        (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr),
        (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr),
    ]
    # Maps a field name to the option flag that enables it (fields not
    # listed here use their own name as the option name).
    opnamemap = {
        b'rev': b'number',
        b'node': b'changeset',
        b'path': b'file',
        b'lineno': b'line_number',
    }

    # Plain output applies the per-column format function; templated
    # output passes raw values through to the formatter.
    if rootfm.isplain():

        def makefunc(get, fmt):
            return lambda x: fmt(get(x))

    else:

        def makefunc(get, fmt):
            return get

    # Only build the columns that were requested (or that the template
    # is known to reference, via datahint).
    datahint = rootfm.datahint()
    funcmap = [
        (makefunc(get, fmt), sep)
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    ]
    funcmap[0] = (funcmap[0][0], b'')  # no separator in front of first column
    fields = b' '.join(
        fn
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    )

    def bad(x, y):
        raise error.InputError(b"%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get(b'no_follow')
    diffopts = patch.difffeatureopts(
        ui, opts, section=b'annotate', whitespace=True
    )
    skiprevs = opts.get(b'skip')
    if skiprevs:
        skiprevs = logcmdutil.revrange(repo, skiprevs)

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(path=abs)
        if not opts.get(b'text') and fctx.isbinary():
            rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs))
            continue

        fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}')
        lines = fctx.annotate(
            follow=follow, skiprevs=skiprevs, diffopts=diffopts
        )
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        # First pass: compute every column's values, right-aligning plain
        # output to the widest cell in each column.
        for f, sep in funcmap:
            l = [f(n) for n in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes])
            else:
                formats.append([b'%s'] * len(l))
            pieces.append(l)

        # Second pass: emit one formatter item per annotated line.
        for f, p, n in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.context(fctx=n.fctx)
            fm.write(fields, b"".join(f), *p)
            if n.skip:
                # line attributed across a revision excluded via --skip
                fmt = b"* %s"
            else:
                fmt = b": %s"
            fm.write(b'line', fmt, n.text)

        if not lines[-1].text.endswith(b'\n'):
            fm.plain(b'\n')
        fm.end()

    rootfm.end()
585 585
586 586
@command(
    b'archive',
    [
        (b'', b'no-decode', None, _(b'do not pass files through decoders')),
        (
            b'p',
            b'prefix',
            b'',
            _(b'directory prefix for files in archive'),
            _(b'PREFIX'),
        ),
        (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')),
        (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')),
    ]
    + subrepoopts
    + walkopts,
    _(b'[OPTION]... DEST'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def archive(ui, repo, dest, **opts):
    """create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``txz``:   tar archive, compressed using lzma (only in Python 3)
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)
    if not ctx:
        msg = _(b'no working directory: please specify a revision')
        raise error.InputError(msg)
    node = ctx.node()
    # Expand format-string escapes (e.g. %h) in the destination name.
    dest = cmdutil.makefilename(ctx, dest)
    if os.path.realpath(dest) == repo.root:
        raise error.InputError(_(b'repository root cannot be destination'))

    archivekind = opts.get(b'type') or archival.guesskind(dest) or b'files'
    prefix = opts.get(b'prefix')

    if dest == b'-':
        # Streaming to stdout only works for single-file archive formats.
        if archivekind == b'files':
            raise error.InputError(_(b'cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(ctx, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + b'-%h'

    prefix = cmdutil.makefilename(ctx, prefix)
    matcher = scmutil.match(ctx, [], opts)
    archival.archive(
        repo,
        dest,
        node,
        archivekind,
        not opts.get(b'no_decode'),
        matcher,
        prefix,
        subrepos=opts.get(b'subrepos'),
    )
684 684
685 685
@command(
    b'backout',
    [
        (
            b'',
            b'merge',
            None,
            _(b'merge with old dirstate parent after backout'),
        ),
        (
            b'',
            b'commit',
            None,
            _(b'commit if no conflicts were encountered (DEPRECATED)'),
        ),
        (b'', b'no-commit', None, _(b'do not commit')),
        (
            b'',
            b'parent',
            b'',
            _(b'parent to choose when backing out merge (DEPRECATED)'),
            _(b'REV'),
        ),
        (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
    ]
    + mergetoolopts
    + walkopts
    + commitopts
    + commitopts2,
    _(b'[OPTION]... [-r] REV'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def backout(ui, repo, node=None, rev=None, **opts):
    """reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    """
    # Take both locks up front; the actual work happens in _dobackout.
    with repo.wlock():
        with repo.lock():
            return _dobackout(ui, repo, node, rev, **opts)
774 774
def _dobackout(ui, repo, node=None, rev=None, **opts):
    """Implementation of :hg:`backout`; called by backout() with the
    wlock and store lock already held.

    Returns 0 on success, 1 if nothing changed or if the backout merge
    left unresolved files.
    """
    cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge'])
    opts = pycompat.byteskwargs(opts)

    # The revision may come in positionally or via -r, but not both.
    if rev and node:
        raise error.InputError(_(b"please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.InputError(_(b"please specify a revision to backout"))

    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    ctx = logcmdutil.revsingle(repo, rev)
    node = ctx.node()

    op1, op2 = repo.dirstate.parents()
    # Only ancestors of the working directory can be backed out in place.
    if not repo.changelog.isancestor(node, op1):
        raise error.InputError(
            _(b'cannot backout change that is not an ancestor')
        )

    p1, p2 = repo.changelog.parents(node)
    if p1 == repo.nullid:
        raise error.InputError(_(b'cannot backout a change with no parents'))
    if p2 != repo.nullid:
        # Backing out a merge requires the user to pick which parent to
        # revert towards via --parent.
        if not opts.get(b'parent'):
            raise error.InputError(_(b'cannot backout a merge changeset'))
        p = repo.lookup(opts[b'parent'])
        if p not in (p1, p2):
            raise error.InputError(
                _(b'%s is not a parent of %s') % (short(p), short(node))
            )
        parent = p
    else:
        if opts.get(b'parent'):
            raise error.InputError(
                _(b'cannot use --parent on non-merge changeset')
            )
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get(b'merge') and op1 != node:
        # In-place backout: merge the reversal into the working directory,
        # then restore the original dirstate parents.
        with repo.transaction(b"backout"):
            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
            with ui.configoverride(overrides, b'backout'):
                stats = mergemod.back_out(ctx, parent=repo[parent])
            repo.setparents(op1, op2)
        hg._showstats(repo, stats)
        if stats.unresolvedcount:
            repo.ui.status(
                _(b"use 'hg resolve' to retry unresolved file merges\n")
            )
            return 1
    else:
        # Backing out '.' itself (or --merge): update to the target and
        # revert files to the chosen parent.
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch, repo.currenttransaction())
        cmdutil.revert(ui, repo, rctx)

    if opts.get(b'no_commit'):
        msg = _(b"changeset %s backed out, don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        # Commit the backout, opening an editor when no message was given.
        editform = b'backout'
        e = cmdutil.getcommiteditor(
            editform=editform, **pycompat.strkwargs(opts)
        )
        if not message:
            # we don't translate commit messages
            message = b"Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(
            message, opts.get(b'user'), opts.get(b'date'), match, editor=e
        )

    # save to detect changes
    tip = repo.changelog.tip()

    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_(b"nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads, tip)

    def nice(node):
        # "rev:shorthash" form for user-facing messages.
        return b'%d:%s' % (repo.changelog.rev(node), short(node))

    ui.status(
        _(b'changeset %s backs out changeset %s\n')
        % (nice(newnode), nice(node))
    )
    if opts.get(b'merge') and op1 != node:
        # --merge: go back to the old parent and merge in the backout.
        hg.clean(repo, op1, show_stats=False)
        ui.status(_(b'merging with changeset %s\n') % nice(newnode))
        overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
        with ui.configoverride(overrides, b'backout'):
            return hg.merge(repo[b'tip'])
    return 0
884 884
885 885
886 886 @command(
887 887 b'bisect',
888 888 [
889 889 (b'r', b'reset', False, _(b'reset bisect state')),
890 890 (b'g', b'good', False, _(b'mark changeset good')),
891 891 (b'b', b'bad', False, _(b'mark changeset bad')),
892 892 (b's', b'skip', False, _(b'skip testing changeset')),
893 893 (b'e', b'extend', False, _(b'extend the bisect range')),
894 894 (
895 895 b'c',
896 896 b'command',
897 897 b'',
898 898 _(b'use command to check changeset state'),
899 899 _(b'CMD'),
900 900 ),
901 901 (b'U', b'noupdate', False, _(b'do not update to target')),
902 902 ],
903 903 _(b"[-gbsr] [-U] [-c CMD] [REV]"),
904 904 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
905 905 )
906 906 def bisect(
907 907 ui,
908 908 repo,
909 909 positional_1=None,
910 910 positional_2=None,
911 911 command=None,
912 912 reset=None,
913 913 good=None,
914 914 bad=None,
915 915 skip=None,
916 916 extend=None,
917 917 noupdate=None,
918 918 ):
919 919 """subdivision search of changesets
920 920
921 921 This command helps to find changesets which introduce problems. To
922 922 use, mark the earliest changeset you know exhibits the problem as
923 923 bad, then mark the latest changeset which is free from the problem
924 924 as good. Bisect will update your working directory to a revision
925 925 for testing (unless the -U/--noupdate option is specified). Once
926 926 you have performed tests, mark the working directory as good or
927 927 bad, and bisect will either update to another candidate changeset
928 928 or announce that it has found the bad revision.
929 929
930 930 As a shortcut, you can also use the revision argument to mark a
931 931 revision as good or bad without checking it out first.
932 932
933 933 If you supply a command, it will be used for automatic bisection.
934 934 The environment variable HG_NODE will contain the ID of the
935 935 changeset being tested. The exit status of the command will be
936 936 used to mark revisions as good or bad: status 0 means good, 125
937 937 means to skip the revision, 127 (command not found) will abort the
938 938 bisection, and any other non-zero exit status means the revision
939 939 is bad.
940 940
941 941 .. container:: verbose
942 942
943 943 Some examples:
944 944
945 945 - start a bisection with known bad revision 34, and good revision 12::
946 946
947 947 hg bisect --bad 34
948 948 hg bisect --good 12
949 949
950 950 - advance the current bisection by marking current revision as good or
951 951 bad::
952 952
953 953 hg bisect --good
954 954 hg bisect --bad
955 955
956 956 - mark the current revision, or a known revision, to be skipped (e.g. if
957 957 that revision is not usable because of another issue)::
958 958
959 959 hg bisect --skip
960 960 hg bisect --skip 23
961 961
962 962 - skip all revisions that do not touch directories ``foo`` or ``bar``::
963 963
964 964 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
965 965
966 966 - forget the current bisection::
967 967
968 968 hg bisect --reset
969 969
970 970 - use 'make && make tests' to automatically find the first broken
971 971 revision::
972 972
973 973 hg bisect --reset
974 974 hg bisect --bad 34
975 975 hg bisect --good 12
976 976 hg bisect --command "make && make tests"
977 977
978 978 - see all changesets whose states are already known in the current
979 979 bisection::
980 980
981 981 hg log -r "bisect(pruned)"
982 982
983 983 - see the changeset currently being bisected (especially useful
984 984 if running with -U/--noupdate)::
985 985
986 986 hg log -r "bisect(current)"
987 987
988 988 - see all changesets that took part in the current bisection::
989 989
990 990 hg log -r "bisect(range)"
991 991
992 992 - you can even get a nice graph::
993 993
994 994 hg log --graph -r "bisect(range)"
995 995
996 996 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
997 997
998 998 Returns 0 on success.
999 999 """
1000 1000 rev = []
1001 1001 # backward compatibility
1002 1002 if positional_1 in (b"good", b"bad", b"reset", b"init"):
1003 1003 ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
1004 1004 cmd = positional_1
1005 1005 rev.append(positional_2)
1006 1006 if cmd == b"good":
1007 1007 good = True
1008 1008 elif cmd == b"bad":
1009 1009 bad = True
1010 1010 else:
1011 1011 reset = True
1012 1012 elif positional_2:
1013 1013 raise error.InputError(_(b'incompatible arguments'))
1014 1014 elif positional_1 is not None:
1015 1015 rev.append(positional_1)
1016 1016
1017 1017 incompatibles = {
1018 1018 b'--bad': bad,
1019 1019 b'--command': bool(command),
1020 1020 b'--extend': extend,
1021 1021 b'--good': good,
1022 1022 b'--reset': reset,
1023 1023 b'--skip': skip,
1024 1024 }
1025 1025
1026 1026 enabled = [x for x in incompatibles if incompatibles[x]]
1027 1027
1028 1028 if len(enabled) > 1:
1029 1029 raise error.InputError(
1030 1030 _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
1031 1031 )
1032 1032
1033 1033 if reset:
1034 1034 hbisect.resetstate(repo)
1035 1035 return
1036 1036
1037 1037 state = hbisect.load_state(repo)
1038 1038
1039 1039 if rev:
1040 1040 revs = logcmdutil.revrange(repo, rev)
1041 1041 goodnodes = state[b'good']
1042 1042 badnodes = state[b'bad']
1043 1043 if goodnodes and badnodes:
1044 1044 candidates = repo.revs(b'(%ln)::(%ln)', goodnodes, badnodes)
1045 1045 candidates += repo.revs(b'(%ln)::(%ln)', badnodes, goodnodes)
1046 1046 revs = candidates & revs
1047 1047 nodes = [repo.changelog.node(i) for i in revs]
1048 1048 else:
1049 1049 nodes = [repo.lookup(b'.')]
1050 1050
1051 1051 # update state
1052 1052 if good or bad or skip:
1053 1053 if good:
1054 1054 state[b'good'] += nodes
1055 1055 elif bad:
1056 1056 state[b'bad'] += nodes
1057 1057 elif skip:
1058 1058 state[b'skip'] += nodes
1059 1059 hbisect.save_state(repo, state)
1060 1060 if not (state[b'good'] and state[b'bad']):
1061 1061 return
1062 1062
1063 1063 def mayupdate(repo, node, show_stats=True):
1064 1064 """common used update sequence"""
1065 1065 if noupdate:
1066 1066 return
1067 1067 cmdutil.checkunfinished(repo)
1068 1068 cmdutil.bailifchanged(repo)
1069 1069 return hg.clean(repo, node, show_stats=show_stats)
1070 1070
1071 1071 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
1072 1072
1073 1073 if command:
1074 1074 changesets = 1
1075 1075 if noupdate:
1076 1076 try:
1077 1077 node = state[b'current'][0]
1078 1078 except LookupError:
1079 1079 raise error.StateError(
1080 1080 _(
1081 1081 b'current bisect revision is unknown - '
1082 1082 b'start a new bisect to fix'
1083 1083 )
1084 1084 )
1085 1085 else:
1086 1086 node, p2 = repo.dirstate.parents()
1087 1087 if p2 != repo.nullid:
1088 1088 raise error.StateError(_(b'current bisect revision is a merge'))
1089 1089 if rev:
1090 1090 if not nodes:
1091 1091 raise error.InputError(_(b'empty revision set'))
1092 1092 node = repo[nodes[-1]].node()
1093 1093 with hbisect.restore_state(repo, state, node):
1094 1094 while changesets:
1095 1095 # update state
1096 1096 state[b'current'] = [node]
1097 1097 hbisect.save_state(repo, state)
1098 1098 status = ui.system(
1099 1099 command,
1100 1100 environ={b'HG_NODE': hex(node)},
1101 1101 blockedtag=b'bisect_check',
1102 1102 )
1103 1103 if status == 125:
1104 1104 transition = b"skip"
1105 1105 elif status == 0:
1106 1106 transition = b"good"
1107 1107 # status < 0 means process was killed
1108 1108 elif status == 127:
1109 1109 raise error.Abort(_(b"failed to execute %s") % command)
1110 1110 elif status < 0:
1111 1111 raise error.Abort(_(b"%s killed") % command)
1112 1112 else:
1113 1113 transition = b"bad"
1114 1114 state[transition].append(node)
1115 1115 ctx = repo[node]
1116 1116 summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect')
1117 1117 ui.status(_(b'changeset %s: %s\n') % (summary, transition))
1118 1118 hbisect.checkstate(state)
1119 1119 # bisect
1120 1120 nodes, changesets, bgood = hbisect.bisect(repo, state)
1121 1121 # update to next check
1122 1122 node = nodes[0]
1123 1123 mayupdate(repo, node, show_stats=False)
1124 1124 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
1125 1125 return
1126 1126
1127 1127 hbisect.checkstate(state)
1128 1128
1129 1129 # actually bisect
1130 1130 nodes, changesets, good = hbisect.bisect(repo, state)
1131 1131 if extend:
1132 1132 if not changesets:
1133 1133 extendctx = hbisect.extendrange(repo, state, nodes, good)
1134 1134 if extendctx is not None:
1135 1135 ui.write(
1136 1136 _(b"Extending search to changeset %s\n")
1137 1137 % cmdutil.format_changeset_summary(ui, extendctx, b'bisect')
1138 1138 )
1139 1139 state[b'current'] = [extendctx.node()]
1140 1140 hbisect.save_state(repo, state)
1141 1141 return mayupdate(repo, extendctx.node())
1142 1142 raise error.StateError(_(b"nothing to extend"))
1143 1143
1144 1144 if changesets == 0:
1145 1145 hbisect.printresult(ui, repo, state, displayer, nodes, good)
1146 1146 else:
1147 1147 assert len(nodes) == 1 # only a single node can be tested next
1148 1148 node = nodes[0]
1149 1149 # compute the approximate number of remaining tests
1150 1150 tests, size = 0, 2
1151 1151 while size <= changesets:
1152 1152 tests, size = tests + 1, size * 2
1153 1153 rev = repo.changelog.rev(node)
1154 1154 summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect')
1155 1155 ui.write(
1156 1156 _(
1157 1157 b"Testing changeset %s "
1158 1158 b"(%d changesets remaining, ~%d tests)\n"
1159 1159 )
1160 1160 % (summary, changesets, tests)
1161 1161 )
1162 1162 state[b'current'] = [node]
1163 1163 hbisect.save_state(repo, state)
1164 1164 return mayupdate(repo, node)
1165 1165
1166 1166
@command(
    b'bookmarks|bookmark',
    [
        (b'f', b'force', False, _(b'force')),
        (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')),
        (b'd', b'delete', False, _(b'delete a given bookmark')),
        (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')),
        (b'i', b'inactive', False, _(b'mark a bookmark inactive')),
        (b'l', b'list', False, _(b'list existing bookmarks')),
    ]
    + formatteropts,
    _(b'hg bookmarks [OPTIONS]... [NAME]...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def bookmark(ui, repo, *names, **opts):
    """create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying
    the active bookmark's name.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{bookmark}``. See also
      :hg:`help templates`.

      :active:  Boolean. True if the bookmark is active.

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @

      - print only the active bookmark name::

          hg book -ql .
    """
    opts = pycompat.byteskwargs(opts)
    force = opts.get(b'force')
    rev = opts.get(b'rev')
    inactive = opts.get(b'inactive')  # meaning add/rename to inactive bookmark

    # Figure out which single action was requested. --delete/--rename/--list
    # are mutually exclusive; any of them implies the action directly.
    action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list')
    if action:
        # --rev only makes sense when adding/moving a bookmark.
        cmdutil.check_incompatible_arguments(opts, action, [b'rev'])
    elif names or rev:
        # bare names (and/or --rev) mean "create or move these bookmarks"
        action = b'add'
    elif inactive:
        action = b'inactive'  # meaning deactivate
    else:
        # no arguments at all: fall back to listing
        action = b'list'

    # --inactive combines with add/rename (create as inactive) but is
    # meaningless alongside --delete or --list.
    cmdutil.check_incompatible_arguments(
        opts, b'inactive', [b'delete', b'list']
    )
    if not names and action in {b'add', b'delete'}:
        raise error.InputError(_(b"bookmark name required"))

    if action in {b'add', b'delete', b'rename', b'inactive'}:
        # All mutating actions run under the working-copy lock, the store
        # lock and a single 'bookmark' transaction.
        with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
            if action == b'delete':
                # '.' expands to the active bookmark's name
                names = pycompat.maplist(repo._bookmarks.expandname, names)
                bookmarks.delete(repo, tr, names)
            elif action == b'rename':
                if not names:
                    raise error.InputError(_(b"new bookmark name required"))
                elif len(names) > 1:
                    raise error.InputError(
                        _(b"only one new bookmark name allowed")
                    )
                oldname = repo._bookmarks.expandname(opts[b'rename'])
                bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
            elif action == b'add':
                bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
            elif action == b'inactive':
                # Deactivating is a no-op (with a notice) when there is no
                # active bookmark to deactivate.
                if len(repo._bookmarks) == 0:
                    ui.status(_(b"no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_(b"no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
    elif action == b'list':
        # Read-only path: no locks or transaction needed.
        names = pycompat.maplist(repo._bookmarks.expandname, names)
        with ui.formatter(b'bookmarks', opts) as fm:
            bookmarks.printbookmarks(ui, repo, fm, names)
    else:
        raise error.ProgrammingError(b'invalid action: %s' % action)
1291 1291
1292 1292
@command(
    b'branch',
    [
        (
            b'f',
            b'force',
            None,
            _(b'set branch name even if it shadows an existing branch'),
        ),
        (b'C', b'clean', None, _(b'reset branch name to parent branch name')),
        (
            b'r',
            b'rev',
            [],
            _(b'change branches of the given revs (EXPERIMENTAL)'),
        ),
    ],
    _(b'[-fC] [NAME]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    if label:
        label = label.strip()

    # With neither --clean nor a new name, this is a read-only query:
    # print the working directory's branch (--rev alone is an error).
    if not opts.get(b'clean') and not label:
        if revs:
            raise error.InputError(
                _(b"no branch name specified for the revisions")
            )
        ui.write(b"%s\n" % repo.dirstate.branch())
        return

    # Everything below mutates the dirstate, so take the wlock.
    with repo.wlock():
        if opts.get(b'clean'):
            # Reset to the first parent's branch name.
            label = repo[b'.'].branch()
            repo.dirstate.setbranch(label, repo.currenttransaction())
            ui.status(_(b'reset working directory to branch %s\n') % label)
        elif label:

            scmutil.checknewlabel(repo, label, b'branch')
            if revs:
                # Experimental: rewrite the branch of existing changesets
                # instead of the working directory.
                return cmdutil.changebranch(ui, repo, revs, label, opts)

            # Refuse to shadow an existing branch unless --force, except
            # when a working-directory parent is already on that branch.
            if not opts.get(b'force') and label in repo.branchmap():
                if label not in [p.branch() for p in repo[None].parents()]:
                    raise error.InputError(
                        _(b'a branch of the same name already exists'),
                        # i18n: "it" refers to an existing branch
                        hint=_(b"use 'hg update' to switch to it"),
                    )

            repo.dirstate.setbranch(label, repo.currenttransaction())
            ui.status(_(b'marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            for n, h, t, c in repo.branchmap().iterbranches():
                if n != b"default" and not c:
                    return 0
            # Only 'default' (or closed branches) exist so far: nudge the
            # user toward bookmarks, which are usually what they want.
            ui.status(
                _(
                    b'(branches are permanent and global, '
                    b'did you want a bookmark?)\n'
                )
            )
1387 1387
1388 1388
@command(
    b'branches',
    [
        (
            b'a',
            b'active',
            False,
            _(b'show only branches that have unmerged heads (DEPRECATED)'),
        ),
        (b'c', b'closed', False, _(b'show normal and closed branches')),
        (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
    ]
    + formatteropts,
    _(b'[-c]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
    intents={INTENT_READONLY},
)
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{branch}``. See also
      :hg:`help templates`.

      :active:  Boolean. True if the branch is active.
      :closed:  Boolean. True if the branch is closed.
      :current: Boolean. True if it is the current branch.

    Returns 0.
    """

    opts = pycompat.byteskwargs(opts)
    revspec = opts.get(b'rev')
    # When --rev is given, restrict the listing to the branches those
    # revisions live on; otherwise show every branch.
    wanted = None
    if revspec:
        resolved = logcmdutil.revrange(repo, revspec)
        branchinfo = repo.revbranchcache().branchinfo
        wanted = {branchinfo(r)[0] for r in resolved}

    ui.pager(b'branches')
    fm = ui.formatter(b'branches', opts)
    tohex = fm.hexfunc

    repoheads = set(repo.heads())
    # Collect (name, tip context, is_active, is_open) for each branch.
    # A branch is "active" when at least one of its open heads is also a
    # topological head of the repository.
    entries = []
    for name, heads, tip, is_closed in repo.branchmap().iterbranches():
        if wanted is not None and name not in wanted:
            continue
        is_active = False
        if not is_closed:
            live = set(repo.branchmap().iteropen(heads))
            is_active = bool(live & repoheads)
        entries.append((name, repo[tip], is_active, not is_closed))
    # Active branches first, then by descending tip revision.
    entries.sort(
        key=lambda e: (e[2], e[1].rev(), e[0], e[3]), reverse=True
    )

    for name, ctx, is_active, is_open in entries:
        if active and not is_active:
            # DEPRECATED --active flag: hide everything that is not active.
            continue
        if is_active:
            label = b'branches.active'
            notice = b''
        elif is_open:
            label = b'branches.inactive'
            notice = _(b' (inactive)')
        else:
            # Closed branches are hidden unless -c/--closed was given.
            if not closed:
                continue
            label = b'branches.closed'
            notice = _(b' (closed)')
        is_current = name == repo.dirstate.branch()
        if is_current:
            label = b'branches.current'

        fm.startitem()
        fm.write(b'branch', b'%s', name, label=label)
        rev = ctx.rev()
        # Pad so that the "rev:node" column lines up across rows.
        pad = max(31 - len(b"%d" % rev) - encoding.colwidth(name), 0)
        fm.condwrite(
            not ui.quiet,
            b'rev node',
            b' ' * pad + b' %d:%s',
            rev,
            tohex(ctx.node()),
            label=b'log.changeset changeset.%s' % ctx.phasestr(),
        )
        fm.context(ctx=ctx)
        fm.data(active=is_active, closed=not is_open, current=is_current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain(b'\n')
    fm.end()
1491 1491
1492 1492
@command(
    b'bundle',
    [
        (
            b'',
            b'exact',
            None,
            _(b'compute the base from the revision specified'),
        ),
        (
            b'f',
            b'force',
            None,
            _(b'run even when the destination is unrelated'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'a changeset intended to be added to the destination'),
            _(b'REV'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to bundle'),
            _(b'BRANCH'),
        ),
        (
            b'',
            b'base',
            [],
            _(b'a base changeset assumed to be available at the destination'),
            _(b'REV'),
        ),
        (b'a', b'all', None, _(b'bundle all changesets in the repository')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ]
    + remoteopts,
    _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def bundle(ui, repo, fname, *dests, **opts):
    """create a bundle file

    Generate a bundle file containing data to be transferred to another
    repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified, where destination
    is the repositories you provide through DEST option.

    You can change bundle format with the -t/--type option. See
    :hg:`help bundlespec` for documentation on this format. By default,
    the most appropriate format is used and compression defaults to
    bzip2.

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    opts = pycompat.byteskwargs(opts)

    # Resolve -r/--rev eagerly so an explicit-but-empty selection is
    # rejected before any other work happens.
    revs = None
    if b'rev' in opts:
        revstrings = opts[b'rev']
        revs = logcmdutil.revrange(repo, revstrings)
        if revstrings and not revs:
            raise error.InputError(_(b'no commits to bundle'))

    bundletype = opts.get(b'type', b'bzip2').lower()
    try:
        bundlespec = bundlecaches.parsebundlespec(
            repo, bundletype, strict=False
        )
    except error.UnsupportedBundleSpecification as e:
        raise error.InputError(
            pycompat.bytestr(e),
            hint=_(b"see 'hg help bundlespec' for supported values for --type"),
        )
    cgversion = bundlespec.params[b"cg.version"]

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == b's1':
        raise error.InputError(
            _(b'packed bundles cannot be produced by "hg bundle"'),
            hint=_(b"use 'hg debugcreatestreamclonebundle'"),
        )

    # Determine the "base" set: revisions assumed to already exist at the
    # destination.  --all and --exact compute it locally and therefore
    # cannot be combined with explicit destinations; they also override
    # (with a warning) a user-supplied --base.
    if opts.get(b'all'):
        if dests:
            raise error.InputError(
                _(b"--all is incompatible with specifying destinations")
            )
        if opts.get(b'base'):
            ui.warn(_(b"ignoring --base because --all was specified\n"))
        if opts.get(b'exact'):
            ui.warn(_(b"ignoring --exact because --all was specified\n"))
        base = [nullrev]
    elif opts.get(b'exact'):
        if dests:
            raise error.InputError(
                _(b"--exact is incompatible with specifying destinations")
            )
        if opts.get(b'base'):
            ui.warn(_(b"ignoring --base because --exact was specified\n"))
        # base = parents of the selected revs that are outside the selection
        base = repo.revs(b'parents(%ld) - %ld', revs, revs)
        if not base:
            base = [nullrev]
    else:
        base = logcmdutil.revrange(repo, opts.get(b'base'))
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(
            _(b"repository does not support bundle version %s") % cgversion
        )

    if base:
        # Local computation: outgoing = selected heads minus the base set.
        if dests:
            raise error.InputError(
                _(b"--base is incompatible with specifying destinations")
            )
        cl = repo.changelog
        common = [cl.node(rev) for rev in base]
        heads = [cl.node(r) for r in revs] if revs else None
        outgoing = discovery.outgoing(repo, common, heads)
        missing = outgoing.missing
        excluded = outgoing.excluded
    else:
        # No base given: discover what each destination is missing and
        # accumulate the union across all destinations.
        missing = set()
        excluded = set()
        for path in urlutil.get_push_paths(repo, ui, dests):
            other = hg.peer(repo, opts, path)
            if revs is not None:
                hex_revs = [repo[r].hex() for r in revs]
            else:
                hex_revs = None
            branches = (path.branch, [])
            head_revs, checkout = hg.addbranchrevs(
                repo, repo, branches, hex_revs
            )
            heads = (
                head_revs
                and pycompat.maplist(repo.lookup, head_revs)
                or head_revs
            )
            outgoing = discovery.findcommonoutgoing(
                repo,
                other,
                onlyheads=heads,
                force=opts.get(b'force'),
                portable=True,
            )
            missing.update(outgoing.missing)
            excluded.update(outgoing.excluded)

    if not missing:
        # Nothing to bundle; report and exit with status 1.
        scmutil.nochangesfound(ui, repo, not base and excluded)
        return 1

    # Rebuild a single outgoing object from the merged missing set.
    if heads:
        outgoing = discovery.outgoing(
            repo, missingroots=missing, ancestorsof=heads
        )
    else:
        outgoing = discovery.outgoing(repo, missingroots=missing)
    outgoing.excluded = sorted(excluded)

    # Map the changegroup version to the on-disk bundle header.
    if cgversion == b'01':  # bundle1
        bversion = b'HG10' + bundlespec.wirecompression
        bcompression = None
    elif cgversion in (b'02', b'03'):
        bversion = b'HG20'
        bcompression = bundlespec.wirecompression
    else:
        raise error.ProgrammingError(
            b'bundle: unexpected changegroup version %s' % cgversion
        )

    # TODO compression options should be derived from bundlespec parsing.
    # This is a temporary hack to allow adjusting bundle compression
    # level without a) formalizing the bundlespec changes to declare it
    # b) introducing a command flag.
    compopts = {}
    complevel = ui.configint(
        b'experimental', b'bundlecomplevel.' + bundlespec.compression
    )
    if complevel is None:
        complevel = ui.configint(b'experimental', b'bundlecomplevel')
    if complevel is not None:
        compopts[b'level'] = complevel

    compthreads = ui.configint(
        b'experimental', b'bundlecompthreads.' + bundlespec.compression
    )
    if compthreads is None:
        compthreads = ui.configint(b'experimental', b'bundlecompthreads')
    if compthreads is not None:
        compopts[b'threads'] = compthreads

    # Bundling of obsmarker and phases is optional as not all clients
    # support the necessary features.
    cfg = ui.configbool
    obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
    bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
    obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
    bundlespec.set_param(
        b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
    )
    phases_cfg = cfg(b'experimental', b'bundle-phases')
    bundlespec.set_param(b'phases', phases_cfg, overwrite=False)

    bundle2.writenewbundle(
        ui,
        repo,
        b'bundle',
        fname,
        bversion,
        outgoing,
        bundlespec.params,
        compression=bcompression,
        compopts=compopts,
    )
1731 1731
1732 1732
@command(
    b'cat',
    [
        (
            b'o',
            b'output',
            b'',
            _(b'print output to file with formatted name'),
            _(b'FORMAT'),
        ),
        (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
        (b'', b'decode', None, _(b'apply any matching decode filter')),
    ]
    + walkopts
    + formatteropts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository
    :``\\``: literal "\\" character

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :data:    String. File content.
      :path:    String. Repository-absolute path of the file.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        # allow hash-like --rev values to address hidden changesets silently
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)
    matcher = scmutil.match(ctx, (file1,) + pats, opts)

    # An -o/--output of "-" means stdout, i.e. the same as no template.
    template = opts.pop(b'output', b'')
    if cmdutil.isstdiofilename(template):
        template = b''

    if not template:
        # Writing to stdout: page the output and honor -T formatting.
        ui.pager(b'cat')
        fm = ui.formatter(b'cat', opts)
    else:
        # Writing to per-file destinations: no formatter output needed.
        fm = formatter.nullformatter(ui, b'cat', opts)
    with fm:
        return cmdutil.cat(
            ui,
            repo,
            ctx,
            matcher,
            fm,
            template,
            b'',
            **pycompat.strkwargs(opts)
        )
1806 1806
1807 1807
1808 1808 @command(
1809 1809 b'clone',
1810 1810 [
1811 1811 (
1812 1812 b'U',
1813 1813 b'noupdate',
1814 1814 None,
1815 1815 _(
1816 1816 b'the clone will include an empty working '
1817 1817 b'directory (only a repository)'
1818 1818 ),
1819 1819 ),
1820 1820 (
1821 1821 b'u',
1822 1822 b'updaterev',
1823 1823 b'',
1824 1824 _(b'revision, tag, or branch to check out'),
1825 1825 _(b'REV'),
1826 1826 ),
1827 1827 (
1828 1828 b'r',
1829 1829 b'rev',
1830 1830 [],
1831 1831 _(
1832 1832 b'do not clone everything, but include this changeset'
1833 1833 b' and its ancestors'
1834 1834 ),
1835 1835 _(b'REV'),
1836 1836 ),
1837 1837 (
1838 1838 b'b',
1839 1839 b'branch',
1840 1840 [],
1841 1841 _(
1842 1842 b'do not clone everything, but include this branch\'s'
1843 1843 b' changesets and their ancestors'
1844 1844 ),
1845 1845 _(b'BRANCH'),
1846 1846 ),
1847 1847 (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
1848 1848 (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
1849 1849 (b'', b'stream', None, _(b'clone with minimal data processing')),
1850 1850 ]
1851 1851 + remoteopts,
1852 1852 _(b'[OPTION]... SOURCE [DEST]'),
1853 1853 helpcategory=command.CATEGORY_REPO_CREATION,
1854 1854 helpbasic=True,
1855 1855 norepo=True,
1856 1856 )
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    In normal clone mode, the remote normalizes repository data into a common
    exchange format and the receiving end translates this data into its local
    storage format. --stream activates a different clone mode that essentially
    copies repository files from the remote with minimal data processing. This
    significantly reduces the CPU cost of a clone both remotely and locally.
    However, it often increases the transferred data size by 30-40%. This can
    result in substantially faster clones where I/O throughput is plentiful,
    especially for larger repositories. A side-effect of --stream clones is
    that storage settings and requirements on the remote are applied locally:
    a modern client may inherit legacy or inefficient storage used by the
    remote or a legacy Mercurial client may not be able to clone from a
    modern Mercurial remote.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL or inline from the
      same stream. When this is done, hooks operating on incoming changesets
      and changegroups may fire more than once, once for each pre-generated
      bundle and as well as for any additional remaining data. In addition,
      if an error occurs, the repository may be rolled back to a partial
      clone. This behavior may change in future releases.
      See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a streaming clone while checking out a specified version::

          hg clone --stream http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # -U/--noupdate and -u/--updaterev contradict each other.
    cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev')

    # --include/--exclude can come from narrow or sparse.  hg.clone()
    # distinguishes None (no narrowing requested) from an empty set
    # (narrow clone without patterns), so sets are only built when
    # --narrow was given.
    includepats = None
    excludepats = None
    if opts.get(b'narrow'):
        includepats = set()
        excludepats = set()

        include_arg = opts.get(b'include')
        if include_arg:
            includepats = narrowspec.parsepatterns(include_arg)
        exclude_arg = opts.get(b'exclude')
        if exclude_arg:
            excludepats = narrowspec.parsepatterns(exclude_arg)

    cloned = hg.clone(
        ui,
        opts,
        source,
        dest,
        pull=opts.get(b'pull'),
        stream=opts.get(b'stream') or opts.get(b'uncompressed'),
        revs=opts.get(b'rev'),
        update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
        branch=opts.get(b'branch'),
        shareopts=opts.get(b'shareopts'),
        storeincludepats=includepats,
        storeexcludepats=excludepats,
        depth=opts.get(b'depth') or None,
    )

    # hg.clone() returns None on failure; that maps to exit status 1.
    return cloned is None
1999 1999
2000 2000
@command(
    b'commit|ci',
    [
        (
            b'A',
            b'addremove',
            None,
            _(b'mark new/missing files as added/removed before committing'),
        ),
        (b'', b'close-branch', None, _(b'mark a branch head as closed')),
        (b'', b'amend', None, _(b'amend the parent of the working directory')),
        (b's', b'secret', None, _(b'use the secret phase for committing')),
        (b'', b'draft', None, _(b'use the draft phase for committing')),
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (
            b'',
            b'force-close-branch',
            None,
            _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
        ),
        (b'i', b'interactive', None, _(b'use interactive mode')),
    ]
    + walkopts
    + commitopts
    + commitopts2
    + subrepoopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True,
    inferrepo=True,
)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    # --draft and --secret both select the phase of the new commit, so
    # they are mutually exclusive.
    cmdutil.check_at_most_one_arg(opts, 'draft', 'secret')
    # Amending does not support subrepo recursion.
    cmdutil.check_incompatible_arguments(opts, 'subrepos', ['amend'])
    # Hold both the working-copy lock and the store lock for the whole
    # operation; the actual work is delegated to _docommit().
    with repo.wlock(), repo.lock():
        return _docommit(ui, repo, *pats, **opts)
2092 2092
2093 2093
def _docommit(ui, repo, *pats, **opts):
    """Implementation of :hg:`commit` (called with wlock and lock held).

    Returns 1 if nothing changed, otherwise the result of the commit
    machinery (None/0 on success).
    """
    # Interactive mode delegates entirely to the record machinery, which
    # calls back into commit() for the selected hunks.
    if opts.get('interactive'):
        opts.pop('interactive')
        ret = cmdutil.dorecord(
            ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
        )
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    if opts.get('subrepos'):
        # Let --subrepos on the command line override config setting.
        ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')

    cmdutil.checkunfinished(repo, commit=True)

    # Snapshot branch/head/tip state before committing; used below for
    # close-branch validation and the final commitstatus() message.
    branch = repo[None].branch()
    bheads = repo.branchheads(branch)
    tip = repo.changelog.tip()

    extra = {}
    if opts.get('close_branch') or opts.get('force_close_branch'):
        # The 'close' marker is recorded in the changeset's extra dict.
        extra[b'close'] = b'1'

        if repo[b'.'].closesbranch():
            # Not ideal, but let us do an extra status early to prevent early
            # bail out.
            matcher = scmutil.match(
                repo[None], pats, pycompat.byteskwargs(opts)
            )
            s = repo.status(match=matcher)
            if s.modified or s.added or s.removed:
                bheads = repo.branchheads(branch, closed=True)
            else:
                msg = _(b'current revision is already a branch closing head')
                raise error.InputError(msg)

        if not bheads:
            raise error.InputError(
                _(b'branch "%s" has no heads to close') % branch
            )
        elif (
            branch == repo[b'.'].branch()
            and repo[b'.'].node() not in bheads
            and not opts.get('force_close_branch')
        ):
            # Closing from a non-head changeset requires the explicit
            # --force-close-branch opt-in.
            hint = _(
                b'use --force-close-branch to close branch from a non-head'
                b' changeset'
            )
            raise error.InputError(_(b'can only close branch heads'), hint=hint)
        elif opts.get('amend'):
            # When amending, at least one parent must be on the branch
            # being closed.
            if (
                repo[b'.'].p1().branch() != branch
                and repo[b'.'].p2().branch() != branch
            ):
                raise error.InputError(_(b'can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool(b'ui', b'commitsubrepos'):
            raise error.InputError(
                _(b'cannot amend with ui.commitsubrepos enabled')
            )

        old = repo[b'.']
        rewriteutil.precheck(repo, [old.rev()], b'amend')

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        node = cmdutil.amend(ui, repo, old, extra, pats, opts)
        opts = pycompat.byteskwargs(opts)
        # amend() returns the old node when the amend was a no-op.
        if node == old.node():
            ui.status(_(b"nothing changed\n"))
            return 1
    else:

        def commitfunc(ui, repo, message, match, opts):
            # Apply the phase override (--secret/--draft) on both the base
            # ui and the command ui for the duration of the commit.
            overrides = {}
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            elif opts.get(b'draft'):
                overrides[(b'phases', b'new-commit')] = b'draft'

            baseui = repo.baseui
            with baseui.configoverride(overrides, b'commit'):
                with ui.configoverride(overrides, b'commit'):
                    editform = cmdutil.mergeeditform(
                        repo[None], b'commit.normal'
                    )
                    editor = cmdutil.getcommiteditor(
                        editform=editform, **pycompat.strkwargs(opts)
                    )
                    return repo.commit(
                        message,
                        opts.get(b'user'),
                        opts.get(b'date'),
                        match,
                        editor=editor,
                        extra=extra,
                    )

        opts = pycompat.byteskwargs(opts)
        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # Nothing was committed; mention missing (deleted) files since
            # they are a common reason for an unexpectedly empty commit.
            stat = cmdutil.postcommitstatus(repo, pats, opts)
            if stat.deleted:
                ui.status(
                    _(
                        b"nothing changed (%d missing files, see "
                        b"'hg status')\n"
                    )
                    % len(stat.deleted)
                )
            else:
                ui.status(_(b"nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, tip, opts)

    # Optional post-commit status display (commands.commit.post-status).
    if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
        status(
            ui,
            repo,
            modified=True,
            added=True,
            removed=True,
            deleted=True,
            unknown=True,
            subrepos=opts.get(b'subrepos'),
        )
2232 2232
2233 2233
@command(
    b'config|showconfig|debugconfig',
    [
        (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
        # This is experimental because we need
        # * reasonable behavior around aliases,
        # * decide if we display [debug] [experimental] and [devel] section par
        #   default
        # * some way to display "generic" config entry (the one matching
        #   regexp,
        # * proper display of the different value type
        # * a better way to handle <DYNAMIC> values (and variable types),
        # * maybe some type information ?
        (
            b'',
            b'exp-all-known',
            None,
            _(b'show all known config option (EXPERIMENTAL)'),
        ),
        (b'e', b'edit', None, _(b'edit user config')),
        (b'l', b'local', None, _(b'edit repository config')),
        (b'', b'source', None, _(b'show source of configuration value')),
        (
            b'',
            b'shared',
            None,
            _(b'edit shared source repository config (EXPERIMENTAL)'),
        ),
        (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')),
        (b'g', b'global', None, _(b'edit global config')),
    ]
    + formatteropts,
    _(b'[-u] [NAME]...'),
    helpcategory=command.CATEGORY_HELP,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names or section.names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --source, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    .. container:: verbose

      --non-shared flag is used to edit `.hg/hgrc-not-shared` config file.
      This file is not shared across shares when in share-safe mode.

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :name:    String. Config name.
      :source:  String. Filename and line number where the item is defined.
      :value:   String. Config value.

      The --shared flag can be used to edit the config file of shared source
      repository. It only works when you have shared using the experimental
      share safe feature.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    # Edit mode: any of these flags means "open an editor on some config
    # file" instead of printing values.  All but 'edit' are mutually
    # exclusive (they pick which file to edit).
    editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
    if any(opts.get(o) for o in editopts):
        cmdutil.check_at_most_one_arg(opts, *editopts[1:])
        if opts.get(b'local'):
            if not repo:
                raise error.InputError(
                    _(b"can't use --local outside a repository")
                )
            paths = [repo.vfs.join(b'hgrc')]
        elif opts.get(b'global'):
            paths = rcutil.systemrcpath()
        elif opts.get(b'shared'):
            if not repo.shared():
                raise error.InputError(
                    _(b"repository is not shared; can't use --shared")
                )
            if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
                raise error.InputError(
                    _(
                        b"share safe feature not enabled; "
                        b"unable to edit shared source repository config"
                    )
                )
            paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
        elif opts.get(b'non_shared'):
            paths = [repo.vfs.join(b'hgrc-not-shared')]
        else:
            paths = rcutil.userrcpath()

        # for/else: 'f' ends up as the first existing candidate path; if
        # none exists, seed paths[0] with an appropriate sample config.
        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get(b'global'):
                samplehgrc = uimod.samplehgrcs[b'global']
            elif opts.get(b'local'):
                samplehgrc = uimod.samplehgrcs[b'local']
            else:
                samplehgrc = uimod.samplehgrcs[b'user']

            f = paths[0]
            fp = open(f, b"wb")
            fp.write(util.tonativeeol(samplehgrc))
            fp.close()

        editor = ui.geteditor()
        ui.system(
            b"%s \"%s\"" % (editor, f),
            onerr=error.InputError,
            errprefix=_(b"edit failed"),
            blockedtag=b'config_edit',
        )
        return
    # Display mode: list configuration values through the formatter.
    ui.pager(b'config')
    fm = ui.formatter(b'config', opts)
    for t, f in rcutil.rccomponents():
        if t == b'path':
            ui.debug(b'read config from: %s\n' % f)
        elif t == b'resource':
            ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
        elif t == b'items':
            # Don't print anything for 'items'.
            pass
        else:
            raise error.ProgrammingError(b'unknown rctype: %s' % t)
    untrusted = bool(opts.get(b'untrusted'))

    # Positional arguments select either whole sections ("ui") or single
    # entries ("ui.editor").  The shared-empty-list aliasing here is safe:
    # both names are rebound when any values are given.
    selsections = selentries = []
    if values:
        selsections = [v for v in values if b'.' not in v]
        selentries = [v for v in values if b'.' in v]
    # With exactly one fully-qualified entry selected, print the bare value.
    uniquesel = len(selentries) == 1 and not selsections
    selsections = set(selsections)
    selentries = set(selentries)

    matched = False
    all_known = opts[b'exp_all_known']
    show_source = ui.debugflag or opts.get(b'source')
    entries = ui.walkconfig(untrusted=untrusted, all_known=all_known)
    for section, name, value in entries:
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        defaultvalue = ui.configdefault(section, name)
        if fm.isplain():
            # Keep plain output one-line-per-entry.
            source = source or b'none'
            value = value.replace(b'\n', b'\\n')
        entryname = section + b'.' + name
        if values and not (section in selsections or entryname in selentries):
            continue
        fm.startitem()
        fm.condwrite(show_source, b'source', b'%s: ', source)
        if uniquesel:
            fm.data(name=entryname)
            fm.write(b'value', b'%s\n', value)
        else:
            fm.write(b'name value', b'%s=%s\n', entryname, value)
        if formatter.isprintable(defaultvalue):
            fm.data(defaultvalue=defaultvalue)
        elif isinstance(defaultvalue, list) and all(
            formatter.isprintable(e) for e in defaultvalue
        ):
            fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
        # TODO: no idea how to process unsupported defaultvalue types
        matched = True
    fm.end()
    # Exit status 1 when a requested NAME matched nothing.
    if matched:
        return 0
    return 1
2421 2421
2422 2422
@command(
    b'continue',
    dryrunopts,
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
    helpbasic=True,
)
def continuecmd(ui, repo, **opts):
    """resumes an interrupted operation (EXPERIMENTAL)

    Finishes a multistep operation like graft, histedit, rebase, merge,
    and unshelve if they are in an interrupted state.

    use --dry-run/-n to dry run the command.
    """
    # Find whichever multistep operation is currently interrupted in this
    # repository, if any.
    state = cmdutil.getunfinishedstate(repo)
    if not state:
        raise error.StateError(_(b'no operation in progress'))
    if not state.continuefunc:
        # The operation is resumable in principle, but has no 'continue'
        # entry point; point the user at the operation-specific command.
        msg = _(b"%s in progress but does not support 'hg continue'") % (
            state._opname
        )
        raise error.StateError(msg, hint=state.continuemsg())
    if opts.get('dry_run'):
        ui.status(_(b'%s in progress, will be resumed\n') % (state._opname))
        return
    return state.continuefunc(ui, repo)
2453 2453
2454 2454
@command(
    b'copy|cp',
    [
        (b'', b'forget', None, _(b'unmark a destination file as copied')),
        (b'A', b'after', None, _(b'record a copy that has already occurred')),
        (
            b'',
            b'at-rev',
            b'',
            _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
            _(b'REV'),
        ),
        (
            b'f',
            b'force',
            None,
            _(b'forcibly copy over an existing managed file'),
        ),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... (SOURCE... DEST | --forget DEST...)'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    To undo marking a destination file as copied, use --forget. With that
    option, all given (positional) arguments are unmarked as copies. The
    destination file(s) will be left in place (still tracked). Note that
    :hg:`copy --forget` behaves the same way as :hg:`rename --forget`.

    This command takes effect with the next commit by default.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)

    # Defer the repo.dirstate access until the context manager is entered
    # (i.e. once the wlock below is held): grabbing a bound method here
    # would touch the dirstate too early and could traceback.
    context = lambda repo: repo.dirstate.changing_files(repo)
    rev = opts.get(b'at_rev')
    ctx = None
    if rev:
        ctx = logcmdutil.revsingle(repo, rev)
        if ctx.rev() is not None:
            # Rewriting copy metadata of an existing revision does not go
            # through the dirstate, so no dirstate-change context is needed.

            def context(repo):
                return util.nullcontextmanager()

            opts[b'at_rev'] = ctx.rev()
    with repo.wlock(), context(repo):
        return cmdutil.copy(ui, repo, pats, opts)
2514 2514
2515 2515
2516 2516 @command(
2517 2517 b'debugcommands',
2518 2518 [],
2519 2519 _(b'[COMMAND]'),
2520 2520 helpcategory=command.CATEGORY_HELP,
2521 2521 norepo=True,
2522 2522 )
2523 2523 def debugcommands(ui, cmd=b'', *args):
2524 2524 """list all available commands and options"""
2525 2525 for cmd, vals in sorted(table.items()):
2526 2526 cmd = cmd.split(b'|')[0]
2527 2527 opts = b', '.join([i[1] for i in vals[1]])
2528 2528 ui.write(b'%s: %s\n' % (cmd, opts))
2529 2529
2530 2530
@command(
    b'debugcomplete',
    [(b'o', b'options', None, _(b'show the command options'))],
    _(b'[-o] CMD'),
    helpcategory=command.CATEGORY_HELP,
    norepo=True,
)
def debugcomplete(ui, cmd=b'', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o mode: list completion candidates for option flags instead of
        # command names — the global options plus, when a command is
        # given, that command's own option table.
        opttables = [globalopts]
        if cmd:
            unused_aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        flags = []
        for opttable in opttables:
            for opt in opttable:
                # opt is (short, long, default, description[, valuename]);
                # deprecated options are hidden from completion.
                if b"(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append(b'-%s' % opt[0])
                flags.append(b'--%s' % opt[1])
        ui.write(b"%s\n" % b"\n".join(flags))
        return

    # Default mode: complete command names matching the given prefix.
    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # In verbose mode, show every alias of each matching command.
        cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
    ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2561 2561
2562 2562
@command(
    b'diff',
    [
        (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
        (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
        (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
        (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
    ]
    + diffopts
    + diffopts2
    + walkopts
    + subrepoopts,
    _(b'[OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified. To diff against the
       conflict regions, you can use `--config diff.merge=yes`.

    By default, the working directory files are compared to its first parent. To
    see the differences from another revision, use --from. To see the difference
    to another revision, use --to. For example, :hg:`diff --from .^` will show
    the differences from the working copy's grandparent to the working copy,
    :hg:`diff --to .` will show the diff from the working copy to its parent
    (i.e. the reverse of the default), and :hg:`diff --from 1.0 --to 1.2` will
    show the diff between those two revisions.

    Alternatively you can specify -c/--change with a revision to see the changes
    in that changeset relative to its first parent (i.e. :hg:`diff -c 42` is
    equivalent to :hg:`diff --from 42^ --to 42`)

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git --from 1.0 --to 1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat --from "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353                  # compare against first parent
          hg diff --from 9353^ --to 9353   # same using revset syntax
          hg diff --from 9353^2 --to 9353  # compare against the second parent

    Returns 0 on success.
    """
    # --rev (deprecated), --change, and --from/--to are alternative ways
    # of naming the revisions to compare; they are mutually exclusive.
    cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    change = opts.get(b'change')
    from_rev = opts.get(b'from')
    to_rev = opts.get(b'to')
    stat = opts.get(b'stat')
    reverse = opts.get(b'reverse')

    cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
    cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])

    # Resolve the pair of changectxs being compared, unhiding any
    # hash-like revisions the user named explicitly.
    if change:
        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
        ctx2 = logcmdutil.revsingle(repo, change, None)
        ctx1 = logcmdutil.diff_parent(ctx2)
    elif from_rev or to_rev:
        repo = scmutil.unhidehashlikerevs(
            repo, [from_rev] + [to_rev], b'nowarn'
        )
        ctx1 = logcmdutil.revsingle(repo, from_rev, None)
        ctx2 = logcmdutil.revsingle(repo, to_rev, None)
    else:
        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
        ctx1, ctx2 = logcmdutil.revpair(repo, revs)

    # --reverse swaps the two sides of the comparison.
    ctxleft, ctxright = (ctx2, ctx1) if reverse else (ctx1, ctx2)

    diffopts = patch.diffallopts(ui, opts)
    matcher = repo.narrowmatch(scmutil.match(ctx2, pats, opts))
    ui.pager(b'diff')
    logcmdutil.diffordiffstat(
        ui,
        repo,
        diffopts,
        ctxleft,
        ctxright,
        matcher,
        stat=stat,
        listsubrepos=opts.get(b'subrepos'),
        root=opts.get(b'root'),
    )
2690 2690
2691 2691
@command(
    b'export',
    [
        (
            b'B',
            b'bookmark',
            b'',
            _(b'export changes only reachable by given bookmark'),
            _(b'BOOKMARK'),
        ),
        (
            b'o',
            b'output',
            b'',
            _(b'print output to file with formatted name'),
            _(b'FORMAT'),
        ),
        (b'', b'switch-parent', None, _(b'diff against the second parent')),
        (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
    ]
    + diffopts
    + formatteropts,
    _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    helpbasic=True,
    intents={INTENT_READONLY},
)
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number
    :``\\``: literal "\\" character

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :diff:    String. Diff content.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    bookmark = opts.get(b'bookmark')
    changesets += tuple(opts.get(b'rev', []))

    # --rev and --bookmark are mutually exclusive ways of selecting revisions
    cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')

    if not bookmark:
        # no bookmark: use the explicit revisions, defaulting to '.'
        if not changesets:
            changesets = [b'.']
        repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
        revs = logcmdutil.revrange(repo, changesets)
    else:
        if bookmark not in repo._bookmarks:
            raise error.InputError(_(b"bookmark '%s' not found") % bookmark)
        revs = scmutil.bookmarkrevs(repo, bookmark)

    if not revs:
        raise error.InputError(_(b"export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_(b'exporting patches:\n'))
    else:
        ui.note(_(b'exporting patch:\n'))

    fntemplate = opts.get(b'output')
    # '-' means stdout, which is the same as no output template at all
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = b''

    if fntemplate:
        # writing to files: no formatted output on the terminal
        outfm = formatter.nullformatter(ui, b'export', opts)
    else:
        ui.pager(b'export')
        outfm = ui.formatter(b'export', opts)
    with outfm:
        cmdutil.export(
            repo,
            revs,
            outfm,
            fntemplate=fntemplate,
            switch_parent=opts.get(b'switch_parent'),
            opts=patch.diffallopts(ui, opts),
        )
2836 2836
2837 2837
@command(
    b'files',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
    ]
    + walkopts
    + formatteropts
    + subrepoopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    intents={INTENT_READONLY},
)
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :flags:   String. Character denoting file's symlink and executable bits.
      :path:    String. Repository-absolute path of the file.
      :size:    Integer. Size of the file in bytes.

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    # rev may be None/empty: revsingle then resolves to the working context
    ctx = logcmdutil.revsingle(repo, rev, None)

    # -0/--print0 switches the record terminator from newline to NUL
    terminator = b'\0' if opts.get(b'print0') else b'\n'
    line_fmt = b'%s' + terminator

    matcher = scmutil.match(ctx, pats, opts)
    ui.pager(b'files')
    uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
    with ui.formatter(b'files', opts) as fm:
        return cmdutil.files(
            ui, ctx, matcher, uipathfn, fm, line_fmt, opts.get(b'subrepos')
        )
2934 2934
2935 2935
@command(
    b'forget',
    [
        (b'i', b'interactive', None, _(b'use interactive mode')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.InputError(_(b'no files specified'))

    # forgetting mutates the dirstate, so hold the wlock for the whole
    # operation and open a dirstate file-change transaction
    with repo.wlock(), repo.dirstate.changing_files(repo):
        matcher = scmutil.match(repo[None], pats, opts)
        dryrun = opts.get(b'dry_run')
        interactive = opts.get(b'interactive')
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        # cmdutil.forget returns (rejected, forgotten); only the rejected
        # list matters for the exit code
        rejected = cmdutil.forget(
            ui,
            repo,
            matcher,
            prefix=b"",
            uipathfn=uipathfn,
            explicitonly=False,
            dryrun=dryrun,
            interactive=interactive,
        )[0]
        return 1 if rejected else 0
2996 2996
2997 2997
@command(
    b'graft',
    [
        (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
        (
            b'',
            b'base',
            b'',
            _(b'base revision when doing the graft merge (ADVANCED)'),
            _(b'REV'),
        ),
        (b'c', b'continue', False, _(b'resume interrupted graft')),
        (b'', b'stop', False, _(b'stop interrupted graft')),
        (b'', b'abort', False, _(b'abort interrupted graft')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (b'', b'log', None, _(b'append graft info to log message')),
        (
            b'',
            b'no-commit',
            None,
            _(b"don't commit, just apply the changes in working directory"),
        ),
        (b'f', b'force', False, _(b'force graft')),
        (
            b'D',
            b'currentdate',
            False,
            _(b'record the current date as commit date'),
        ),
        (
            b'U',
            b'currentuser',
            False,
            _(b'record the current user as committer'),
        ),
    ]
    + commitopts2
    + mergetoolopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV]... REV...'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def graft(ui, repo, *revs, **opts):
    """copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of, or have been grafted to, the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    The -c/--continue option reapplies all the earlier options.

    .. container:: verbose

      The --base option exposes more of how graft internally uses merge with a
      custom base revision. --base can be used to specify another ancestor than
      the first and only parent.

      The command::

        hg graft -r 345 --base 234

      is thus pretty much the same as::

        hg diff --from 234 --to 345 | hg import

      but using merge to resolve conflicts and track moved files.

      The result of a merge can thus be backported as a single commit by
      specifying one of the merge parents as base, and thus effectively
      grafting the changes from the other side.

      It is also possible to collapse multiple changesets and clean up history
      by specifying another ancestor as base, much like rebase --collapse
      --keep.

      The commit message can be tweaked after the fact using commit --amend .

      For using non-ancestors as the base to backout changes, see the backout
      command and the hidden --parent option.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

      - backport the result of a merge as a single commit::

          hg graft -r 123 --base 123^

      - land a feature branch as one changeset::

          hg up -cr default
          hg graft -r featureX --base "ancestor('featureX', 'default')"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion, 1 if there are unresolved files.
    """
    # Thin wrapper: take the working-copy lock once, then delegate all the
    # real work (option handling, merge loop, state file) to _dograft().
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
3137 3137
3138 3138
def _dograft(ui, repo, *revs, **opts):
    """Implement :hg:`graft`; the caller (graft()) already holds the wlock.

    Handles the --stop/--abort/--continue sub-flows, filters out
    revisions that cannot or need not be grafted (merges, ancestors of
    the destination, duplicates of earlier grafts), then grafts each
    remaining revision via a merge against its parent (or --base) and
    commits it unless --no-commit was given.

    Returns 0 on success, 1 when a graft merge leaves unresolved
    conflicts (state is saved for --continue), and -1 when every
    requested revision ended up being skipped.

    NOTE: ``opts`` keys here are native str (this runs before any
    byteskwargs conversion), while ``statedata`` keys are bytes.
    """
    if revs and opts.get('rev'):
        ui.warn(
            _(
                b'warning: inconsistent use of --rev might give unexpected '
                b'revision ordering!\n'
            )
        )

    revs = list(revs)
    revs.extend(opts.get('rev'))
    # a dict of data to be stored in state file
    statedata = {}
    # list of new nodes created by ongoing graft
    statedata[b'newnodes'] = []

    cmdutil.resolve_commit_options(ui, opts)

    editor = cmdutil.getcommiteditor(editform=b'graft', **opts)

    # --abort/--stop/--continue are mutually exclusive ways to resume
    cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')

    cont = False
    if opts.get('no_commit'):
        cmdutil.check_incompatible_arguments(
            opts,
            'no_commit',
            ['edit', 'currentuser', 'currentdate', 'log'],
        )

    # persistent state lives in .hg/graftstate (see graftstate.save below)
    graftstate = statemod.cmdstate(repo, b'graftstate')

    if opts.get('stop'):
        cmdutil.check_incompatible_arguments(
            opts,
            'stop',
            [
                'edit',
                'log',
                'user',
                'date',
                'currentdate',
                'currentuser',
                'rev',
            ],
        )
        return _stopgraft(ui, repo, graftstate)
    elif opts.get('abort'):
        cmdutil.check_incompatible_arguments(
            opts,
            'abort',
            [
                'edit',
                'log',
                'user',
                'date',
                'currentdate',
                'currentuser',
                'rev',
            ],
        )
        return cmdutil.abortgraft(ui, repo, graftstate)
    elif opts.get('continue'):
        cont = True
        if revs:
            raise error.InputError(_(b"can't specify --continue and revisions"))
        # read in unfinished revisions
        if graftstate.exists():
            # restore the options that were in effect when the graft was
            # interrupted, so --continue "reapplies all the earlier options"
            statedata = cmdutil.readgraftstate(repo, graftstate)
            if statedata.get(b'date'):
                opts['date'] = statedata[b'date']
            if statedata.get(b'user'):
                opts['user'] = statedata[b'user']
            if statedata.get(b'log'):
                opts['log'] = True
            if statedata.get(b'no_commit'):
                opts['no_commit'] = statedata.get(b'no_commit')
            if statedata.get(b'base'):
                opts['base'] = statedata.get(b'base')
            nodes = statedata[b'nodes']
            revs = [repo[node].rev() for node in nodes]
        else:
            cmdutil.wrongtooltocontinue(repo, _(b'graft'))
    else:
        # fresh graft: refuse to start on top of unfinished operations or
        # local modifications
        if not revs:
            raise error.InputError(_(b'no revisions specified'))
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        revs = logcmdutil.revrange(repo, revs)

    skipped = set()
    basectx = None
    if opts.get('base'):
        basectx = logcmdutil.revsingle(repo, opts['base'], None)
    if basectx is None:
        # check for merges
        for rev in repo.revs(b'%ld and merge()', revs):
            ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
            skipped.add(rev)
        revs = [r for r in revs if r not in skipped]
        if not revs:
            return -1
    if basectx is not None and len(revs) != 1:
        raise error.InputError(_(b'only one revision allowed with --base '))

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')) and basectx is None:
        # check for ancestors of dest branch
        ancestors = repo.revs(b'%ld & (::.)', revs)
        for rev in ancestors:
            ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))

        revs = [r for r in revs if r not in ancestors]

        if not revs:
            return -1

        # analyze revs for earlier grafts
        # ids maps both the hex node of each rev AND its recorded graft
        # 'source' (if any) back to the local rev number
        ids = {}
        for ctx in repo.set(b"%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get(b'source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug(b'scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
            ctx = repo[rev]
            n = ctx.extra().get(b'source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    # the graft source is not known locally
                    r = None
                if r in revs:
                    ui.warn(
                        _(
                            b'skipping revision %d:%s '
                            b'(already grafted to %d:%s)\n'
                        )
                        % (r, repo[r], rev, ctx)
                    )
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(
                            _(
                                b'skipping already grafted revision %d:%s '
                                b'(%d:%s also has unknown origin %s)\n'
                            )
                            % (ids[n], repo[ids[n]], rev, ctx, n[:12])
                        )
                    else:
                        ui.warn(
                            _(
                                b'skipping already grafted revision %d:%s '
                                b'(%d:%s also has origin %d:%s)\n'
                            )
                            % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
                        )
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                if r in revs:
                    ui.warn(
                        _(
                            b'skipping already grafted revision %d:%s '
                            b'(was grafted from %d:%s)\n'
                        )
                        % (r, repo[r], rev, ctx)
                    )
                    revs.remove(r)
        if not revs:
            return -1

    if opts.get('no_commit'):
        statedata[b'no_commit'] = True
    if opts.get('base'):
        statedata[b'base'] = opts['base']
    # main loop: graft each remaining revision in order
    for pos, ctx in enumerate(repo.set(b"%ld", revs)):
        desc = b'%d:%s "%s"' % (
            ctx.rev(),
            ctx,
            ctx.description().split(b'\n', 1)[0],
        )
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += b' (%s)' % b' '.join(names)
        ui.status(_(b'grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        # record the graft provenance in extra; chained grafts keep the
        # original 'source' and add 'intermediate-source'
        source = ctx.extra().get(b'source')
        extra = {}
        if source:
            extra[b'source'] = source
            extra[b'intermediate-source'] = ctx.hex()
        else:
            extra[b'source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
            statedata[b'user'] = user
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
            statedata[b'date'] = date
        message = ctx.description()
        if opts.get('log'):
            message += b'\n(grafted from %s)' % ctx.hex()
            statedata[b'log'] = True

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
            base = ctx.p1() if basectx is None else basectx
            with ui.configoverride(overrides, b'graft'):
                stats = mergemod.graft(
                    repo, ctx, base, [b'local', b'graft', b'parent of graft']
                )
            # report any conflicts
            if stats.unresolvedcount > 0:
                # write out state for --continue
                nodes = [repo[rev].hex() for rev in revs[pos:]]
                statedata[b'nodes'] = nodes
                stateversion = 1
                graftstate.save(stateversion, statedata)
                ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
                ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
                return 1
        else:
            cont = False

        # commit if --no-commit is false
        if not opts.get('no_commit'):
            node = repo.commit(
                text=message, user=user, date=date, extra=extra, editor=editor
            )
            if node is None:
                ui.warn(
                    _(b'note: graft of %d:%s created no changes to commit\n')
                    % (ctx.rev(), ctx)
                )
            # checking that newnodes exist because old state files won't have it
            elif statedata.get(b'newnodes') is not None:
                nn = statedata[b'newnodes']
                assert isinstance(nn, list)  # list of bytes
                nn.append(node)

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        graftstate.delete()

    return 0
3403 3403
3404 3404
def _stopgraft(ui, repo, graftstate):
    """Abandon an interrupted graft: reset to '.' and drop the state file."""
    if not graftstate.exists():
        raise error.StateError(_(b"no interrupted graft found"))
    # discard the half-merged working copy, then forget the saved state
    parent = repo[b'.']
    mergemod.clean_update(parent)
    graftstate.delete()
    ui.status(_(b"stopped the interrupted graft\n"))
    ui.status(_(b"working directory is now at %s\n") % parent.hex()[:12])
    return 0
3415 3415
3416 3416
# Register graft's persistent state file with the state machinery so that
# an interrupted graft is detected by other commands, can be cleared, and
# surfaces the --continue/--stop hint below (abort is wired to hgabortgraft).
statemod.addunfinished(
    b'graft',
    fname=b'graftstate',
    clearable=True,
    stopflag=True,
    continueflag=True,
    abortfunc=cmdutil.hgabortgraft,
    cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
)
3426 3426
3427 3427
@command(
    b'grep',
    [
        (b'0', b'print0', None, _(b'end fields with NUL')),
        (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
        (
            b'',
            b'diff',
            None,
            _(
                b'search revision differences for when the pattern was added '
                b'or removed'
            ),
        ),
        (b'a', b'text', None, _(b'treat all files as text')),
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history,'
                b' or file history across copies and renames'
            ),
        ),
        (b'i', b'ignore-case', None, _(b'ignore case when matching')),
        (
            b'l',
            b'files-with-matches',
            None,
            _(b'print only filenames and revisions that match'),
        ),
        (b'n', b'line-number', None, _(b'print matching line numbers')),
        (
            b'r',
            b'rev',
            [],
            _(b'search files changed within revision range'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            None,
            _(
                b'include all files in the changeset while grepping (DEPRECATED)'
            ),
        ),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
    ]
    + formatteropts
    + walkopts,
    _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files

    Search the working directory or revision history for a regular
    expression in the specified files for the entire repository.

    By default, grep searches the repository files in the working
    directory and prints the files where it finds a match. To specify
    historical revisions instead of the working directory, use the
    --rev flag.

    To search instead historical revision differences that contains a
    change in match status ("-" for a match that becomes a non-match,
    or "+" for a non-match that becomes a match), use the --diff flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified and the --rev flag isn't supplied, all
    files in the working directory are searched. When using the --rev
    flag and specifying FILEs, use the --follow argument to also
    follow the specified FILEs across renames and copies.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :change:  String. Character denoting insertion ``+`` or removal ``-``.
                Available if ``--diff`` is specified.
      :lineno:  Integer. Line number of the match.
      :path:    String. Repository-absolute path of the file.
      :texts:   List of text chunks.

      And each entry of ``{texts}`` provides the following sub-keywords.

      :matched: Boolean. True if the chunk matches the specified pattern.
      :text:    String. Chunk content.

      See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 if a match is found, 1 otherwise.

    """
    cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
    opts = pycompat.byteskwargs(opts)
    # --all is a deprecated alias for --diff (see the option table above)
    diff = opts.get(b'all') or opts.get(b'diff')
    follow = opts.get(b'follow')
    # default to --all-files when not in diff mode
    if opts.get(b'all_files') is None and not diff:
        opts[b'all_files'] = True
    # "plain" grep: search the working directory only (no --rev/--follow)
    plaingrep = (
        opts.get(b'all_files')
        and not opts.get(b'rev')
        and not opts.get(b'follow')
    )
    all_files = opts.get(b'all_files')
    if plaingrep:
        opts[b'rev'] = [b'wdir()']

    reflags = re.M
    if opts.get(b'ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        # a bad PATTERN is reported as a warning plus exit status 1
        ui.warn(
            _(b"grep: invalid match pattern: %s\n")
            % stringutil.forcebytestr(inst)
        )
        return 1
    # -0/--print0 replaces both field and record separators with NUL
    sep, eol = b':', b'\n'
    if opts.get(b'print0'):
        sep = eol = b'\0'

    searcher = grepmod.grepsearcher(
        ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
    )

    getfile = searcher._getfile

    uipathfn = scmutil.getuipathfn(repo)

    def display(fm, fn, ctx, pstates, states):
        """Write the matches for one file in one revision to formatter fm.

        Returns True if at least one match line was emitted.
        """
        rev = scmutil.intrev(ctx)
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = pycompat.bytestr
        if ui.quiet:
            datefmt = b'%Y-%m-%d'
        else:
            datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
        found = False

        @util.cachefunc
        def binary():
            # cached: does the file content look binary in this revision?
            flog = getfile(fn)
            try:
                return stringutil.binary(flog.read(ctx.filenode(fn)))
            except error.WdirUnsupported:
                return ctx[fn].isbinary()

        fieldnamemap = {b'linenumber': b'lineno'}
        # NOTE: 'iter' shadows the builtin of the same name inside this closure
        if diff:
            iter = grepmod.difflinestates(pstates, states)
        else:
            iter = [(b'', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
            fm.plain(uipathfn(fn), label=b'grep.filename')

            # each entry: (name, format, data, condition, extra label)
            cols = [
                (b'rev', b'%d', rev, not plaingrep, b''),
                (
                    b'linenumber',
                    b'%d',
                    l.linenum,
                    opts.get(b'line_number'),
                    b'',
                ),
            ]
            if diff:
                cols.append(
                    (
                        b'change',
                        b'%s',
                        change,
                        True,
                        b'grep.inserted '
                        if change == b'+'
                        else b'grep.deleted ',
                    )
                )
            cols.extend(
                [
                    (
                        b'user',
                        b'%s',
                        formatuser(ctx.user()),
                        opts.get(b'user'),
                        b'',
                    ),
                    (
                        b'date',
                        b'%s',
                        fm.formatdate(ctx.date(), datefmt),
                        opts.get(b'date'),
                        b'',
                    ),
                ]
            )
            for name, fmt, data, cond, extra_label in cols:
                if cond:
                    fm.plain(sep, label=b'grep.sep')
                field = fieldnamemap.get(name, name)
                label = extra_label + (b'grep.%s' % name)
                fm.condwrite(cond, field, fmt, data, label=label)
            if not opts.get(b'files_with_matches'):
                fm.plain(sep, label=b'grep.sep')
                if not opts.get(b'text') and binary():
                    fm.plain(_(b" Binary file matches"))
                else:
                    displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
            fm.plain(eol)
            found = True
            # -l/--files-with-matches: one line per file is enough
            if opts.get(b'files_with_matches'):
                break
        return found

    def displaymatches(fm, l):
        # split line l into matched/unmatched chunks so templates (and
        # color labels) can distinguish them
        p = 0
        for s, e in l.findpos(regexp):
            if p < s:
                fm.startitem()
                fm.write(b'text', b'%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write(b'text', b'%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    found = False

    wopts = logcmdutil.walkopts(
        pats=pats,
        opts=opts,
        revspec=opts[b'rev'],
        include_pats=opts[b'include'],
        exclude_pats=opts[b'exclude'],
        follow=follow,
        force_changelog_traversal=all_files,
        filter_revisions_by_pats=not all_files,
    )
    revs, makefilematcher = logcmdutil.makewalker(repo, wopts)

    ui.pager(b'grep')
    fm = ui.formatter(b'grep', opts)
    for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
        r = display(fm, fn, ctx, pstates, states)
        found = found or r
        if r and not diff and not all_files:
            # once a file has matched, skip its older revisions
            searcher.skipfile(fn, ctx.rev())
    fm.end()

    # exit status 0 iff at least one match was printed
    return not found
3699 3699
3700 3700
@command(
    b'heads',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show only heads which are descendants of STARTREV'),
            _(b'STARTREV'),
        ),
        (b't', b'topo', False, _(b'show topological heads only')),
        (
            b'a',
            b'active',
            False,
            _(b'show active branchheads only (DEPRECATED)'),
        ),
        (b'c', b'closed', False, _(b'show normal and closed branch heads')),
    ]
    + templateopts,
    _(b'[-ct] [-r STARTREV] [REV]...'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    intents={INTENT_READONLY},
)
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    # Resolve the optional STARTREV; when given, only heads descending
    # from it are considered.
    rev = opts.get(b'rev')
    start = None
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
        start = logcmdutil.revsingle(repo, rev, None).node()

    if opts.get(b'topo'):
        # Topological heads: ignore named-branch boundaries entirely.
        heads = [repo[n] for n in repo.heads(start)]
    else:
        # Collect the branch heads of every named branch, optionally
        # including heads of closed branches.
        nodes = []
        for name in repo.branchmap():
            nodes.extend(repo.branchheads(name, start, opts.get(b'closed')))
        heads = [repo[n] for n in nodes]

    if branchrevs:
        # Restrict to the branches the given revisions live on.
        branches = {
            repo[r].branch() for r in logcmdutil.revrange(repo, branchrevs)
        }
        heads = [h for h in heads if h.branch() in branches]

    if opts.get(b'active') and branchrevs:
        # DEPRECATED --active: keep only heads that are also DAG heads.
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        # Warn about requested branches that ended up with no open head.
        haveheads = {h.branch() for h in heads}
        missing = branches - haveheads
        if missing:
            headless = b', '.join(missing)
            msg = _(b'no open branch heads found on branches %s')
            if opts.get(b'rev'):
                msg += _(b' (started at %s)') % opts[b'rev']
            ui.warn((msg + b'\n') % headless)

    if not heads:
        return 1

    ui.pager(b'heads')
    # Newest first.
    heads = sorted(heads, key=lambda c: c.rev(), reverse=True)
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()
3793 3793
3794 3794
@command(
    b'help',
    [
        (b'e', b'extension', None, _(b'show only help for extensions')),
        (b'c', b'command', None, _(b'show only help for commands')),
        (b'k', b'keyword', None, _(b'show topics matching keyword')),
        (
            b's',
            b'system',
            [],
            _(b'show help for specific platform(s)'),
            _(b'PLATFORM'),
        ),
    ],
    _(b'[-eck] [-s PLATFORM] [TOPIC]'),
    helpcategory=command.CATEGORY_HELP,
    norepo=True,
    intents={INTENT_READONLY},
)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    keep = opts.get('system') or []
    if not keep:
        # No explicit --system given: default to the platform-specific
        # help sections for the platform we are running on.
        platform = pycompat.sysplatform
        if platform.startswith(b'win'):
            keep.append(b'windows')
        elif platform == b'OpenVMS':
            keep.append(b'vms')
        elif platform == b'plan9':
            keep.append(b'plan9')
        else:
            keep.append(b'unix')
        keep.append(platform.lower())
    if ui.verbose:
        keep.append(b'verbose')

    # The commands table lives in this very module.
    commands = sys.modules[__name__]
    formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
    ui.pager(b'help')
    ui.write(formatted)
3843 3843
3844 3844
@command(
    b'identify|id',
    [
        (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
        (b'n', b'num', None, _(b'show local revision number')),
        (b'i', b'id', None, _(b'show global revision id')),
        (b'b', b'branch', None, _(b'show branch')),
        (b't', b'tags', None, _(b'show tags')),
        (b'B', b'bookmarks', None, _(b'show bookmarks')),
    ]
    + remoteopts
    + formatteropts,
    _(b'[-nibtB] [-r REV] [SOURCE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def identify(
    ui,
    repo,
    source=None,
    rev=None,
    num=None,
    id=None,
    branch=None,
    tags=None,
    bookmarks=None,
    **opts
):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository including the working directory. Specify -r. to get information
    of the working directory parent without scanning uncommitted changes.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :dirty: String. Character ``+`` denoting if the working directory has
              uncommitted changes.
      :id: String. One or two nodes, optionally followed by ``+``.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

      See :hg:`log` for generating more information about specific revisions,
      including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.InputError(
            _(b"there is no Mercurial repository here (.hg not found)")
        )

    # "default output" mode: no specific field was requested.
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    peer = None
    try:
        if source:
            # A repository/bundle path or URL was given: identify that
            # repository instead of (or in addition to) the local one.
            path = urlutil.get_unique_pull_path_obj(b'identify', ui, source)
            # only pass ui when no repo
            peer = hg.peer(repo or ui, opts, path)
            repo = peer.local()
            branches = (path.branch, [])
            revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

        fm = ui.formatter(b'identify', opts)
        fm.startitem()

        if not repo:
            # Remote-only peer: we can resolve an id, but not local-only
            # notions like revision numbers, branches or tags.
            if num or branch or tags:
                raise error.InputError(
                    _(b"can't query remote revision number, branch, or tags")
                )
            if not rev and revs:
                rev = revs[0]
            if not rev:
                rev = b"tip"

            remoterev = peer.lookup(rev)
            hexrev = fm.hexfunc(remoterev)
            if default or id:
                output = [hexrev]
            fm.data(id=hexrev)

            @util.cachefunc
            def getbms():
                # Bookmarks on the remote that point at the identified
                # revision (empty if the remote lacks bookmark support).
                bms = []

                if b'bookmarks' in peer.listkeys(b'namespaces'):
                    hexremoterev = hex(remoterev)
                    bms = [
                        bm
                        for bm, bmr in peer.listkeys(b'bookmarks').items()
                        if bmr == hexremoterev
                    ]

                return sorted(bms)

            if fm.isplain():
                if bookmarks:
                    output.extend(getbms())
                elif default and not ui.quiet:
                    # multiple bookmarks for a single parent separated by '/'
                    bm = b'/'.join(getbms())
                    if bm:
                        output.append(bm)
            else:
                fm.data(node=hex(remoterev))
                if bookmarks or b'bookmarks' in fm.datahint():
                    fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
        else:
            if rev:
                repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
            ctx = logcmdutil.revsingle(repo, rev, None)

            if ctx.rev() is None:
                # Working directory: identify via its parent(s), with a
                # trailing '+' if there are uncommitted changes.
                ctx = repo[None]
                parents = ctx.parents()
                taglist = []
                for p in parents:
                    taglist.extend(p.tags())

                dirty = b""
                if ctx.dirty(missing=True, merge=False, branch=False):
                    dirty = b'+'
                fm.data(dirty=dirty)

                hexoutput = [fm.hexfunc(p.node()) for p in parents]
                if default or id:
                    output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
                fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))

                if num:
                    numoutput = [b"%d" % p.rev() for p in parents]
                    output.append(b"%s%s" % (b'+'.join(numoutput), dirty))

                fm.data(
                    parents=fm.formatlist(
                        [fm.hexfunc(p.node()) for p in parents], name=b'node'
                    )
                )
            else:
                # A concrete revision was identified.
                hexoutput = fm.hexfunc(ctx.node())
                if default or id:
                    output = [hexoutput]
                fm.data(id=hexoutput)

                if num:
                    output.append(pycompat.bytestr(ctx.rev()))
                taglist = ctx.tags()

            if default and not ui.quiet:
                b = ctx.branch()
                if b != b'default':
                    output.append(b"(%s)" % b)

                # multiple tags for a single parent separated by '/'
                t = b'/'.join(taglist)
                if t:
                    output.append(t)

                # multiple bookmarks for a single parent separated by '/'
                bm = b'/'.join(ctx.bookmarks())
                if bm:
                    output.append(bm)
            else:
                if branch:
                    output.append(ctx.branch())

                if tags:
                    output.extend(taglist)

                if bookmarks:
                    output.extend(ctx.bookmarks())

            fm.data(node=ctx.hex())
            fm.data(branch=ctx.branch())
            fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
            fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
            fm.context(ctx=ctx)

        fm.plain(b"%s\n" % b' '.join(output))
        fm.end()
    finally:
        # Always close the peer connection, even on error paths.
        if peer:
            peer.close()
4061 4061
4062 4062
@command(
    b'import|patch',
    [
        (
            b'p',
            b'strip',
            1,
            _(
                b'directory strip option for patch. This has the same '
                b'meaning as the corresponding patch option'
            ),
            _(b'NUM'),
        ),
        (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
        (b'', b'secret', None, _(b'use the secret phase for committing')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (
            b'f',
            b'force',
            None,
            _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
        ),
        (
            b'',
            b'no-commit',
            None,
            _(b"don't commit, just update the working directory"),
        ),
        (
            b'',
            b'bypass',
            None,
            _(b"apply patch without touching the working directory"),
        ),
        (b'', b'partial', None, _(b'commit even if some hunks fail')),
        (b'', b'exact', None, _(b'abort if patch would apply lossily')),
        (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
        (
            b'',
            b'import-branch',
            None,
            _(b'use any branch information in patch (implied by --exact)'),
        ),
    ]
    + commitopts
    + commitopts2
    + similarityopts,
    _(b'[OPTION]... PATCH...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    To read a patch from standard input (stdin), use "-" as the patch
    name. If a URL is specified, the patch will be downloaded from
    there.

    Import first applies changes to the working directory (unless
    --bypass is specified), import will abort if there are outstanding
    changes.

    Use --bypass to apply and commit patches directly to the
    repository, without affecting the working directory. Without
    --exact, patches will be applied on top of the working directory
    parent revision.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to the commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This will guard against various ways that portable
    patch formats and mail systems might fail to transfer Mercurial
    data or metadata. See :hg:`bundle` for lossless transmission.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...).

    .. note::

       When no hunks apply cleanly, :hg:`import --partial` will create
       an empty changeset, importing only the patch metadata.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    It is possible to use external patch programs to perform the patch
    by setting the ``ui.patch`` configuration option. For the default
    internal tool, the fuzz can also be configured via ``patch.fuzz``.
    See :hg:`help config` for more information about configuration
    files and how to use these options.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - import patches from stdin::

          hg import -

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      - use an external tool to apply a patch which is too fuzzy for
        the default internal tool.

          hg import --config ui.patch="patch --merge" fuzzy.patch

      - change the default fuzzing from 2 to a less strict 7

          hg import --config ui.fuzz=7 fuzz.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    # Validate mutually exclusive option combinations up front.
    cmdutil.check_incompatible_arguments(
        opts, 'no_commit', ['bypass', 'secret']
    )
    cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
    opts = pycompat.byteskwargs(opts)
    if not patch1:
        raise error.InputError(_(b'need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    exact = opts.get(b'exact')
    # --bypass means we never touch the working directory.
    update = not opts.get(b'bypass')
    try:
        sim = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.InputError(_(b'similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.InputError(_(b'similarity must be between 0 and 100'))
    if sim and not update:
        raise error.InputError(_(b'cannot use --similarity with --bypass'))

    base = opts[b"base"]
    msgs = []
    ret = 0

    with repo.wlock():
        if update:
            # Refuse to touch a working directory with an interrupted
            # operation or (unless --force) uncommitted changes.
            cmdutil.checkunfinished(repo)
            if exact or not opts.get(b'force'):
                cmdutil.bailifchanged(repo)

        if not opts.get(b'no_commit'):
            # Committing: take the store lock and open a transaction.
            lock = repo.lock
            tr = lambda: repo.transaction(b'import')
        else:
            # --no-commit: no store mutation, so no lock/transaction needed.
            lock = util.nullcontextmanager
            tr = util.nullcontextmanager
        with lock(), tr():
            parents = repo[None].parents()
            for patchurl in patches:
                if patchurl == b'-':
                    ui.status(_(b'applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = b'stdin'  # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_(b'applying %s\n') % patchurl)
                    patchfile = hg.openpath(ui, patchurl, sendaccept=False)

                haspatch = False
                # One file/stream may hold several patches (e.g. an mbox);
                # apply each chunk individually.
                for hunk in patch.split(patchfile):
                    with patch.extract(ui, hunk) as patchdata:
                        msg, node, rej = cmdutil.tryimportone(
                            ui, repo, patchdata, parents, opts, msgs, hg.clean
                        )
                    if msg:
                        haspatch = True
                        ui.note(msg + b'\n')
                    # Track the parent(s) the next patch applies on top of.
                    if update or exact:
                        parents = repo[None].parents()
                    else:
                        parents = [repo[node]]
                    if rej:
                        # Some hunks were rejected (--partial path).
                        ui.write_err(_(b"patch applied partially\n"))
                        ui.write_err(
                            _(
                                b"(fix the .rej files and run "
                                b"`hg commit --amend`)\n"
                            )
                        )
                        ret = 1
                        break

                if not haspatch:
                    raise error.InputError(_(b'%s: no diffs found') % patchurl)

            if msgs:
                repo.savecommitmessage(b'\n* * *\n'.join(msgs))
        return ret
4295 4295
4296 4296
@command(
    b'incoming|in',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even if remote repository is unrelated'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
        (
            b'r',
            b'rev',
            [],
            _(b'a remote changeset intended to be added'),
            _(b'REV'),
        ),
        (b'B', b'bookmarks', False, _(b"compare bookmarks")),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to pull'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def incoming(ui, repo, source=b"default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    by :hg:`pull` at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a   added
        BM2               1234567890ab   advanced
        BM3               234567890abc   diverged
        BM4               34567890abcd   changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'graph'):
        # --graph mode renders the incoming DAG as ASCII art; delegate
        # actual peer handling to hg._incoming().
        logcmdutil.checkunsupportedgraphflags([], opts)

        def display(other, chlist, displayer):
            # Callback invoked with the remote peer and the incoming
            # changesets to draw.
            revdag = logcmdutil.graphrevs(other, chlist, opts)
            logcmdutil.displaygraph(
                ui, repo, revdag, displayer, graphmod.asciiedges
            )

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])

    if opts.get(b'bookmarks'):
        # -B/--bookmarks: compare bookmarks rather than changesets.
        srcs = urlutil.get_pull_paths(repo, ui, [source])
        for path in srcs:
            # XXX the "branches" options are not used. Should it be used?
            other = hg.peer(repo, opts, path)
            try:
                if b'bookmarks' not in other.listkeys(b'namespaces'):
                    ui.warn(_(b"remote doesn't support bookmarks\n"))
                    return 0
                ui.pager(b'incoming')
                ui.status(
                    _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
                )
                return bookmarks.incoming(
                    ui, repo, other, mode=path.bookmarks_mode
                )
            finally:
                # Close the peer even when returning early.
                other.close()

    return hg.incoming(ui, repo, source, opts)
4419 4419
4420 4420
@command(
    b'init',
    remoteopts,
    _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
    helpcategory=command.CATEGORY_REPO_CREATION,
    helpbasic=True,
    norepo=True,
)
def init(ui, dest=b".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # Resolve the destination into a path object, then let hg.peer() do
    # the actual repository creation (locally or e.g. over ssh://).
    target = urlutil.get_clone_path_obj(ui, dest)
    new_peer = hg.peer(ui, opts, target, create=True)
    new_peer.close()
4446 4446
4447 4447
@command(
    b'locate',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (
            b'f',
            b'fullpath',
            None,
            _(b'print complete paths from the filesystem root'),
        ),
    ]
    + walkopts,
    _(b'[OPTION]... [PATTERN]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # Separator between printed names: NUL for `xargs -0`, else newline.
    end = b'\0' if opts.get(b'print0') else b'\n'
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    ret = 1
    m = scmutil.match(
        ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
    )

    ui.pager(b'locate')
    if ctx.rev() is None:
        # When run on the working copy, "locate" includes removed files,
        # so the file list comes from the dirstate rather than the context.
        filesgen = sorted(repo.dirstate.matches(m))
    else:
        filesgen = ctx.matches(m)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
    fullpath = opts.get(b'fullpath')
    for abs in filesgen:
        name = repo.wjoin(abs) if fullpath else uipathfn(abs)
        ui.write(name, end)
        # At least one file matched.
        ret = 0

    return ret
4525 4525
4526 4526
@command(
    b'log|history',
    [
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history, or file history across copies and renames'
            ),
        ),
        (
            b'',
            b'follow-first',
            None,
            _(b'only follow the first parent of merge changesets (DEPRECATED)'),
        ),
        (
            b'd',
            b'date',
            b'',
            _(b'show revisions matching date spec'),
            _(b'DATE'),
        ),
        (b'C', b'copies', None, _(b'show copied files')),
        (
            b'k',
            b'keyword',
            [],
            _(b'do case-insensitive search for a given text'),
            _(b'TEXT'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'revisions to select or follow from'),
            _(b'REV'),
        ),
        (
            b'L',
            b'line-range',
            [],
            _(b'follow line range of specified file (EXPERIMENTAL)'),
            _(b'FILE,RANGE'),
        ),
        (
            b'',
            b'removed',
            None,
            _(b'include revisions where files were removed'),
        ),
        (
            b'm',
            b'only-merges',
            None,
            _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
        ),
        (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
        (
            b'',
            b'only-branch',
            [],
            _(
                b'show only changesets within the given named branch (DEPRECATED)'
            ),
            _(b'BRANCH'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'show changesets within the given named branch'),
            _(b'BRANCH'),
        ),
        (
            b'B',
            b'bookmark',
            [],
            _(b"show changesets within the given bookmark"),
            _(b'BOOKMARK'),
        ),
        (
            b'P',
            b'prune',
            [],
            _(b'do not display revision or any of its ancestors'),
            _(b'REV'),
        ),
    ]
    + logopts
    + walkopts,
    _(b'[OPTION]... [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors of the starting revisions. The starting revisions can be
    specified by -r/--rev, which default to the working directory parent.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, '%' is a changeset
    involved in an unresolved merge conflict, '_' closes a branch,
    'x' is obsolete, '*' is unstable, and '+' represents a fork where the
    changeset from the lines below is a parent of the 'o' merge on the same
    line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. container:: verbose

       Use -L/--line-range FILE,M:N options to follow the history of lines
       from M to N in FILE. With -p/--patch only diff hunks affecting
       specified line range will be shown. This option requires --follow;
       it can be specified multiple times. Currently, this option is not
       compatible with --graph. This option is experimental.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

       .. note::

          The history resulting from -L/--line-range options depends on diff
          options; for instance if white-spaces are ignored, respective changes
          with only white-spaces in specified line range will not be listed.

    .. container:: verbose

       Some examples:

       - changesets with full descriptions and file lists::

           hg log -v

       - changesets ancestral to the working directory::

           hg log -f

       - last 10 commits on the current branch::

           hg log -l 10 -b .

       - changesets showing all modifications of a file, including removals::

           hg log --removed file.c

       - all changesets that touch a directory, with diffs, excluding merges::

           hg log -Mp lib/

       - all revision numbers that match a keyword::

           hg log -k bug --template "{rev}\\n"

       - the full hash identifier of the working directory parent::

           hg log -r . --template "{node}\\n"

       - list available log templates::

           hg log -T list

       - check if a given changeset is included in a tagged release::

           hg log -r "a21ccf and ancestor(1.9)"

       - find all changesets by some user in a date range::

           hg log -k alice -d "may 2008 to jul 2008"

       - summary of all changesets after the last tag::

           hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

       - changesets touching lines 13 to 23 for file.c::

           hg log -L file.c,13:23

       - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
         main.c with patch::

           hg log -L file.c,13:23 -L main.c,2:6 -p

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates. The default template used by the log
    command can be customized via the ``command-templates.log`` configuration
    setting.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    linerange = opts.get(b'line_range')

    # -L/--line-range only makes sense when following file history, and is
    # mutually exclusive with positional FILE patterns (for now).
    if linerange and not opts.get(b'follow'):
        raise error.InputError(_(b'--line-range requires --follow'))

    if linerange and pats:
        # TODO: take pats as patterns with no line-range filter
        raise error.InputError(
            _(b'FILE arguments are not compatible with --line-range option')
        )

    # Let hash-like values given to --rev resolve even when they are hidden
    # (b'nowarn' suppresses the usual warning about it).
    repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
    walk_opts = logcmdutil.parseopts(ui, pats, opts)
    revs, differ = logcmdutil.getrevs(repo, walk_opts)
    if linerange:
        # TODO: should follow file history from logcmdutil._initialrevs(),
        # then filter the result by logcmdutil._makerevset() and --limit
        revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)

    getcopies = None
    if opts.get(b'copies'):
        endrev = None
        if revs:
            # +1: bound copy tracing just past the newest selected revision
            # (endrev looks exclusive here -- see scmutil.getcopiesfn).
            endrev = revs.max() + 1
        getcopies = scmutil.getcopiesfn(repo, endrev=endrev)

    ui.pager(b'log')
    displayer = logcmdutil.changesetdisplayer(
        ui, repo, opts, differ, buffered=True
    )
    # Graph and flat output share the same displayer; only the driver differs.
    if opts.get(b'graph'):
        displayfn = logcmdutil.displaygraphrevs
    else:
        displayfn = logcmdutil.displayrevs
    displayfn(ui, repo, revs, displayer, getcopies)
4790 4790
4791 4791
@command(
    b'manifest',
    [
        (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
        (b'', b'all', False, _(b"list files from all revisions")),
    ]
    + formatteropts,
    _(b'[-r REV]'),
    helpcategory=command.CATEGORY_MAINTENANCE,
    intents={INTENT_READONLY},
)
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter(b'manifest', opts)

    if opts.get(b'all'):
        if rev or node:
            raise error.InputError(_(b"can't specify a revision with --all"))

        # Union of files touched by every changeset; this is how removed and
        # renamed files end up in the listing.
        res = set()
        for rev in repo:
            ctx = repo[rev]
            res |= set(ctx.files())

        ui.pager(b'manifest')
        for f in sorted(res):
            fm.startitem()
            fm.write(b"path", b'%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.InputError(_(b"please specify just one revision"))

    if not node:
        node = rev

    # Manifest flag -> display char / unix-style mode string.  b'l' is a
    # symlink, b'x' executable, b'' a plain file; b't' presumably marks a
    # tree/directory entry (shown as 'd') -- NOTE(review): confirm.
    char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
    mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
    if node:
        repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
    ctx = logcmdutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager(b'manifest')
    for f in ctx:
        fm.startitem()
        fm.context(ctx=ctx)
        fl = ctx[f].flags()
        # Hashes only with --debug, mode/type only with -v.
        fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
        fm.write(b'path', b'%s\n', f)
    fm.end()
4858 4858
4859 4859
@command(
    b'merge',
    [
        (
            b'f',
            b'force',
            None,
            _(b'force a merge including outstanding changes (DEPRECATED)'),
        ),
        (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
        (
            b'P',
            b'preview',
            None,
            _(b'review revisions to merge (no merge is performed)'),
        ),
        (b'', b'abort', None, _(b'abort the ongoing merge')),
    ]
    + mergetoolopts,
    _(b'[-P] [[-r] REV]'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
    helpbasic=True,
)
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`merge --abort` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    abort = opts.get(b'abort')
    # --abort with no second dirstate parent means there is no merge in
    # progress to abort.
    if abort and repo.dirstate.p2() == repo.nullid:
        cmdutil.wrongtooltocontinue(repo, _(b'merge'))
    cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
    if abort:
        state = cmdutil.getunfinishedstate(repo)
        # Refuse to abort if some other multi-step operation (rebase, ...)
        # is the one actually in progress.
        if state and state._opname != b'merge':
            raise error.StateError(
                _(b'cannot abort merge with %s in progress') % (state._opname),
                hint=state.hint(),
            )
        if node:
            raise error.InputError(_(b"cannot specify a node with --abort"))
        return hg.abortmerge(repo.ui, repo)

    if opts.get(b'rev') and node:
        raise error.InputError(_(b"please specify just one revision"))
    if not node:
        node = opts.get(b'rev')

    if node:
        ctx = logcmdutil.revsingle(repo, node)
    else:
        if ui.configbool(b'commands', b'merge.require-rev'):
            raise error.InputError(
                _(
                    b'configuration requires specifying revision to merge '
                    b'with'
                )
            )
        # No explicit revision: let destutil pick the default merge target.
        ctx = repo[destutil.destmerge(repo)]

    if ctx.node() is None:
        raise error.InputError(
            _(b'merging with the working copy has no effect')
        )

    if opts.get(b'preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo[b'.'].node()
        p2 = ctx.node()
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    # ui.forcemerge is an internal variable, do not document
    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
    with ui.configoverride(overrides, b'merge'):
        force = opts.get(b'force')
        labels = [b'working copy', b'merge rev', b'common ancestor']
        return hg.merge(ctx, force=force, labels=labels)
4968 4968
4969 4969
# Register 'merge' as an unfinished-operation state so other commands can
# detect an interrupted merge and tell the user how to continue or abort it.
statemod.addunfinished(
    b'merge',
    fname=None,
    clearable=True,
    allowcommit=True,
    cmdmsg=_(b'outstanding uncommitted merge'),
    abortfunc=hg.abortmerge,
    statushint=_(
        b'To continue:    hg commit\nTo abort:       hg merge --abort'
    ),
    cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
)
4982 4982
4983 4983
@command(
    b'outgoing|out',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even when the destination is unrelated'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'a changeset intended to be included in the destination'),
            _(b'REV'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'B', b'bookmarks', False, _(b'compare bookmarks')),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to push'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def outgoing(ui, repo, *dests, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'bookmarks'):
        # -B: compare bookmarks against the first usable push destination
        # instead of comparing changesets.
        for path in urlutil.get_push_paths(repo, ui, dests):
            other = hg.peer(repo, opts, path)
            try:
                if b'bookmarks' not in other.listkeys(b'namespaces'):
                    ui.warn(_(b"remote doesn't support bookmarks\n"))
                    return 0
                ui.status(
                    _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
                )
                ui.pager(b'outgoing')
                return bookmarks.outgoing(ui, repo, other)
            finally:
                # Always close the peer, even when returning early above.
                other.close()

    return hg.outgoing(ui, repo, dests, opts)
5069 5069
5070 5070
@command(
    b'parents',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show parents of the specified revision'),
            _(b'REV'),
        ),
    ]
    + templateopts,
    _(b'[-r REV] [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    inferrepo=True,
)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev, None)

    if file_:
        # A single explicit filename is required -- patterns would make the
        # "last changed in" question ambiguous.
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.InputError(_(b'can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # File absent in this parent's manifest; skip it.
                pass
        if not filenodes:
            raise error.InputError(_(b"'%s' not found in manifest") % file_)
        p = []
        for fn in filenodes:
            # Map each file node back to the changeset that introduced it.
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for n in p:
        # Skip the null parent (e.g. a root changeset's missing p2).
        if n != repo.nullid:
            displayer.show(repo[n])
    displayer.close()
5141 5141
5142 5142
@command(
    b'paths',
    formatteropts,
    _(b'[NAME]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    .. container:: verbose

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :name:    String. Symbolic name of the path alias.
      :pushurl: String. URL for push operations.
      :url:     String. URL or directory path for the other operations.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)

    pathitems = urlutil.list_paths(ui, search)
    ui.pager(b'paths')

    fm = ui.formatter(b'paths', opts)
    if fm.isplain():
        # Only mask passwords for human-readable output; machine-readable
        # formats (json, template) get the raw URL bytes.
        hidepassword = urlutil.hidepassword
    else:
        hidepassword = bytes
    if ui.quiet:
        namefmt = b'%s\n'
    else:
        namefmt = b'%s = '
    # Sub-options are only spelled out in a full, non-quiet listing.
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, b'name', namefmt, name)
        fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in (b'name', b'url')
            if showsubopts:
                fm.plain(b'%s:%s = ' % (name, subopt))
            if isinstance(value, bool):
                # Render booleans the way hgrc spells them.
                if value:
                    value = b'yes'
                else:
                    value = b'no'
            fm.condwrite(showsubopts, subopt, b'%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_(b"not found!\n"))
        return 1
    else:
        return 0
5232 5232
5233 5233
@command(
    b'phase',
    [
        (b'p', b'public', False, _(b'set changeset phase to public')),
        (b'd', b'draft', False, _(b'set changeset phase to draft')),
        (b's', b'secret', False, _(b'set changeset phase to secret')),
        (b'f', b'force', False, _(b'allow to move boundary backward')),
        (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
    ],
    _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.cmdphasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.InputError(_(b'only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts[b'rev'])
    if revs:
        revs = logcmdutil.revrange(repo, revs)
    else:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        with repo.lock(), repo.transaction(b"phase") as tr:
            # set phase
            if not revs:
                raise error.InputError(_(b'empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # Snapshot every revision's phase before the move so we can
            # report exactly how many changed afterwards.
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts[b'force']:
                # --force additionally allows moving the boundary backward
                # (towards a higher phase).
                phases.retractboundary(repo, tr, targetphase, nodes)
            getphase = unfi._phasecache.phase
            newdata = [getphase(unfi, r) for r in unfi]
            changes = sum(newdata[r] != olddata[r] for r in unfi)
            cl = unfi.changelog
            # Requested nodes that still sit below the target phase could
            # not be moved (needs --force).
            rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
            if rejected:
                ui.warn(
                    _(
                        b'cannot move %i changesets to a higher '
                        b'phase, use --force\n'
                    )
                    % len(rejected)
                )
                ret = 1
            if changes:
                msg = _(b'phase changed for %i changesets\n') % changes
                if ret:
                    # Partial success: make sure the user sees the summary.
                    ui.status(msg)
                else:
                    ui.note(msg)
            else:
                ui.warn(_(b'no phases changed\n'))
    return ret
5325 5325
5326 5326
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle.

    :modheads: change of heads by pull/unbundle (0 means nothing came in)
    :optupdate: whether the working directory should be updated
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating

    Returns True if the update raised any conflict, False otherwise.
    """
    # Nothing was added: no update, no head hints.
    if modheads == 0:
        return False

    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # Re-raise with a "not updating" prefix, preserving the hint.
            raise error.UpdateAbort(
                _(b"not updating: %s") % stringutil.forcebytestr(inst),
                hint=inst.hint,
            )

    if modheads is not None and modheads > 1:
        # Multiple new heads: suggest how to inspect/merge them.
        nb_current_heads = len(repo.branchheads())
        if nb_current_heads == modheads:
            ui.status(
                _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
            )
        elif nb_current_heads > 1:
            ui.status(
                _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")
            )
        else:
            ui.status(_(b"(run 'hg heads' to see heads)\n"))
    elif not ui.configbool(b'commands', b'update.requiredest'):
        ui.status(_(b"(run 'hg update' to get a working copy)\n"))
    return False
5363 5363
5364 5364
5365 5365 @command(
5366 5366 b'pull',
5367 5367 [
5368 5368 (
5369 5369 b'u',
5370 5370 b'update',
5371 5371 None,
5372 5372 _(b'update to new branch head if new descendants were pulled'),
5373 5373 ),
5374 5374 (
5375 5375 b'f',
5376 5376 b'force',
5377 5377 None,
5378 5378 _(b'run even when remote repository is unrelated'),
5379 5379 ),
5380 5380 (
5381 5381 b'',
5382 5382 b'confirm',
5383 5383 None,
5384 5384 _(b'confirm pull before applying changes'),
5385 5385 ),
5386 5386 (
5387 5387 b'r',
5388 5388 b'rev',
5389 5389 [],
5390 5390 _(b'a remote changeset intended to be added'),
5391 5391 _(b'REV'),
5392 5392 ),
5393 5393 (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
5394 5394 (
5395 5395 b'b',
5396 5396 b'branch',
5397 5397 [],
5398 5398 _(b'a specific branch you would like to pull'),
5399 5399 _(b'BRANCH'),
5400 5400 ),
5401 5401 ]
5402 5402 + remoteopts,
5403 5403 _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'),
5404 5404 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5405 5405 helpbasic=True,
5406 5406 )
5407 5407 def pull(ui, repo, *sources, **opts):
5408 5408 """pull changes from the specified source
5409 5409
5410 5410 Pull changes from a remote repository to a local one.
5411 5411
5412 5412 This finds all changes from the repository at the specified path
5413 5413 or URL and adds them to a local repository (the current one unless
5414 5414 -R is specified). By default, this does not update the copy of the
5415 5415 project in the working directory.
5416 5416
5417 5417 When cloning from servers that support it, Mercurial may fetch
5418 5418 pre-generated data. When this is done, hooks operating on incoming
5419 5419 changesets and changegroups may fire more than once, once for each
5420 5420 pre-generated bundle and as well as for any additional remaining
5421 5421 data. See :hg:`help -e clonebundles` for more.
5422 5422
5423 5423 Use :hg:`incoming` if you want to see what would have been added
5424 5424 by a pull at the time you issued this command. If you then decide
5425 5425 to add those changes to the repository, you should use :hg:`pull
5426 5426 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5427 5427
5428 5428 If SOURCE is omitted, the 'default' path will be used.
5429 5429 See :hg:`help urls` for more information.
5430 5430
5431 5431 If multiple sources are specified, they will be pulled sequentially as if
5432 5432 the command was run multiple time. If --update is specify and the command
5433 5433 will stop at the first failed --update.
5434 5434
5435 5435 Specifying bookmark as ``.`` is equivalent to specifying the active
5436 5436 bookmark's name.
5437 5437
5438 5438 Returns 0 on success, 1 if an update had unresolved files.
5439 5439 """
5440 5440
5441 5441 opts = pycompat.byteskwargs(opts)
5442 5442 if ui.configbool(b'commands', b'update.requiredest') and opts.get(
5443 5443 b'update'
5444 5444 ):
5445 5445 msg = _(b'update destination required by configuration')
5446 5446 hint = _(b'use hg pull followed by hg update DEST')
5447 5447 raise error.InputError(msg, hint=hint)
5448 5448
5449 5449 for path in urlutil.get_pull_paths(repo, ui, sources):
5450 5450 ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
5451 5451 ui.flush()
5452 5452 other = hg.peer(repo, opts, path)
5453 5453 update_conflict = None
5454 5454 try:
5455 5455 branches = (path.branch, opts.get(b'branch', []))
5456 5456 revs, checkout = hg.addbranchrevs(
5457 5457 repo, other, branches, opts.get(b'rev')
5458 5458 )
5459 5459
5460 5460 pullopargs = {}
5461 5461
5462 5462 nodes = None
5463 5463 if opts.get(b'bookmark') or revs:
5464 5464 # The list of bookmark used here is the same used to actually update
5465 5465 # the bookmark names, to avoid the race from issue 4689 and we do
5466 5466 # all lookup and bookmark queries in one go so they see the same
5467 5467 # version of the server state (issue 4700).
5468 5468 nodes = []
5469 5469 fnodes = []
5470 5470 revs = revs or []
5471 5471 if revs and not other.capable(b'lookup'):
5472 5472 err = _(
5473 5473 b"other repository doesn't support revision lookup, "
5474 5474 b"so a rev cannot be specified."
5475 5475 )
5476 5476 raise error.Abort(err)
5477 5477 with other.commandexecutor() as e:
5478 5478 fremotebookmarks = e.callcommand(
5479 5479 b'listkeys', {b'namespace': b'bookmarks'}
5480 5480 )
5481 5481 for r in revs:
5482 5482 fnodes.append(e.callcommand(b'lookup', {b'key': r}))
5483 5483 remotebookmarks = fremotebookmarks.result()
5484 5484 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
5485 5485 pullopargs[b'remotebookmarks'] = remotebookmarks
5486 5486 for b in opts.get(b'bookmark', []):
5487 5487 b = repo._bookmarks.expandname(b)
5488 5488 if b not in remotebookmarks:
5489 5489 raise error.InputError(
5490 5490 _(b'remote bookmark %s not found!') % b
5491 5491 )
5492 5492 nodes.append(remotebookmarks[b])
5493 5493 for i, rev in enumerate(revs):
5494 5494 node = fnodes[i].result()
5495 5495 nodes.append(node)
5496 5496 if rev == checkout:
5497 5497 checkout = node
5498 5498
5499 5499 wlock = util.nullcontextmanager()
5500 5500 if opts.get(b'update'):
5501 5501 wlock = repo.wlock()
5502 5502 with wlock:
5503 5503 pullopargs.update(opts.get(b'opargs', {}))
5504 5504 modheads = exchange.pull(
5505 5505 repo,
5506 5506 other,
5507 5507 path=path,
5508 5508 heads=nodes,
5509 5509 force=opts.get(b'force'),
5510 5510 bookmarks=opts.get(b'bookmark', ()),
5511 5511 opargs=pullopargs,
5512 5512 confirm=opts.get(b'confirm'),
5513 5513 ).cgresult
5514 5514
5515 5515 # brev is a name, which might be a bookmark to be activated at
5516 5516 # the end of the update. In other words, it is an explicit
5517 5517 # destination of the update
5518 5518 brev = None
5519 5519
5520 5520 if checkout:
5521 5521 checkout = repo.unfiltered().changelog.rev(checkout)
5522 5522
5523 5523 # order below depends on implementation of
5524 5524 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5525 5525 # because 'checkout' is determined without it.
5526 5526 if opts.get(b'rev'):
5527 5527 brev = opts[b'rev'][0]
5528 5528 elif opts.get(b'branch'):
5529 5529 brev = opts[b'branch'][0]
5530 5530 else:
5531 5531 brev = path.branch
5532 5532
5533 5533 # XXX path: we are losing the `path` object here. Keeping it
5534 5534 # would be valuable. For example as a "variant" as we do
5535 5535 # for pushes.
5536 5536 repo._subtoppath = path.loc
5537 5537 try:
5538 5538 update_conflict = postincoming(
5539 5539 ui, repo, modheads, opts.get(b'update'), checkout, brev
5540 5540 )
5541 5541 except error.FilteredRepoLookupError as exc:
5542 5542 msg = _(b'cannot update to target: %s') % exc.args[0]
5543 5543 exc.args = (msg,) + exc.args[1:]
5544 5544 raise
5545 5545 finally:
5546 5546 del repo._subtoppath
5547 5547
5548 5548 finally:
5549 5549 other.close()
5550 5550 # skip the remaining pull source if they are some conflict.
5551 5551 if update_conflict:
5552 5552 break
5553 5553 if update_conflict:
5554 5554 return 1
5555 5555 else:
5556 5556 return 0
5557 5557
5558 5558
@command(
    b'purge|clean',
    [
        (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
        (b'', b'all', None, _(b'purge ignored files too')),
        (b'i', b'ignored', None, _(b'purge only ignored files')),
        (b'', b'dirs', None, _(b'purge empty directories')),
        (b'', b'files', None, _(b'purge files')),
        (b'p', b'print', None, _(b'print filenames instead of deleting them')),
        (
            b'0',
            b'print0',
            None,
            _(
                b'end filenames with NUL, for use with xargs'
                b' (implies -p/--print)'
            ),
        ),
        (b'', b'confirm', None, _(b'ask before permanently deleting files')),
    ]
    + cmdutil.walkopts,
    _(b'hg purge [OPTION]... [DIR]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def purge(ui, repo, *dirs, **opts):
    """removes files not tracked by Mercurial

    Delete files not known to Mercurial. This is useful to test local
    and uncommitted changes in an otherwise-clean source tree.

    This means that purge will delete the following by default:

    - Unknown files: files marked with "?" by :hg:`status`
    - Empty directories: in fact Mercurial ignores directories unless
      they contain files under source control management

    But it will leave untouched:

    - Modified and unmodified tracked files
    - Ignored files (unless -i or --all is specified)
    - New files added to the repository (with :hg:`add`)

    The --files and --dirs options can be used to direct purge to delete
    only files, only directories, or both. If neither option is given,
    both will be deleted.

    If directories are given on the command line, only files in these
    directories are considered.

    Be careful with purge, as you could irreversibly delete some files
    you forgot to add to the repository. If you only want to print the
    list of files that this program would delete, use the --print
    option.
    """
    opts = pycompat.byteskwargs(opts)
    cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')

    # --print/--print0 turn the command into a listing-only run (no
    # deletion); --print0 additionally switches to NUL-terminated output.
    print0 = bool(opts.get(b'print0'))
    act = not (opts.get(b'print') or print0)
    eol = b'\0' if print0 else b'\n'

    # --all selects both ignored and unknown files; otherwise --ignored
    # selects only ignored files and the default is only unknown files.
    if opts.get(b'all', False):
        ignored = unknown = True
    else:
        ignored = opts.get(b'ignored', False)
        unknown = not ignored

    removefiles = opts.get(b'files')
    removedirs = opts.get(b'dirs')
    # neither --files nor --dirs given: remove both
    if not (removefiles or removedirs):
        removefiles = removedirs = True

    # Default: prompt before deleting, unless the 'purge' extension is
    # enabled, in which case no confirmation is requested.
    confirm = opts.get(b'confirm')
    if confirm is None:
        try:
            extensions.find(b'purge')
        except KeyError:
            confirm = True
        else:
            confirm = False

    match = scmutil.match(repo[None], dirs, opts)

    paths = mergemod.purge(
        repo,
        match,
        unknown=unknown,
        ignored=ignored,
        removeemptydirs=removedirs,
        removefiles=removefiles,
        abortonerror=opts.get(b'abort_on_err'),
        noop=not act,
        confirm=confirm,
    )

    # Walk every reported path (the original code iterates
    # unconditionally, so keep that shape); only print in no-act mode.
    for affected in paths:
        if not act:
            ui.write(b'%s%s' % (affected, eol))
5659 5659
5660 5660
@command(
    b'push',
    [
        (b'f', b'force', None, _(b'force push')),
        (
            b'r',
            b'rev',
            [],
            _(b'a changeset intended to be included in the destination'),
            _(b'REV'),
        ),
        (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
        (b'', b'all-bookmarks', None, _(b"push all bookmarks (EXPERIMENTAL)")),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to push'),
            _(b'BRANCH'),
        ),
        (b'', b'new-branch', False, _(b'allow pushing a new branch')),
        (
            b'',
            b'pushvars',
            [],
            _(b'variables that can be sent to server (ADVANCED)'),
        ),
        (
            b'',
            b'publish',
            False,
            _(b'push the changeset as public (EXPERIMENTAL)'),
        ),
    ]
    + remoteopts,
    _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    helpbasic=True,
)
def push(ui, repo, *dests, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name. Use the --all-bookmarks option for pushing all
    current bookmarks.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    When passed multiple destinations, push will process them one after the
    other, but stop should an error occur.

    .. container:: verbose

      The --pushvars option sends strings to the server that become
      environment variables prepended with ``HG_USERVAR_``. For example,
      ``--pushvars ENABLE_FEATURE=true``, provides the server side hooks with
      ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.

      pushvars can provide for user-overridable hooks as well as set debug
      levels. One example is having a hook that blocks commits containing
      conflict markers, but enables the user to override the hook if the file
      is using conflict markers for testing purposes or the file format has
      strings that look like conflict markers.

      By default, servers will ignore `--pushvars`. To enable it add the
      following to your configuration file::

        [push]
        pushvars.server = true

    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)

    # --all-bookmarks expands to an explicit -B list of every local
    # bookmark; it cannot be combined with -B or -r.
    if opts.get(b'all_bookmarks'):
        cmdutil.check_incompatible_arguments(
            opts,
            b'all_bookmarks',
            [b'bookmark', b'rev'],
        )
        opts[b'bookmark'] = list(repo._bookmarks)

    if opts.get(b'bookmark'):
        ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
        for b in opts[b'bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault(b'rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault(b'rev', []).append(b"null")

    # Aggregate exit status across destinations: 0 = success,
    # 1 = nothing pushed anywhere, 2 = bookmark push issue (see below).
    some_pushed = False
    result = 0
    for path in urlutil.get_push_paths(repo, ui, dests):
        dest = path.loc
        branches = (path.branch, opts.get(b'branch') or [])
        ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
        revs, checkout = hg.addbranchrevs(
            repo, repo, branches, opts.get(b'rev')
        )
        other = hg.peer(repo, opts, dest)

        try:
            if revs:
                revs = [repo[r].node() for r in logcmdutil.revrange(repo, revs)]
                if not revs:
                    raise error.InputError(
                        _(b"specified revisions evaluate to an empty set"),
                        hint=_(b"use different revision arguments"),
                    )
            elif path.pushrev:
                # It doesn't make any sense to specify ancestor revisions. So limit
                # to DAG heads to make discovery simpler.
                expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
                revs = scmutil.revrange(repo, [expr])
                revs = [repo[rev].node() for rev in revs]
                if not revs:
                    raise error.InputError(
                        _(
                            b'default push revset for path evaluates to an empty set'
                        )
                    )
            elif ui.configbool(b'commands', b'push.require-revs'):
                raise error.InputError(
                    _(b'no revisions specified to push'),
                    hint=_(b'did you mean "hg push -r ."?'),
                )

            repo._subtoppath = dest
            try:
                # push subrepos depth-first for coherent ordering
                c = repo[b'.']
                subs = c.substate  # only repos that are committed
                for s in sorted(subs):
                    # NOTE(review): a falsy subrepo push result is treated
                    # as an overall failure (exit 1) -- confirm against the
                    # subrepo.push() return-value convention.
                    sub_result = c.sub(s).push(opts)
                    if sub_result == 0:
                        return 1
            finally:
                del repo._subtoppath

            opargs = dict(
                opts.get(b'opargs', {})
            )  # copy opargs since we may mutate it
            opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))

            pushop = exchange.push(
                repo,
                other,
                opts.get(b'force'),
                revs=revs,
                newbranch=opts.get(b'new_branch'),
                bookmarks=opts.get(b'bookmark', ()),
                publish=opts.get(b'publish'),
                opargs=opargs,
            )

            # cgresult == 0 means the changegroup push failed/pushed nothing;
            # any other non-None value counts as having pushed something.
            if pushop.cgresult == 0:
                result = 1
            elif pushop.cgresult is not None:
                some_pushed = True

            if pushop.bkresult is not None:
                if pushop.bkresult == 2:
                    result = 2
                elif not result and pushop.bkresult:
                    result = 2

            # stop at the first destination that reported a problem
            if result:
                break

        finally:
            other.close()
    if result == 0 and not some_pushed:
        result = 1
    return result
5867 5867
5868 5868
@command(
    b'recover',
    [
        (b'', b'verify', False, b"run `hg verify` after successful recover"),
    ],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def recover(ui, repo, **opts):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # guard clause: nothing to recover
    if not repo.recover():
        return 1
    # note: opts is not run through byteskwargs here, hence the str key
    if opts['verify']:
        return hg.verify(repo)
    msg = _(
        b"(verify step skipped, run `hg verify` to check your "
        b"repository content)\n"
    )
    ui.warn(msg)
    return 0
5899 5899
5900 5900
@command(
    b'remove|rm',
    [
        (b'A', b'after', None, _(b'record delete for missing files')),
        (b'f', b'force', None, _(b'forget added files, delete modified files')),
    ]
    + subrepoopts
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after = opts.get(b'after')
    force = opts.get(b'force')
    # without file patterns, only the --after form makes sense
    if not (pats or after):
        raise error.InputError(_(b'no files specified'))

    # take the wlock and open a dirstate file-change context for the
    # duration of the removal
    with repo.wlock(), repo.dirstate.changing_files(repo):
        matcher = scmutil.match(repo[None], pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        return cmdutil.remove(
            ui,
            repo,
            matcher,
            b"",
            uipathfn,
            after,
            force,
            opts.get(b'subrepos'),
            dryrun=opts.get(b'dry_run'),
        )
5967 5967
5968 5968
@command(
    b'rename|move|mv',
    [
        (b'', b'forget', None, _(b'unmark a destination file as renamed')),
        (b'A', b'after', None, _(b'record a rename that has already occurred')),
        (
            b'',
            b'at-rev',
            b'',
            _(b'(un)mark renames in the given revision (EXPERIMENTAL)'),
            _(b'REV'),
        ),
        (
            b'f',
            b'force',
            None,
            _(b'forcibly move over an existing managed file'),
        ),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... SOURCE... DEST'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    To undo marking a destination file as renamed, use --forget. With that
    option, all given (positional) arguments are unmarked as renames. The
    destination file(s) will be left in place (still tracked). The source
    file(s) will not be restored. Note that :hg:`rename --forget` behaves
    the same way as :hg:`copy --forget`.

    This command takes effect with the next commit by default.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    # Wrap the dirstate context in a callable so that (a) it can be swapped
    # for a no-op below and (b) `repo.dirstate` is only resolved when the
    # context is actually entered, i.e. under the wlock. Binding the method
    # eagerly here could lead to a traceback (fixed in this changeset).
    context = lambda repo: repo.dirstate.changing_files(repo)
    rev = opts.get(b'at_rev')
    ctx = None
    if rev:
        ctx = logcmdutil.revsingle(repo, rev)
        if ctx.rev() is not None:
            # --at-rev on a committed revision rewrites history rather than
            # touching the working copy, so no dirstate context is needed.
            def context(repo):
                return util.nullcontextmanager()

        opts[b'at_rev'] = ctx.rev()
    with repo.wlock(), context(repo):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
6028 6028
6029 6029
@command(
    b'resolve',
    [
        (b'a', b'all', None, _(b'select all unresolved files')),
        (b'l', b'list', None, _(b'list state of files needing merge')),
        (b'm', b'mark', None, _(b'mark files as resolved')),
        (b'u', b'unmark', None, _(b'mark files as unresolved')),
        (b'n', b'no-status', None, _(b'hide status prefix')),
        (b'', b're-merge', None, _(b're-merge files')),
    ]
    + mergetoolopts
    + walkopts
    + formatteropts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    inferrepo=True,
)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
      the specified files, discarding any previous merge attempts. Re-merging
      is not performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
      :path: String. Repository-absolute path of the file.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    opts = pycompat.byteskwargs(opts)
    confirm = ui.configbool(b'commands', b'resolve.confirm')
    flaglist = b'all mark unmark list no_status re_merge'.split()
    all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]

    # at most one of --list/--mark/--unmark/--re-merge may be given
    actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
    if actioncount > 1:
        raise error.InputError(_(b"too many actions specified"))
    elif actioncount == 0 and ui.configbool(
        b'commands', b'resolve.explicit-re-merge'
    ):
        hint = _(b'use --mark, --unmark, --list or --re-merge')
        raise error.InputError(_(b'no action specified'), hint=hint)
    if pats and all:
        raise error.InputError(_(b"can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.InputError(
            _(b'no files or directories specified'),
            hint=b'use --all to re-merge all unresolved files',
        )

    # commands.resolve.confirm: prompt before bulk (pattern-less) operations
    if confirm:
        if all:
            if ui.promptchoice(
                _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
            ):
                raise error.CanceledError(_(b'user quit'))
        if mark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all unresolved files as resolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.CanceledError(_(b'user quit'))
        if unmark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all resolved files as unresolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.CanceledError(_(b'user quit'))

    uipathfn = scmutil.getuipathfn(repo)

    # --list: read-only rendering of the merge state; no wlock taken
    if show:
        ui.pager(b'resolve')
        fm = ui.formatter(b'resolve', opts)
        ms = mergestatemod.mergestate.read(repo)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # Labels and keys based on merge state. Unresolved path conflicts show
        # as 'P'. Resolved path conflicts show as 'R', the same as normal
        # resolved conflicts.
        mergestateinfo = {
            mergestatemod.MERGE_RECORD_UNRESOLVED: (
                b'resolve.unresolved',
                b'U',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
            mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
                b'resolve.unresolved',
                b'P',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
                b'resolve.resolved',
                b'R',
            ),
        }

        for f in ms:
            if not m(f):
                continue

            label, key = mergestateinfo[ms[f]]
            fm.startitem()
            fm.context(ctx=wctx)
            fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
            fm.data(path=f)
            fm.plain(b'%s\n' % uipathfn(f), label=label)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergestatemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != repo.nullid):
            raise error.StateError(
                _(b'resolve command not applicable when not merging')
            )

        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False

        # files (re)marked as resolved that still contain conflict markers
        hasconflictmarkers = []
        if mark:
            markcheck = ui.config(b'commands', b'resolve.mark-check')
            if markcheck not in [b'warn', b'abort']:
                # Treat all invalid / unrecognized values as 'none'.
                markcheck = False
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # path conflicts must be resolved manually
            if ms[f] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                if mark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
                elif unmark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
                elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
                    ui.warn(
                        _(b'%s: path conflict must be resolved manually\n')
                        % uipathfn(f)
                    )
                continue

            if mark:
                if markcheck:
                    fdata = repo.wvfs.tryread(f)
                    if (
                        filemerge.hasconflictmarkers(fdata)
                        and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
                    ):
                        hasconflictmarkers.append(f)
                ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
            elif unmark:
                ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + b".resolve")
                except FileNotFoundError:
                    pass

                try:
                    # preresolve file
                    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                    with ui.configoverride(overrides, b'resolve'):
                        r = ms.resolve(f, wctx)
                        if r:
                            ret = 1
                finally:
                    # persist merge-state changes even if the resolve raised
                    ms.commit()

                # replace filemerge's .orig file with our resolve file
                try:
                    util.rename(
                        a + b".resolve", scmutil.backuppath(ui, repo, f)
                    )
                except FileNotFoundError:
                    pass

        if hasconflictmarkers:
            ui.warn(
                _(
                    b'warning: the following files still have conflict '
                    b'markers:\n'
                )
                + b''.join(
                    b'  ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
                )
            )
            if markcheck == b'abort' and not all and not pats:
                raise error.StateError(
                    _(b'conflict markers detected'),
                    hint=_(b'use --all to mark anyway'),
                )

        ms.commit()
        branchmerge = repo.dirstate.p2() != repo.nullid
        # resolve is not doing a parent change here, however, `record updates`
        # will call some dirstate API that at intended for parent changes call.
        # Ideally we would not need this and could implement a lighter version
        # of the recordupdateslogic that will not have to deal with the part
        # related to parent changes. However this would requires that:
        # - we are sure we passed around enough information at update/merge
        #   time to no longer needs it at `hg resolve time`
        # - we are sure we store that information well enough to be able to reuse it
        # - we are the necessary logic to reuse it right.
        #
        # All this should eventually happens, but in the mean time, we use this
        # context manager slightly out of the context it should be.
        with repo.dirstate.changing_parents(repo):
            mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)

        # the given patterns matched nothing: suggest an explicit path: form
        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(b':') >= 0]):
                pats = [b'path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue

                    def flag(o):
                        if o == b're_merge':
                            return b'--re-merge '
                        return b'-%s ' % o[0:1]

                    flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
                    hint = _(b"(try: hg resolve %s%s)\n") % (
                        flags,
                        b' '.join(pats),
                    )
                    break
            ui.warn(_(b"arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)

        unresolvedf = ms.unresolvedcount()
        if not unresolvedf:
            ui.status(_(b'(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)

    return ret
6325 6325
6326 6326
@command(
    b'revert',
    [
        (b'a', b'all', None, _(b'revert all changes when no arguments given')),
        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
        (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
        (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
        (b'i', b'interactive', None, _(b'interactively select the changes')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV] [NAME]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    # --date is resolved into an equivalent --rev before anything else
    if opts.get(b"date"):
        cmdutil.check_incompatible_arguments(opts, b'date', [b'rev'])
        opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])

    parent, p2 = repo.dirstate.parents()
    rev = opts.get(b'rev')
    in_merge = p2 != repo.nullid
    if in_merge and not rev:
        # revert after merge is a trap for new users (issue2915)
        raise error.InputError(
            _(b'uncommitted merge with no revision specified'),
            hint=_(b"use 'hg update' or see 'hg help revert'"),
        )

    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)

    # without an explicit file selection (patterns, include/exclude, --all
    # or --interactive) we refuse to run and instead compute the most
    # helpful hint for the user's situation
    has_selection = (
        pats
        or opts.get(b'include')
        or opts.get(b'exclude')
        or opts.get(b'all')
        or opts.get(b'interactive')
    )
    if not has_selection:
        msg = _(b"no files or directories specified")
        if in_merge:
            hint = _(
                b"uncommitted merge, use --all to discard all changes,"
                b" or 'hg update -C .' to abort the merge"
            )
            raise error.InputError(msg, hint=hint)
        dirty = any(repo.status())
        if ctx.node() != parent:
            # reverting to a revision other than the working dir parent
            if dirty:
                hint = (
                    _(
                        b"uncommitted changes, use --all to discard all"
                        b" changes, or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
            else:
                hint = (
                    _(
                        b"use --all to revert all files,"
                        b" or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
        elif dirty:
            hint = _(b"uncommitted changes, use --all to discard all changes")
        else:
            hint = _(b"use --all to revert all files")
        raise error.InputError(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))
6436 6436
6437 6437
@command(
    b'rollback',
    dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # honour the ui.rollback kill switch before touching the repository
    enabled = ui.configbool(b'ui', b'rollback')
    if not enabled:
        raise error.Abort(
            _(b'rollback is disabled because it is unsafe'),
            hint=b'see `hg help -v rollback` for information',
        )
    # opts arrives with native str keys (no byteskwargs conversion here)
    dry_run = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dry_run, force=force)
6494 6494
6495 6495
@command(
    b'root',
    [] + formatteropts,
    intents={INTENT_READONLY},
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def root(ui, repo, **opts):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :hgpath: String. Path to the .hg directory.
      :storepath: String. Path to the directory holding versioned data.

    Returns 0 on success.
    """
    fm_opts = pycompat.byteskwargs(opts)
    # the formatter context manager flushes structured output on exit
    with ui.formatter(b'root', fm_opts) as fm:
        fm.startitem()
        # plain output shows only the root; hgpath/storepath are
        # template-only extras
        fm.write(b'reporoot', b'%s\n', repo.root)
        fm.data(hgpath=repo.path, storepath=repo.spath)
6525 6525
@command(
    b'serve',
    [
        (
            b'A',
            b'accesslog',
            b'',
            _(b'name of access log file to write to'),
            _(b'FILE'),
        ),
        (b'd', b'daemon', None, _(b'run server in background')),
        (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')),
        (
            b'E',
            b'errorlog',
            b'',
            _(b'name of error log file to write to'),
            _(b'FILE'),
        ),
        # use string type, then we can check if something was passed
        (
            b'p',
            b'port',
            b'',
            _(b'port to listen on (default: 8000)'),
            _(b'PORT'),
        ),
        (
            b'a',
            b'address',
            b'',
            _(b'address to listen on (default: all interfaces)'),
            _(b'ADDR'),
        ),
        (
            b'',
            b'prefix',
            b'',
            _(b'prefix path to serve from (default: server root)'),
            _(b'PREFIX'),
        ),
        (
            b'n',
            b'name',
            b'',
            _(b'name to show in web pages (default: working directory)'),
            _(b'NAME'),
        ),
        (
            b'',
            b'web-conf',
            b'',
            _(b"name of the hgweb config file (see 'hg help hgweb')"),
            _(b'FILE'),
        ),
        (
            b'',
            b'webdir-conf',
            b'',
            _(b'name of the hgweb config file (DEPRECATED)'),
            _(b'FILE'),
        ),
        (
            b'',
            b'pid-file',
            b'',
            _(b'name of file to write process ID to'),
            _(b'FILE'),
        ),
        (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')),
        (
            b'',
            b'cmdserver',
            b'',
            _(b'for remote clients (ADVANCED)'),
            _(b'MODE'),
        ),
        (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')),
        (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')),
        (b'6', b'ipv6', None, _(b'use IPv6 in addition to IPv4')),
        (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')),
        (b'', b'print-url', None, _(b'start and print only the URL')),
    ]
    + subrepoopts,
    _(b'[OPTION]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    helpbasic=True,
    optionalrepo=True,
)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow-push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver select different client transports and are
    # mutually exclusive (checked on native-str keys, before byteskwargs)
    cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver'])
    opts = pycompat.byteskwargs(opts)
    if opts[b"print_url"] and ui.verbose:
        raise error.InputError(_(b"cannot use --print-url with --verbose"))

    if opts[b"stdio"]:
        # serve the SSH wire protocol over stdin/stdout; this needs an
        # actual repository (the command itself is optionalrepo)
        if repo is None:
            raise error.RepoError(
                _(b"there is no Mercurial repository here (.hg not found)")
            )
        s = wireprotoserver.sshserver(ui, repo)
        # blocks until the client disconnects
        s.serve_forever()
        return

    # all other modes (hgweb HTTP server, command server) are built and
    # run through the generic service machinery
    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)
6656 6656
6657 6657
@command(
    b'shelve',
    [
        (
            b'A',
            b'addremove',
            None,
            _(b'mark new/missing files as added/removed before shelving'),
        ),
        (b'u', b'unknown', None, _(b'store unknown files in the shelve')),
        (b'', b'cleanup', None, _(b'delete all shelved changes')),
        (
            b'',
            b'date',
            b'',
            _(b'shelve with the specified commit date'),
            _(b'DATE'),
        ),
        (b'd', b'delete', None, _(b'delete the named shelved change(s)')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (
            b'k',
            b'keep',
            False,
            _(b'shelve, but keep changes in the working directory'),
        ),
        (b'l', b'list', None, _(b'list current shelves')),
        (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')),
        (
            b'n',
            b'name',
            b'',
            _(b'use the given name for the shelved commit'),
            _(b'NAME'),
        ),
        (
            b'p',
            b'patch',
            None,
            _(
                b'output patches for changes (provide the names of the shelved '
                b'changes as positional arguments)'
            ),
        ),
        (b'i', b'interactive', None, _(b'interactive mode')),
        (
            b'',
            b'stat',
            None,
            _(
                b'output diffstat-style summary of changes (provide the names of '
                b'the shelved changes as positional arguments)'
            ),
        ),
    ]
    + cmdutil.walkopts,
    _(b'hg shelve [OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def shelve(ui, repo, *pats, **opts):
    """save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, use "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    In bare shelve (when no files are specified, without interactive,
    include and exclude option), shelving remembers information if the
    working directory was on newly created branch, in other words working
    directory was on different branch than its first parent. In this
    situation unshelving restores branch information to the working directory.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch. To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    """
    opts = pycompat.byteskwargs(opts)
    # Compatibility matrix: each entry maps an option to the set of
    # sub-command "actions" it may be combined with.  checkopt() below walks
    # this list to reject contradictory invocations such as
    # `hg shelve --list --delete`.
    allowables = [
        (b'addremove', {b'create'}),  # 'create' is pseudo action
        (b'unknown', {b'create'}),
        (b'cleanup', {b'cleanup'}),
        # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
        (b'delete', {b'delete'}),
        (b'edit', {b'create'}),
        (b'keep', {b'create'}),
        (b'list', {b'list'}),
        (b'message', {b'create'}),
        (b'name', {b'create'}),
        (b'patch', {b'patch', b'list'}),
        (b'stat', {b'stat', b'list'}),
    ]

    def checkopt(opt):
        # If `opt` was given on the command line, verify that every other
        # given option tolerates `opt`'s action; raise InputError otherwise.
        # Returns True when `opt` was given (i.e. this action is selected).
        if opts.get(opt):
            for i, allowable in allowables:
                if opts[i] and opt not in allowable:
                    raise error.InputError(
                        _(
                            b"options '--%s' and '--%s' may not be "
                            b"used together"
                        )
                        % (opt, i)
                    )
            return True

    # Dispatch to the action selected by the options; creating a new shelve
    # is the default when no action option is passed.
    if checkopt(b'cleanup'):
        if pats:
            raise error.InputError(
                _(b"cannot specify names when using '--cleanup'")
            )
        return shelvemod.cleanupcmd(ui, repo)
    elif checkopt(b'delete'):
        return shelvemod.deletecmd(ui, repo, pats)
    elif checkopt(b'list'):
        return shelvemod.listcmd(ui, repo, pats, opts)
    elif checkopt(b'patch') or checkopt(b'stat'):
        return shelvemod.patchcmds(ui, repo, pats, opts)
    else:
        return shelvemod.createcmd(ui, repo, pats, opts)
6793 6793
6794 6794
6795 6795 _NOTTERSE = b'nothing'
6796 6796
6797 6797
@command(
    b'status|st',
    [
        (b'A', b'all', None, _(b'show status of all files')),
        (b'm', b'modified', None, _(b'show only modified files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
        (b'd', b'deleted', None, _(b'show only missing files')),
        (b'c', b'clean', None, _(b'show only files without changes')),
        (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')),
        (b'i', b'ignored', None, _(b'show only ignored files')),
        (b'n', b'no-status', None, _(b'hide status prefix')),
        (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
        (
            b'C',
            b'copies',
            None,
            _(b'show source of copied files (DEFAULT: ui.statuscopies)'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')),
        (
            b'',
            b'change',
            b'',
            _(b'list the changed files of a revision'),
            _(b'REV'),
        ),
    ]
    + walkopts
    + subrepoopts
    + formatteropts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      The -t/--terse option abbreviates the output by showing only the directory
      name if all the files in it share the same status. The option takes an
      argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
      for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
      for 'ignored' and 'c' for clean.

      It abbreviates only those statuses which are passed. Note that clean and
      ignored files are not displayed with '--terse ic' unless the -c/--clean
      and -i/--ignored options are also used.

      The -v/--verbose option shows information when the repository is in an
      unfinished merge, shelve, rebase state etc. You can have this behavior
      turned on by default by enabling the ``commands.status.verbose`` option.

      You can skip displaying some of these states by setting
      ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
      'histedit', 'merge', 'rebase', or 'unshelve'.

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :path: String. Repository-absolute path of the file.
      :source: String. Repository-absolute path of the file originated from.
        Available if ``--copies`` is specified.
      :status: String. Character denoting file's status.

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

      - show more information about the repository status, abbreviating
        added, removed, modified, deleted, and untracked paths::

          hg status -v -t mardu

    Returns 0 on success.

    """

    cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev', [])
    change = opts.get(b'change', b'')
    terse = opts.get(b'terse', _NOTTERSE)
    # --terse not given: fall back to config, except that terse output is not
    # supported together with --rev
    if terse is _NOTTERSE:
        if revs:
            terse = b''
        else:
            terse = ui.config(b'commands', b'status.terse')

    if revs and terse:
        msg = _(b'cannot use --terse with --rev')
        raise error.InputError(msg)
    elif change:
        # --change REV compares REV against its first parent
        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
        ctx2 = logcmdutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        # zero, one or two --rev values; revpair picks the comparison pair
        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
        ctx1, ctx2 = logcmdutil.revpair(repo, revs)

    # commands.status.relative overrides the legacy "relative iff patterns
    # were given" display behavior
    forcerelativevalue = None
    if ui.hasconfig(b'commands', b'status.relative'):
        forcerelativevalue = ui.configbool(b'commands', b'status.relative')
    uipathfn = scmutil.getuipathfn(
        repo,
        legacyrelativevalue=bool(pats),
        forcerelativevalue=forcerelativevalue,
    )

    if opts.get(b'print0'):
        end = b'\0'
    else:
        end = b'\n'
    # the order of `states` matches the status letters in b'MAR!?IC' below
    states = b'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get(b'all'):
        show += ui.quiet and (states[:4] + [b'clean']) or states

    if not show:
        # no explicit selection: default to -mardu (-mard when quiet)
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(ctx2, pats, opts)
    if terse:
        # we need to compute clean and unknown to terse
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show or b'i' in terse,
            clean=True,
            unknown=True,
            listsubrepos=opts.get(b'subrepos'),
        )

        stat = cmdutil.tersedir(stat, terse)
    else:
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show,
            b'clean' in show,
            b'unknown' in show,
            opts.get(b'subrepos'),
        )

    # (state name, status letter, file list) triples in display order
    changestates = zip(
        states,
        pycompat.iterbytestr(b'MAR!?IC'),
        [getattr(stat, s.decode('utf8')) for s in states],
    )

    # copy sources are only computed when they will actually be displayed:
    # requested via --copies/ui.statuscopies or implied by --all, and not
    # suppressed by --no-status
    copy = {}
    show_copies = ui.configbool(b'ui', b'statuscopies')
    if opts.get(b'copies') is not None:
        show_copies = opts.get(b'copies')
    show_copies = (show_copies or opts.get(b'all')) and not opts.get(
        b'no_status'
    )
    if show_copies:
        copy = copies.pathcopies(ctx1, ctx2, m)

    # extra "unfinished operation" details shown in verbose mode, skipped
    # for plain/script-friendly output
    morestatus = None
    if (
        (ui.verbose or ui.configbool(b'commands', b'status.verbose'))
        and not ui.plain()
        and not opts.get(b'print0')
    ):
        morestatus = cmdutil.readmorestatus(repo)

    ui.pager(b'status')
    fm = ui.formatter(b'status', opts)
    fmt = b'%s' + end
    showchar = not opts.get(b'no_status')

    for state, char, files in changestates:
        if state in show:
            label = b'status.' + state
            for f in files:
                fm.startitem()
                fm.context(ctx=ctx2)
                fm.data(itemtype=b'file', path=f)
                fm.condwrite(showchar, b'status', b'%s ', char, label=label)
                fm.plain(fmt % uipathfn(f), label=label)
                if f in copy:
                    # copy/rename source, printed on its own indented line
                    fm.data(source=copy[f])
                    fm.plain(
                        (b' %s' + end) % uipathfn(copy[f]),
                        label=b'status.copied',
                    )
                if morestatus:
                    morestatus.formatfile(f, fm)

    if morestatus:
        morestatus.formatfooter(fm)
    fm.end()
7057 7057
7058 7058
7059 7059 @command(
7060 7060 b'summary|sum',
7061 7061 [(b'', b'remote', None, _(b'check for push and pull'))],
7062 7062 b'[--remote]',
7063 7063 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7064 7064 helpbasic=True,
7065 7065 intents={INTENT_READONLY},
7066 7066 )
7067 7067 def summary(ui, repo, **opts):
7068 7068 """summarize working directory state
7069 7069
7070 7070 This generates a brief summary of the working directory state,
7071 7071 including parents, branch, commit status, phase and available updates.
7072 7072
7073 7073 With the --remote option, this will check the default paths for
7074 7074 incoming and outgoing changes. This can be time-consuming.
7075 7075
7076 7076 Returns 0 on success.
7077 7077 """
7078 7078
7079 7079 opts = pycompat.byteskwargs(opts)
7080 7080 ui.pager(b'summary')
7081 7081 ctx = repo[None]
7082 7082 parents = ctx.parents()
7083 7083 pnode = parents[0].node()
7084 7084 marks = []
7085 7085
7086 7086 try:
7087 7087 ms = mergestatemod.mergestate.read(repo)
7088 7088 except error.UnsupportedMergeRecords as e:
7089 7089 s = b' '.join(e.recordtypes)
7090 7090 ui.warn(
7091 7091 _(b'warning: merge state has unsupported record types: %s\n') % s
7092 7092 )
7093 7093 unresolved = []
7094 7094 else:
7095 7095 unresolved = list(ms.unresolved())
7096 7096
7097 7097 for p in parents:
7098 7098 # label with log.changeset (instead of log.parent) since this
7099 7099 # shows a working directory parent *changeset*:
7100 7100 # i18n: column positioning for "hg summary"
7101 7101 ui.write(
7102 7102 _(b'parent: %d:%s ') % (p.rev(), p),
7103 7103 label=logcmdutil.changesetlabels(p),
7104 7104 )
7105 7105 ui.write(b' '.join(p.tags()), label=b'log.tag')
7106 7106 if p.bookmarks():
7107 7107 marks.extend(p.bookmarks())
7108 7108 if p.rev() == -1:
7109 7109 if not len(repo):
7110 7110 ui.write(_(b' (empty repository)'))
7111 7111 else:
7112 7112 ui.write(_(b' (no revision checked out)'))
7113 7113 if p.obsolete():
7114 7114 ui.write(_(b' (obsolete)'))
7115 7115 if p.isunstable():
7116 7116 instabilities = (
7117 7117 ui.label(instability, b'trouble.%s' % instability)
7118 7118 for instability in p.instabilities()
7119 7119 )
7120 7120 ui.write(b' (' + b', '.join(instabilities) + b')')
7121 7121 ui.write(b'\n')
7122 7122 if p.description():
7123 7123 ui.status(
7124 7124 b' ' + p.description().splitlines()[0].strip() + b'\n',
7125 7125 label=b'log.summary',
7126 7126 )
7127 7127
7128 7128 branch = ctx.branch()
7129 7129 bheads = repo.branchheads(branch)
7130 7130 # i18n: column positioning for "hg summary"
7131 7131 m = _(b'branch: %s\n') % branch
7132 7132 if branch != b'default':
7133 7133 ui.write(m, label=b'log.branch')
7134 7134 else:
7135 7135 ui.status(m, label=b'log.branch')
7136 7136
7137 7137 if marks:
7138 7138 active = repo._activebookmark
7139 7139 # i18n: column positioning for "hg summary"
7140 7140 ui.write(_(b'bookmarks:'), label=b'log.bookmark')
7141 7141 if active is not None:
7142 7142 if active in marks:
7143 7143 ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
7144 7144 marks.remove(active)
7145 7145 else:
7146 7146 ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
7147 7147 for m in marks:
7148 7148 ui.write(b' ' + m, label=b'log.bookmark')
7149 7149 ui.write(b'\n', label=b'log.bookmark')
7150 7150
7151 7151 status = repo.status(unknown=True)
7152 7152
7153 7153 c = repo.dirstate.copies()
7154 7154 copied, renamed = [], []
7155 7155 for d, s in c.items():
7156 7156 if s in status.removed:
7157 7157 status.removed.remove(s)
7158 7158 renamed.append(d)
7159 7159 else:
7160 7160 copied.append(d)
7161 7161 if d in status.added:
7162 7162 status.added.remove(d)
7163 7163
7164 7164 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
7165 7165
7166 7166 labels = [
7167 7167 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
7168 7168 (ui.label(_(b'%d added'), b'status.added'), status.added),
7169 7169 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
7170 7170 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
7171 7171 (ui.label(_(b'%d copied'), b'status.copied'), copied),
7172 7172 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
7173 7173 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
7174 7174 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
7175 7175 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
7176 7176 ]
7177 7177 t = []
7178 7178 for l, s in labels:
7179 7179 if s:
7180 7180 t.append(l % len(s))
7181 7181
7182 7182 t = b', '.join(t)
7183 7183 cleanworkdir = False
7184 7184
7185 7185 if repo.vfs.exists(b'graftstate'):
7186 7186 t += _(b' (graft in progress)')
7187 7187 if repo.vfs.exists(b'updatestate'):
7188 7188 t += _(b' (interrupted update)')
7189 7189 elif len(parents) > 1:
7190 7190 t += _(b' (merge)')
7191 7191 elif branch != parents[0].branch():
7192 7192 t += _(b' (new branch)')
7193 7193 elif parents[0].closesbranch() and pnode in repo.branchheads(
7194 7194 branch, closed=True
7195 7195 ):
7196 7196 t += _(b' (head closed)')
7197 7197 elif not (
7198 7198 status.modified
7199 7199 or status.added
7200 7200 or status.removed
7201 7201 or renamed
7202 7202 or copied
7203 7203 or subs
7204 7204 ):
7205 7205 t += _(b' (clean)')
7206 7206 cleanworkdir = True
7207 7207 elif pnode not in bheads:
7208 7208 t += _(b' (new branch head)')
7209 7209
7210 7210 if parents:
7211 7211 pendingphase = max(p.phase() for p in parents)
7212 7212 else:
7213 7213 pendingphase = phases.public
7214 7214
7215 7215 if pendingphase > phases.newcommitphase(ui):
7216 7216 t += b' (%s)' % phases.phasenames[pendingphase]
7217 7217
7218 7218 if cleanworkdir:
7219 7219 # i18n: column positioning for "hg summary"
7220 7220 ui.status(_(b'commit: %s\n') % t.strip())
7221 7221 else:
7222 7222 # i18n: column positioning for "hg summary"
7223 7223 ui.write(_(b'commit: %s\n') % t.strip())
7224 7224
7225 7225 # all ancestors of branch heads - all ancestors of parent = new csets
7226 7226 new = len(
7227 7227 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
7228 7228 )
7229 7229
7230 7230 if new == 0:
7231 7231 # i18n: column positioning for "hg summary"
7232 7232 ui.status(_(b'update: (current)\n'))
7233 7233 elif pnode not in bheads:
7234 7234 # i18n: column positioning for "hg summary"
7235 7235 ui.write(_(b'update: %d new changesets (update)\n') % new)
7236 7236 else:
7237 7237 # i18n: column positioning for "hg summary"
7238 7238 ui.write(
7239 7239 _(b'update: %d new changesets, %d branch heads (merge)\n')
7240 7240 % (new, len(bheads))
7241 7241 )
7242 7242
7243 7243 t = []
7244 7244 draft = len(repo.revs(b'draft()'))
7245 7245 if draft:
7246 7246 t.append(_(b'%d draft') % draft)
7247 7247 secret = len(repo.revs(b'secret()'))
7248 7248 if secret:
7249 7249 t.append(_(b'%d secret') % secret)
7250 7250
7251 7251 if draft or secret:
7252 7252 ui.status(_(b'phases: %s\n') % b', '.join(t))
7253 7253
7254 7254 if obsolete.isenabled(repo, obsolete.createmarkersopt):
7255 7255 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
7256 7256 numtrouble = len(repo.revs(trouble + b"()"))
7257 7257 # We write all the possibilities to ease translation
7258 7258 troublemsg = {
7259 7259 b"orphan": _(b"orphan: %d changesets"),
7260 7260 b"contentdivergent": _(b"content-divergent: %d changesets"),
7261 7261 b"phasedivergent": _(b"phase-divergent: %d changesets"),
7262 7262 }
7263 7263 if numtrouble > 0:
7264 7264 ui.status(troublemsg[trouble] % numtrouble + b"\n")
7265 7265
7266 7266 cmdutil.summaryhooks(ui, repo)
7267 7267
7268 7268 if opts.get(b'remote'):
7269 7269 needsincoming, needsoutgoing = True, True
7270 7270 else:
7271 7271 needsincoming, needsoutgoing = False, False
7272 7272 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
7273 7273 if i:
7274 7274 needsincoming = True
7275 7275 if o:
7276 7276 needsoutgoing = True
7277 7277 if not needsincoming and not needsoutgoing:
7278 7278 return
7279 7279
7280 7280 def getincoming():
7281 7281 # XXX We should actually skip this if no default is specified, instead
7282 7282 # of passing "default" which will resolve as "./default/" if no default
7283 7283 # path is defined.
7284 7284 path = urlutil.get_unique_pull_path_obj(b'summary', ui, b'default')
7285 7285 sbranch = path.branch
7286 7286 try:
7287 7287 other = hg.peer(repo, {}, path)
7288 7288 except error.RepoError:
7289 7289 if opts.get(b'remote'):
7290 7290 raise
7291 7291 return path.loc, sbranch, None, None, None
7292 7292 branches = (path.branch, [])
7293 7293 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
7294 7294 if revs:
7295 7295 revs = [other.lookup(rev) for rev in revs]
7296 7296 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(path.loc))
7297 7297 with repo.ui.silent():
7298 7298 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
7299 7299 return path.loc, sbranch, other, commoninc, commoninc[1]
7300 7300
7301 7301 if needsincoming:
7302 7302 source, sbranch, sother, commoninc, incoming = getincoming()
7303 7303 else:
7304 7304 source = sbranch = sother = commoninc = incoming = None
7305 7305
7306 7306 def getoutgoing():
7307 7307 # XXX We should actually skip this if no default is specified, instead
7308 7308 # of passing "default" which will resolve as "./default/" if no default
7309 7309 # path is defined.
7310 7310 d = None
7311 7311 if b'default-push' in ui.paths:
7312 7312 d = b'default-push'
7313 7313 elif b'default' in ui.paths:
7314 7314 d = b'default'
7315 7315 path = None
7316 7316 if d is not None:
7317 7317 path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
7318 7318 dest = path.loc
7319 7319 dbranch = path.branch
7320 7320 else:
7321 7321 dest = b'default'
7322 7322 dbranch = None
7323 7323 revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
7324 7324 if source != dest:
7325 7325 try:
7326 7326 dother = hg.peer(repo, {}, path if path is not None else dest)
7327 7327 except error.RepoError:
7328 7328 if opts.get(b'remote'):
7329 7329 raise
7330 7330 return dest, dbranch, None, None
7331 7331 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest))
7332 7332 elif sother is None:
7333 7333 # there is no explicit destination peer, but source one is invalid
7334 7334 return dest, dbranch, None, None
7335 7335 else:
7336 7336 dother = sother
7337 7337 if source != dest or (sbranch is not None and sbranch != dbranch):
7338 7338 common = None
7339 7339 else:
7340 7340 common = commoninc
7341 7341 if revs:
7342 7342 revs = [repo.lookup(rev) for rev in revs]
7343 7343 with repo.ui.silent():
7344 7344 outgoing = discovery.findcommonoutgoing(
7345 7345 repo, dother, onlyheads=revs, commoninc=common
7346 7346 )
7347 7347 return dest, dbranch, dother, outgoing
7348 7348
7349 7349 if needsoutgoing:
7350 7350 dest, dbranch, dother, outgoing = getoutgoing()
7351 7351 else:
7352 7352 dest = dbranch = dother = outgoing = None
7353 7353
7354 7354 if opts.get(b'remote'):
7355 7355 # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`.
7356 7356 # The former always sets `sother` (or raises an exception if it can't);
7357 7357 # the latter always sets `outgoing`.
7358 7358 assert sother is not None
7359 7359 assert outgoing is not None
7360 7360
7361 7361 t = []
7362 7362 if incoming:
7363 7363 t.append(_(b'1 or more incoming'))
7364 7364 o = outgoing.missing
7365 7365 if o:
7366 7366 t.append(_(b'%d outgoing') % len(o))
7367 7367 other = dother or sother
7368 7368 if b'bookmarks' in other.listkeys(b'namespaces'):
7369 7369 counts = bookmarks.summary(repo, other)
7370 7370 if counts[0] > 0:
7371 7371 t.append(_(b'%d incoming bookmarks') % counts[0])
7372 7372 if counts[1] > 0:
7373 7373 t.append(_(b'%d outgoing bookmarks') % counts[1])
7374 7374
7375 7375 if t:
7376 7376 # i18n: column positioning for "hg summary"
7377 7377 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7378 7378 else:
7379 7379 # i18n: column positioning for "hg summary"
7380 7380 ui.status(_(b'remote: (synced)\n'))
7381 7381
7382 7382 cmdutil.summaryremotehooks(
7383 7383 ui,
7384 7384 repo,
7385 7385 opts,
7386 7386 (
7387 7387 (source, sbranch, sother, commoninc),
7388 7388 (dest, dbranch, dother, outgoing),
7389 7389 ),
7390 7390 )
7391 7391
7392 7392
@command(
    b'tag',
    [
        (b'f', b'force', None, _(b'force tag')),
        (b'l', b'local', None, _(b'make the tag local')),
        (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
        (b'', b'remove', None, _(b'remove a tag')),
        # -l/--local is already there, commitopts cannot be used
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    ]
    + commitopts2,
    _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    # --remove and --rev are mutually exclusive; reject early, before locks.
    cmdutil.check_incompatible_arguments(opts, 'remove', ['rev'])
    opts = pycompat.byteskwargs(opts)
    # Tagging may both commit (.hgtags in the store) and touch the working
    # copy, so take the working-copy lock and the store lock together.
    with repo.wlock(), repo.lock():
        # default: tag the working directory's parent revision
        rev_ = b"."
        # all positional arguments are tag names; whitespace is stripped
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise error.InputError(_(b'tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, b'tag')
            if not n:
                raise error.InputError(
                    _(b'tag names cannot consist entirely of whitespace')
                )
        if opts.get(b'rev'):
            rev_ = opts[b'rev']
        message = opts.get(b'message')
        if opts.get(b'remove'):
            # a tag can only be removed from the store it lives in, so the
            # expected type must match the --local flag
            if opts.get(b'local'):
                expectedtype = b'local'
            else:
                expectedtype = b'global'

            for n in names:
                if repo.tagtype(n) == b'global':
                    alltags = tagsmod.findglobaltags(ui, repo)
                    # a global tag pointing at nullid was already removed
                    if alltags[n][0] == repo.nullid:
                        raise error.InputError(
                            _(b"tag '%s' is already removed") % n
                        )
                if not repo.tagtype(n):
                    raise error.InputError(_(b"tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == b'global':
                        raise error.InputError(
                            _(b"tag '%s' is not a global tag") % n
                        )
                    else:
                        raise error.InputError(
                            _(b"tag '%s' is not a local tag") % n
                        )
            # removal is recorded by re-tagging the null revision
            rev_ = b'null'
            if not message:
                # we don't translate commit messages
                message = b'Removed tag %s' % b', '.join(names)
        elif not opts.get(b'force'):
            for n in names:
                if n in repo.tags():
                    raise error.InputError(
                        _(b"tag '%s' already exists (use -f to force)") % n
                    )
        if not opts.get(b'local'):
            # global tags create a commit, which is refused during an
            # uncommitted merge or (without --force) off a branch head
            p1, p2 = repo.dirstate.parents()
            if p2 != repo.nullid:
                raise error.StateError(_(b'uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get(b'force') and bheads and p1 not in bheads:
                raise error.InputError(
                    _(
                        b'working directory is not at a branch head '
                        b'(use -f to force)'
                    )
                )
        node = logcmdutil.revsingle(repo, rev_).node()

        # don't allow tagging the null rev or the working directory
        if node is None:
            raise error.InputError(_(b"cannot tag working directory"))
        elif not opts.get(b'remove') and node == nullid:
            raise error.InputError(_(b"cannot tag null revision"))

        if not message:
            # we don't translate commit messages
            message = b'Added tag %s for changeset %s' % (
                b', '.join(names),
                short(node),
            )

        date = opts.get(b'date')
        if date:
            date = dateutil.parsedate(date)

        # pick the editor form matching the operation so hooks/templates
        # can distinguish additions from removals
        if opts.get(b'remove'):
            editform = b'tag.remove'
        else:
            editform = b'tag.add'
        editor = cmdutil.getcommiteditor(
            editform=editform, **pycompat.strkwargs(opts)
        )

        tagsmod.tag(
            repo,
            names,
            node,
            message,
            opts.get(b'local'),
            opts.get(b'user'),
            date,
            editor=editor,
        )
7539 7539
7540 7540
@command(
    b'tags',
    formatteropts,
    b'',
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
    intents={INTENT_READONLY},
)
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    When the -q/--quiet switch is used, only the tag name is printed.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{tag}``. See also
      :hg:`help templates`.

      :type: String. ``local`` for local tags.

    Returns 0 on success.
    """

    byteopts = pycompat.byteskwargs(opts)
    ui.pager(b'tags')
    fm = ui.formatter(b'tags', byteopts)
    tohex = fm.hexfunc

    # walk the tag list newest-first
    for tagname, tagnode in reversed(repo.tagslist()):
        tagtype = repo.tagtype(tagname)
        if tagtype and tagtype != b'global':
            # local tags get their own label so they can be colored
            label = b'tags.' + tagtype
        else:
            tagtype = b''
            label = b'tags.normal'
        nodestr = tohex(tagnode)

        fm.startitem()
        fm.context(repo=repo)
        fm.write(b'tag', b'%s', tagname, label=label)
        # pad so the rev:node column lines up at column 30
        padding = b" " * (30 - encoding.colwidth(tagname))
        fm.condwrite(
            not ui.quiet,
            b'rev node',
            padding + b' %5d:%s',
            repo.changelog.rev(tagnode),
            nodestr,
            label=label,
        )
        fm.condwrite(
            ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
        )
        fm.plain(b'\n')
    fm.end()
7599 7599
7600 7600
@command(
    b'tip',
    [
        (b'p', b'patch', None, _(b'show patch')),
        (b'g', b'git', None, _(b'use git extended diff format')),
    ]
    + templateopts,
    _(b'[-p] [-g]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    byteopts = pycompat.byteskwargs(opts)
    # display the single changeset the 'tip' tag resolves to
    tipctx = repo[b'tip']
    displayer = logcmdutil.changesetdisplayer(ui, repo, byteopts)
    displayer.show(tipctx)
    displayer.close()
7631 7631
7632 7632
@command(
    b'unbundle',
    [
        (
            b'u',
            b'update',
            None,
            _(b'update to new branch head if changesets were unbundled'),
        )
    ],
    _(b'[-u] FILE...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more bundle files

    Apply one or more bundle files generated by :hg:`bundle`.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    # at least one file is required; fold it into a single tuple
    fnames = (fname1,) + fnames

    with repo.lock():
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            # stream-clone ("packed") bundles have a dedicated debug command
            # and are rejected here
            if isinstance(gen, streamclone.streamcloneapplier):
                raise error.InputError(
                    _(
                        b'packed bundles cannot be applied with '
                        b'"hg unbundle"'
                    ),
                    hint=_(b'use "hg debugapplystreamclonebundle"'),
                )
            url = b'bundle:' + fname
            try:
                txnname = b'unbundle'
                if not isinstance(gen, bundle2.unbundle20):
                    # legacy bundles embed the (password-stripped) URL in
                    # the transaction name
                    txnname = b'unbundle\n%s' % urlutil.hidepassword(url)
                # each bundle gets its own transaction
                with repo.transaction(txnname) as tr:
                    op = bundle2.applybundle(
                        repo, gen, tr, source=b'unbundle', url=url
                    )
            except error.BundleUnknownFeatureError as exc:
                raise error.Abort(
                    _(b'%s: unknown bundle feature, %s') % (fname, exc),
                    hint=_(
                        b"see https://mercurial-scm.org/"
                        b"wiki/BundleFeature for more "
                        b"information"
                    ),
                )
            # NOTE(review): modheads is overwritten on each iteration, so
            # only the last bundle's head count reaches postincoming below
            modheads = bundle2.combinechangegroupresults(op)

    if postincoming(ui, repo, modheads, opts.get('update'), None, None):
        return 1
    else:
        return 0
7691 7691
7692 7692
@command(
    b'unshelve',
    [
        (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
        (
            b'c',
            b'continue',
            None,
            _(b'continue an incomplete unshelve operation'),
        ),
        (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
        (b'k', b'keep', None, _(b'keep shelve after unshelving')),
        (
            b'n',
            b'name',
            b'',
            _(b'restore shelved change with given name'),
            _(b'NAME'),
        ),
        (b't', b'tool', b'', _(b'specify merge tool')),
        (
            b'',
            b'date',
            b'',
            _(b'set date for temporary commits (DEPRECATED)'),
            _(b'DATE'),
        ),
    ],
    _(b'hg unshelve [OPTION]... [[-n] SHELVED]'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def unshelve(ui, repo, *shelved, **opts):
    """restore a shelved change to the working directory

    This command accepts an optional name of a shelved change to
    restore. If none is given, the most recent shelved change is used.

    If a shelved change is applied successfully, the bundle that
    contains the shelved changes is moved to a backup location
    (.hg/shelve-backup).

    Since you can restore a shelved change on top of an arbitrary
    commit, it is possible that unshelving will result in a conflict
    between your changes and the commits you are unshelving onto. If
    this occurs, you must resolve the conflict, then use
    ``--continue`` to complete the unshelve operation. (The bundle
    will not be moved until you successfully complete the unshelve.)

    (Alternatively, you can use ``--abort`` to abandon an unshelve
    that causes a conflict. This reverts the unshelved changes, and
    leaves the bundle in place.)

    If bare shelved change (without interactive, include and exclude
    option) was done on newly created branch it would restore branch
    information to the working directory.

    After a successful unshelve, the shelved changes are stored in a
    backup directory. Only the N most recent backups are kept. N
    defaults to 10 but can be overridden using the ``shelve.maxbackups``
    configuration option.

    .. container:: verbose

       Timestamp in seconds is used to decide order of backups. More
       than ``maxbackups`` backups are kept, if same timestamp
       prevents from deciding exact order of them, for safety.

       Selected changes can be unshelved with ``--interactive`` flag.
       The working directory is updated with the selected changes, and
       only the unselected changes remain shelved.
       Note: The whole shelve is applied to working directory first before
       running interactively. So, this will bring up all the conflicts between
       working directory and the shelve, irrespective of which changes will be
       unshelved.
    """
    # Hold the working-copy lock for the whole operation; all of the
    # actual work is delegated to the shelve module.
    wlock = repo.wlock()
    try:
        return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
    finally:
        wlock.release()
7770 7770
7771 7771
# Register 'unshelve' with the generic unfinished-state machinery: where
# its state file lives, that it supports --continue, and which callbacks
# implement abort/continue.
statemod.addunfinished(
    b'unshelve',
    fname=b'shelvedstate',
    continueflag=True,
    cmdmsg=_(b'unshelve already in progress'),
    abortfunc=shelvemod.hgabortunshelve,
    continuefunc=shelvemod.hgcontinueunshelve,
)
7780 7780
7781 7781
@command(
    b'update|up|checkout|co',
    [
        (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
        (b'c', b'check', None, _(b'require clean working directory')),
        (b'm', b'merge', None, _(b'merge uncommitted changes')),
        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
        (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
    ]
    + mergetoolopts,
    _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
)
def update(ui, repo, node=None, **opts):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the active bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent and there are uncommitted changes, the update is
    aborted. With the -c/--check option, the working directory is checked
    for uncommitted changes; if none are found, the working directory is
    updated to the specified changeset.

    .. container:: verbose

      The -C/--clean, -c/--check, and -m/--merge options control what
      happens if the working directory contains uncommitted changes.
      At most of one of them can be specified.

      1. If no option is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -m/--merge option, the update is allowed even if the
         requested changeset is not an ancestor or descendant of
         the working directory's parent.

      3. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      4. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`merge --abort`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # --clean/--check/--merge are mutually exclusive
    cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
    rev = opts.get('rev')
    date = opts.get('date')
    clean = opts.get('clean')
    check = opts.get('check')
    merge = opts.get('merge')
    # the destination may be given either positionally (node) or via --rev,
    # but not both
    if rev and node:
        raise error.InputError(_(b"please specify just one revision"))

    if ui.configbool(b'commands', b'update.requiredest'):
        if not node and not rev and not date:
            raise error.InputError(
                _(b'you must specify a destination'),
                hint=_(b'for example: hg update ".::"'),
            )

    if rev is None or rev == b'':
        rev = node

    if date and rev is not None:
        raise error.InputError(_(b"you can't specify a revision and a date"))

    # map the tri-state flags onto hg.updatetotally's updatecheck argument:
    # --check or --no-merge -> 'abort'; --merge or --no-check -> 'none';
    # otherwise leave it to the configured default (None)
    updatecheck = None
    if check or merge is not None and not merge:
        updatecheck = b'abort'
    elif merge or check is not None and not check:
        updatecheck = b'none'

    with repo.wlock():
        cmdutil.clearunfinished(repo)
        if date:
            rev = cmdutil.finddate(ui, repo, date)

        # if we defined a bookmark, we have to remember the original name
        brev = rev
        if rev:
            repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
        ctx = logcmdutil.revsingle(repo, rev, default=None)
        rev = ctx.rev()
        # remember now; the target may only be visible via the unhidden repo
        hidden = ctx.hidden()
        overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
        with ui.configoverride(overrides, b'update'):
            ret = hg.updatetotally(
                ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
            )
        if hidden:
            # warn, and explain why the changeset is hidden if it is obsolete
            ctxstr = ctx.hex()[:12]
            ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)

            if ctx.obsolete():
                obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
                ui.warn(b"(%s)\n" % obsfatemsg)
        return ret
7904 7904
7905 7905
@command(
    b'verify',
    [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def verify(ui, repo, **opts):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see https://mercurial-scm.org/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    byteopts = pycompat.byteskwargs(opts)

    # --full requests the more expensive VERIFY_FULL checking level;
    # None selects the standard level.
    level = verifymod.VERIFY_FULL if byteopts[b'full'] else None
    return hg.verify(repo, level)
7933 7933
7934 7934
@command(
    b'version',
    [] + formatteropts,
    helpcategory=command.CATEGORY_HELP,
    norepo=True,
    intents={INTENT_READONLY},
)
def version_(ui, **opts):
    """output version and copyright information

    .. container:: verbose

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :extensions: List of extensions.
      :ver: String. Version number.

      And each entry of ``{extensions}`` provides the following sub-keywords
      in addition to ``{ver}``.

      :bundled: Boolean. True if included in the release.
      :name: String. Extension name.
    """
    byteopts = pycompat.byteskwargs(opts)
    if ui.verbose:
        ui.pager(b'version')
    fm = ui.formatter(b"version", byteopts)
    fm.startitem()
    fm.write(
        b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
    )
    # renamed from 'license' to avoid shadowing the builtin
    licensetext = _(
        b"(see https://mercurial-scm.org for more information)\n"
        b"\nCopyright (C) 2005-2023 Olivia Mackall and others\n"
        b"This is free software; see the source for copying conditions. "
        b"There is NO\nwarranty; "
        b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    if not ui.quiet:
        fm.plain(licensetext)

    if ui.verbose:
        fm.plain(_(b"\nEnabled extensions:\n\n"))
    # collect (name, version, is-internal) for every enabled extension,
    # sorted by name, so they can be laid out in aligned columns
    extitems = sorted(extensions.extensions())
    names = [extname for extname, extmod in extitems]
    vers = [
        extensions.moduleversion(extmod) or None for extname, extmod in extitems
    ]
    isinternals = [
        extensions.ismoduleinternal(extmod) for extname, extmod in extitems
    ]
    fn = fm.nested(b"extensions", tmpl=b'{name}\n')
    if names:
        # pad every name to the width of the longest one
        namefmt = b" %%-%ds " % max(len(extname) for extname in names)
        places = [_(b"external"), _(b"internal")]
        for extname, extver, internal in zip(names, vers, isinternals):
            fn.startitem()
            fn.condwrite(ui.verbose, b"name", namefmt, extname)
            if ui.verbose:
                fn.plain(b"%s " % places[internal])
            fn.data(bundled=internal)
            fn.condwrite(ui.verbose and extver, b"ver", b"%s", extver)
            if ui.verbose:
                fn.plain(b"\n")
    fn.end()
    fm.end()
8003 8003
8004 8004
def loadcmdtable(ui, name, cmdtable):
    """Load command functions from specified cmdtable"""
    # warn about any built-in commands the extension shadows before merging
    overridden = []
    for cmd in cmdtable:
        if cmd in table:
            overridden.append(cmd)
    if overridden:
        ui.warn(
            _(b"extension '%s' overrides commands: %s\n")
            % (name, b" ".join(overridden))
        )
    table.update(cmdtable)
@@ -1,1802 +1,1808 b''
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import collections
10 10 import contextlib
11 11 import os
12 12 import stat
13 13 import uuid
14 14
15 15 from .i18n import _
16 16 from .pycompat import delattr
17 17
18 18 from hgdemandimport import tracing
19 19
20 20 from . import (
21 21 dirstatemap,
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 node,
26 26 pathutil,
27 27 policy,
28 28 pycompat,
29 29 scmutil,
30 30 txnutil,
31 31 util,
32 32 )
33 33
34 34 from .dirstateutils import (
35 35 timestamp,
36 36 )
37 37
38 38 from .interfaces import (
39 39 dirstate as intdirstate,
40 40 util as interfaceutil,
41 41 )
42 42
43 43 parsers = policy.importmod('parsers')
44 44 rustmod = policy.importrust('dirstate')
45 45
46 46 # use to detect lack of a parameter
47 47 SENTINEL = object()
48 48
49 49 HAS_FAST_DIRSTATE_V2 = rustmod is not None
50 50
51 51 propertycache = util.propertycache
52 52 filecache = scmutil.filecache
53 53 _rangemask = dirstatemap.rangemask
54 54
55 55 DirstateItem = dirstatemap.DirstateItem
56 56
57 57
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg/ directory
        return obj._opener.join(fname)


class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
70 70
71 71
def check_invalidated(func):
    """check that the func is called with a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occured during its
    modification and remains so until we exited the top level scope that framed
    such change.
    """

    def wrap(self, *args, **kwargs):
        # fast path: a healthy dirstate simply delegates to the wrapped call
        if not self._invalidated_context:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` after the dirstate was invalidated'
        raise error.ProgrammingError(msg % func.__name__)

    return wrap
88 88
89 89
def requires_changing_parents(func):
    """Restrict a method to `changing_parents` contexts (and valid dirstates)."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a changing_parents context'
        raise error.ProgrammingError(msg % func.__name__)

    return check_invalidated(wrap)
99 99
100 100
def requires_changing_files(func):
    """Restrict a method to `changing_files` contexts (and valid dirstates)."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a `changing_files`'
        raise error.ProgrammingError(msg % func.__name__)

    return check_invalidated(wrap)
110 110
111 111
def requires_changing_any(func):
    """Restrict a method to any `changing_*` context (and valid dirstates)."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_any:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a changing context'
        raise error.ProgrammingError(msg % func.__name__)

    return check_invalidated(wrap)
121 121
122 122
def requires_changing_files_or_status(func):
    """Restrict a method to `changing_files` or an active status run."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files or self._running_status > 0:
            return func(self, *args, **kwargs)
        msg = (
            'calling `%s` outside of a changing_files '
            'or running_status context'
        )
        raise error.ProgrammingError(msg % func.__name__)

    return check_invalidated(wrap)
135 135
136 136
137 137 CHANGE_TYPE_PARENTS = "parents"
138 138 CHANGE_TYPE_FILES = "files"
139 139
140 140
141 141 @interfaceutil.implementer(intdirstate.idirstate)
142 142 class dirstate:
143 143
144 144 # used by largefile to avoid overwritting transaction callback
145 145 _tr_key_suffix = b''
146 146
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different from on-disk state
        self._dirty = False
        # True if the set of tracked file may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway (CHANGE_TYPE_PARENTS/CHANGE_TYPE_FILES)
        self._change_type = None
        # number of open _running_status context
        self._running_status = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
201 201
    def refresh(self):
        """drop cached state so the next access reloads it from disk

        Refusing to refresh mid-change prevents silently discarding
        in-memory modifications (e.g. copy/rename records).
        """
        # XXX if this happens, you likely did not enter the `changing_xxx`
        # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
        # `refresh`.
        if self.is_changing_any:
            msg = "refreshing the dirstate in the middle of a change"
            raise error.ProgrammingError(msg)
        if '_branch' in vars(self):
            del self._branch
        if '_map' in vars(self) and self._map.may_need_refresh():
            self.invalidate()

    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely touching the property forces `_map.parents()` to be read now
        self._pl
214 220
    @contextlib.contextmanager
    @check_invalidated
    def running_status(self, repo):
        """Wrap a status operation

        This context is not mutally exclusive with the `changing_*` context. It
        also do not warrant for the `wlock` to be taken.

        If the wlock is taken, this context will behave in a simple way, and
        ensure the data are scheduled for write when leaving the top level
        context.

        If the lock is not taken, it will only warrant that the data are either
        committed (written) and rolled back (invalidated) when exiting the top
        level context. The write/invalidate action must be performed by the
        wrapped code.


        The expected logic is:

        A: read the dirstate
        B: run status
           This might make the dirstate dirty by updating cache,
           especially in Rust.
        C: do more "post status fixup if relevant
        D: try to take the w-lock (this will invalidate the changes if they were raced)
        E0: if dirstate changed on disk → discard change (done by dirstate internal)
        E1: elif lock was acquired → write the changes
        E2: else → discard the changes
        """
        has_lock = repo.currentwlock() is not None
        is_changing = self.is_changing_any
        tr = repo.currenttransaction()
        has_tr = tr is not None
        nested = bool(self._running_status)

        first_and_alone = not (is_changing or has_tr or nested)

        # enforce no change happened outside of a proper context.
        if first_and_alone and self._dirty:
            has_tr = repo.currenttransaction() is not None
            if not has_tr and self._changing_level == 0 and self._dirty:
                msg = "entering a status context, but dirstate is already dirty"
                raise error.ProgrammingError(msg)

        # only the outermost, lock-holding context writes on exit
        should_write = has_lock and not (nested or is_changing)

        self._running_status += 1
        try:
            yield
        except Exception:
            self.invalidate()
            raise
        finally:
            self._running_status -= 1
            if self._invalidated_context:
                # a nested scope errored out: roll everything back
                should_write = False
                self.invalidate()

        if should_write:
            assert repo.currenttransaction() is tr
            self.write(tr)
        elif not has_lock:
            if self._dirty:
                msg = b'dirstate dirty while exiting an isolated status context'
                repo.ui.develwarn(msg)
                self.invalidate()
282 288
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Shared implementation of `changing_parents`/`changing_files`.

        Nestable for a single change_type; requires the wlock; writes the
        dirstate when the outermost context exits cleanly.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = b"entering a changing context, but dirstate is already dirty"
            repo.ui.develwarn(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by a upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
            if self._invalidated_context:
                # make sure we invalidate anything an upper context might
                # have changed.
                self.invalidate()
            else:
                # only write when the outermost context exits cleanly
                should_write = self._changing_level <= 0
        tr = repo.currenttransaction()
        if has_tr != (tr is not None):
            if has_tr:
                m = "transaction vanished while changing dirstate"
            else:
                m = "transaction appeared while changing dirstate"
            raise error.ProgrammingError(m)
        if should_write:
            self.write(tr)
338 344
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """context for changes that move the working-copy parents"""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c

    @contextlib.contextmanager
    def changing_files(self, repo):
        """context for changes to the tracked-file set or copy sources"""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
348 354
    # here to help migration to the new code
    def parentchange(self):
        # removed API: fail loudly so out-of-date extensions are detected
        msg = (
            "Mercurial 6.4 and later requires call to "
            "`dirstate.changing_parents(repo)`"
        )
        raise error.ProgrammingError(msg)

    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        return self._changing_level > 0

    def pendingparentchange(self):
        # deprecated alias; the deprecation warning is issued by the callee
        return self.is_changing_parent()

    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents

    @property
    def is_changing_parents(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_PARENTS

    @property
    def is_changing_files(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the files tracked or their sources.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_FILES
392 398
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        return self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
420 426
    @repocache(b'branch')
    def _branch(self):
        """current branch name (bytes); falls back to b"default" if unset"""
        f = None
        data = b''
        try:
            # prefer a pending (in-transaction) branch file when present
            f, mode = txnutil.trypending(self._root, self._opener, b'branch')
            data = f.read().strip()
        except FileNotFoundError:
            pass
        finally:
            if f is not None:
                f.close()
        if not data:
            return b"default"
        return data

    @property
    def _pl(self):
        # parent list: (p1, p2) node ids from the dirstate map
        return self._map.parents()
440 446
    def hasdir(self, d):
        # True if `d` is a directory containing at least one tracked file
        return self._map.hastrackeddir(d)

    @rootcache(b'.hgignore')
    def _ignore(self):
        # build a matcher combining every ignore file, or a never-matcher
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        # whether to display '/' separators on non-'/' platforms
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # does the working-copy filesystem support symlinks?
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # does the working-copy filesystem support the exec bit?
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against .hg)
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
473 479
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            # returns b'l', b'x', or b'' for the given path
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file vanished or is unreadable: no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem can't represent symlinks: consult dirstate,
                # then the (lazily-built) parent-based fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layered lookup for the exec bit
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
528 534
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        # convert repo-relative path `f` for display relative to `cwd`
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path
564 570
    def get_entry(self, path):
        """return a DirstateItem for the associated path"""
        entry = self._map.get(path)
        if entry is None:
            # unknown paths get an empty (untracked) item, never None
            return DirstateItem()
        return entry

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        # deterministic (sorted) iteration over tracked filenames
        return iter(sorted(self._map))

    def items(self):
        return self._map.items()

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid

    def branch(self):
        # branch name converted to the local encoding for display
        return encoding.tolocal(self._branch)
599 605
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        # redundant with the decorator, kept as an explicit safety net
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents for later callbacks
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
626 632
    def setbranch(self, branch, transaction=SENTINEL):
        """record `branch` as the working-copy branch

        With a transaction, the write is deferred to a file generator;
        without one (deprecated), the branch file is written immediately.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        if transaction is SENTINEL:
            msg = b"setbranch needs a `transaction` argument"
            self._ui.deprecwarn(msg, b'6.5')
            transaction = None
        if transaction is not None:
            self._setup_tr_abort(transaction)
            transaction.addfilegenerator(
                b'dirstate-3-branch%s' % self._tr_key_suffix,
                (b'branch',),
                self._write_branch,
                location=b'plain',
                post_finalize=True,
            )
            return

        vfs = self._opener
        with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
            self._write_branch(f)
        # make sure filecache has the correct stat info for _branch after
        # replacing the underlying file
        #
        # XXX do we actually need this,
        # refreshing the attribute is quite cheap
        ce = self._filecache[b'_branch']
        if ce:
            ce.refresh()

    def _write_branch(self, file_obj):
        # single writer used by both the direct and transactional paths
        file_obj.write(self._branch + b'\n')
658 664
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop all cached state so it is re-read lazily on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # if invalidation happens inside an active context, poison it so
        # further mutations raise (see check_invalidated)
        self._invalidated_context = bool(
            self._changing_level > 0
            or self._attached_to_a_transaction
            or self._running_status
        )
        self._origpl = None
677 683
    @requires_changing_any
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        # self-copies carry no information; ignore them
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            # reject sources outside the sparse checkout before recording
            self._check_sparse(source)
            self._map.copymap[dest] = source
        else:
            self._map.copymap.pop(dest, None)

    def copied(self, file):
        """return the recorded copy source of `file`, or None"""
        return self._map.copymap.get(file, None)

    def copies(self):
        """return the full dest → source copy mapping"""
        return self._map.copymap
695 701
    @requires_changing_files
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # only validate names that are newly becoming tracked
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            self._dirty_tracked_set = True
        return pre_tracked

    @requires_changing_files
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        return True the file was previously tracked, False otherwise.
        """
        ret = self._map.set_untracked(filename)
        if ret:
            # only dirty the dirstate when something actually changed
            self._dirty = True
            self._dirty_tracked_set = True
        return ret
732 738
    @requires_changing_files_or_status
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata is the (mode, size, mtime) triple observed on disk
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)

    @requires_changing_files_or_status
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
747 753
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
787 793
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # thin decorated wrapper; the actual work lives in _update_file
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
817 823
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is writen, this is too much of a detour
        to be considered.
        """
        # looser guard than update_file: any change or status context suffices
        if not (self._changing_level > 0 or self._running_status > 0):
            msg = "requires a changes context"
            raise error.ProgrammingError(msg)
        self._update_file(
            *args,
            **kwargs,
        )
844 850
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """shared implementation behind update_file and the hacky variant"""

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked-set changed, remember to refresh the tracked hint
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
876 882
    def _check_new_tracked_filename(self, filename):
        """validate a name about to become tracked

        Rejects invalid names, file/directory clashes, and paths outside
        the sparse checkout.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)

    def _check_sparse(self, filename):
        """Check that a filename is inside the sparse profile"""
        sparsematch = self._sparsematcher
        if sparsematch is not None and not sparsematch.always():
            if not sparsematch(filename):
                msg = _(b"cannot add '%s' - it is outside the sparse checkout")
                hint = _(
                    b'include file with `hg debugsparse --include <pattern>` or use '
                    b'`hg add -s <file>` to include file directory while adding'
                )
                raise error.Abort(msg % filename, hint=hint)
905 911
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """resolve the on-disk case of `path` and cache it in `storemap`"""
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
        storemap[normed] = folded

        return folded
931 937
    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        """case-normalize `path` against tracked files only"""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                # came from a disk walk: already in on-disk case
                folded = path
            else:
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.filefoldmap
                )
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """case-normalize `path` against tracked files and directories"""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded
959 965
960 966 def normalize(self, path, isknown=False, ignoremissing=False):
961 967 """
962 968 normalize the case of a pathname when on a casefolding filesystem
963 969
964 970 isknown specifies whether the filename came from walking the
965 971 disk, to avoid extra filesystem access.
966 972
967 973 If ignoremissing is True, missing path are returned
968 974 unchanged. Otherwise, we try harder to normalize possibly
969 975 existing path components.
970 976
971 977 The normalized case is determined based on the following precedence:
972 978
973 979 - version of name already stored in the dirstate
974 980 - version of name stored on disk
975 981 - version provided via command arguments
976 982 """
977 983
978 984 if self._checkcase:
979 985 return self._normalize(path, isknown, ignoremissing)
980 986 return path
981 987
982 988 # XXX this method is barely used, as a result:
983 989 # - its semantic is unclear
984 990 # - do we really needs it ?
985 991 @requires_changing_parents
986 992 def clear(self):
987 993 self._map.clear()
988 994 self._dirty = True
989 995
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries from ``parent``'s manifest content.

        ``allfiles`` is the full file list of the new parent manifest;
        ``changedfiles``, when given, restricts the work to those files
        (entries for other files are left untouched).
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the previous parents so parent-change callbacks fire
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                # during a merge, use the richer set_tracked logic
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
1042 1048
1043 1049 def _setup_tr_abort(self, tr):
1044 1050 """make sure we invalidate the current change on abort"""
1045 1051 if tr is None:
1046 1052 return
1047 1053
1048 1054 def on_abort(tr):
1049 1055 self._attached_to_a_transaction = False
1050 1056 self.invalidate()
1051 1057
1052 1058 tr.addabort(
1053 1059 b'dirstate-invalidate%s' % self._tr_key_suffix,
1054 1060 on_abort,
1055 1061 )
1056 1062
1057 1063 def write(self, tr):
1058 1064 if not self._dirty:
1059 1065 return
1060 1066 # make sure we don't request a write of invalidated content
1061 1067 # XXX move before the dirty check once `unlock` stop calling `write`
1062 1068 assert not self._invalidated_context
1063 1069
1064 1070 write_key = self._use_tracked_hint and self._dirty_tracked_set
1065 1071 if tr:
1066 1072
1067 1073 self._setup_tr_abort(tr)
1068 1074 self._attached_to_a_transaction = True
1069 1075
1070 1076 def on_success(f):
1071 1077 self._attached_to_a_transaction = False
1072 1078 self._writedirstate(tr, f),
1073 1079
1074 1080 # delay writing in-memory changes out
1075 1081 tr.addfilegenerator(
1076 1082 b'dirstate-1-main%s' % self._tr_key_suffix,
1077 1083 (self._filename,),
1078 1084 on_success,
1079 1085 location=b'plain',
1080 1086 post_finalize=True,
1081 1087 )
1082 1088 if write_key:
1083 1089 tr.addfilegenerator(
1084 1090 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1085 1091 (self._filename_th,),
1086 1092 lambda f: self._write_tracked_hint(tr, f),
1087 1093 location=b'plain',
1088 1094 post_finalize=True,
1089 1095 )
1090 1096 return
1091 1097
1092 1098 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1093 1099 with file(self._filename) as f:
1094 1100 self._writedirstate(tr, f)
1095 1101 if write_key:
1096 1102 # we update the key-file after writing to make sure reader have a
1097 1103 # key that match the newly written content
1098 1104 with file(self._filename_th) as f:
1099 1105 self._write_tracked_hint(tr, f)
1100 1106
1101 1107 def delete_tracked_hint(self):
1102 1108 """remove the tracked_hint file
1103 1109
1104 1110 To be used by format downgrades operation"""
1105 1111 self._opener.unlink(self._filename_th)
1106 1112 self._use_tracked_hint = False
1107 1113
1108 1114 def addparentchangecallback(self, category, callback):
1109 1115 """add a callback to be called when the wd parents are changed
1110 1116
1111 1117 Callback will be called with the following arguments:
1112 1118 dirstate, (oldp1, oldp2), (newp1, newp2)
1113 1119
1114 1120 Category is a unique identifier to allow overwriting an old callback
1115 1121 with a newer callback.
1116 1122 """
1117 1123 self._plchangecallbacks[category] = callback
1118 1124
    def _writedirstate(self, tr, st):
        """Serialize the dirstate map to the open file object ``st``.

        Fires any registered parent-change callbacks first (in sorted
        category order), then delegates the encoding to the map and clears
        the dirty flags.
        """
        # make sure we don't write invalidated content
        assert not self._invalidated_context
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False
1130 1136
1131 1137 def _write_tracked_hint(self, tr, f):
1132 1138 key = node.hex(uuid.uuid4().bytes)
1133 1139 f.write(b"1\n%s\n" % key) # 1 is the format version
1134 1140
1135 1141 def _dirignore(self, f):
1136 1142 if self._ignore(f):
1137 1143 return True
1138 1144 for p in pathutil.finddirs(f):
1139 1145 if self._ignore(p):
1140 1146 return True
1141 1147 return False
1142 1148
1143 1149 def _ignorefiles(self):
1144 1150 files = []
1145 1151 if os.path.exists(self._join(b'.hgignore')):
1146 1152 files.append(self._join(b'.hgignore'))
1147 1153 for name, path in self._ui.configitems(b"ui"):
1148 1154 if name == b'ignore' or name.startswith(b'ignore.'):
1149 1155 # we need to use os.path.join here rather than self._join
1150 1156 # because path is arbitrary and user-specified
1151 1157 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1152 1158 return files
1153 1159
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` for the first ignore pattern
        matching ``f``, or ``(None, -1, b"")`` when nothing matches.

        Walks all ignore files breadth-first, following ``subinclude``
        patterns into further pattern files (each visited at most once).
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced pattern file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1175 1181
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for a stat mode we cannot track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist frequently used attributes into locals for the loop below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; the subrepo
        # machinery handles them (files and subrepos are both sorted, so a
        # single merge-style pass suffices)
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1311 1317
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # restrict the walk to the sparse checkout, but always keep
            # explicitly requested files
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # hoist hot attributes into locals for the traversal loops
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1502 1508
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust status implementation and adapt its output.

        Returns ``(lookup, scmutil.status)``.  The Rust side may raise
        ``rustmod.FallbackError``, which the caller handles by falling back
        to the pure-Python path.
        """
        if self._sparsematchfn is not None:
            # restrict to the sparse checkout, keeping explicit files
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # propagate any mutation the Rust side reports into our dirty flag
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file, syntax) pair: a pattern with invalid syntax
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: a pattern file that could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1584 1590
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        if not self._running_status:
            msg = "Calling `status` outside a `running_status` context"
            raise error.ProgrammingError(msg)
        # the boolean parameters are rebound below as result lists, so
        # save them under list* names first
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # fall through to the pure-Python implementation
                pass

        def noop(f):
            pass

        # hoist hot lookups/appends into locals for the loop below
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # file is not in the dirstate: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1735 1741
1736 1742 def matches(self, match):
1737 1743 """
1738 1744 return files in the dirstate (in whatever state) filtered by match
1739 1745 """
1740 1746 dmap = self._map
1741 1747 if rustmod is not None:
1742 1748 dmap = self._map._map
1743 1749
1744 1750 if match.always():
1745 1751 return dmap.keys()
1746 1752 files = match.files()
1747 1753 if match.isexact():
1748 1754 # fast path -- filter the other way around, since typically files is
1749 1755 # much smaller than dmap
1750 1756 return [f for f in files if f in dmap]
1751 1757 if match.prefix() and all(fn in dmap for fn in files):
1752 1758 # fast path -- all the values are known to be files, so just return
1753 1759 # that
1754 1760 return list(files)
1755 1761 return [f for f in dmap if match(f)]
1756 1762
1757 1763 def _actualfilename(self, tr):
1758 1764 if tr:
1759 1765 return self._pendingfilename
1760 1766 else:
1761 1767 return self._filename
1762 1768
1763 1769 def all_file_names(self):
1764 1770 """list all filename currently used by this dirstate
1765 1771
1766 1772 This is only used to do `hg rollback` related backup in the transaction
1767 1773 """
1768 1774 files = [b'branch']
1769 1775 if self._opener.exists(self._filename):
1770 1776 files.append(self._filename)
1771 1777 if self._use_dirstate_v2:
1772 1778 files.append(self._map.docket.data_filename())
1773 1779 return tuple(files)
1774 1780
    def verify(self, m1, m2, p1, narrow_matcher=None):
        """
        check the dirstate contents against the parent manifest and yield errors
        """
        missing_from_p1 = _(
            b"%s marked as tracked in p1 (%s) but not in manifest1\n"
        )
        unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
        missing_from_ps = _(
            b"%s marked as modified, but not in either manifest\n"
        )
        missing_from_ds = _(
            b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
        )
        # pass 1: every dirstate entry must be consistent with m1/m2
        for f, entry in self.items():
            if entry.p1_tracked:
                if entry.modified and f not in m1 and f not in m2:
                    yield missing_from_ps % f
                elif f not in m1:
                    yield missing_from_p1 % (f, node.short(p1))
            if entry.added and f in m1:
                yield unexpected_in_p1 % f
        # pass 2: every file of m1 (within the narrow spec, if any) must be
        # tracked in the dirstate
        for f in m1:
            if narrow_matcher is not None and not narrow_matcher(f):
                continue
            entry = self.get_entry(f)
            if not entry.p1_tracked:
                yield missing_from_ds % (f, node.short(p1))
@@ -1,3983 +1,3984 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 # coding: utf-8
3 3 #
4 4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9
10 10 import functools
11 11 import os
12 12 import random
13 13 import re
14 14 import sys
15 15 import time
16 16 import weakref
17 17
18 18 from concurrent import futures
19 19 from typing import (
20 20 Optional,
21 21 )
22 22
23 23 from .i18n import _
24 24 from .node import (
25 25 bin,
26 26 hex,
27 27 nullrev,
28 28 sha1nodeconstants,
29 29 short,
30 30 )
31 31 from .pycompat import (
32 32 delattr,
33 33 getattr,
34 34 )
35 35 from . import (
36 36 bookmarks,
37 37 branchmap,
38 38 bundle2,
39 39 bundlecaches,
40 40 changegroup,
41 41 color,
42 42 commit,
43 43 context,
44 44 dirstate,
45 45 discovery,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filelog,
51 51 hook,
52 52 lock as lockmod,
53 53 match as matchmod,
54 54 mergestate as mergestatemod,
55 55 mergeutil,
56 56 namespaces,
57 57 narrowspec,
58 58 obsolete,
59 59 pathutil,
60 60 phases,
61 61 pushkey,
62 62 pycompat,
63 63 rcutil,
64 64 repoview,
65 65 requirements as requirementsmod,
66 66 revlog,
67 67 revset,
68 68 revsetlang,
69 69 scmutil,
70 70 sparse,
71 71 store as storemod,
72 72 subrepoutil,
73 73 tags as tagsmod,
74 74 transaction,
75 75 txnutil,
76 76 util,
77 77 vfs as vfsmod,
78 78 wireprototypes,
79 79 )
80 80
81 81 from .interfaces import (
82 82 repository,
83 83 util as interfaceutil,
84 84 )
85 85
86 86 from .utils import (
87 87 hashutil,
88 88 procutil,
89 89 stringutil,
90 90 urlutil,
91 91 )
92 92
93 93 from .revlogutils import (
94 94 concurrency_checker as revlogchecker,
95 95 constants as revlogconst,
96 96 sidedata as sidedatamod,
97 97 )
98 98
99 99 release = lockmod.release
100 100 urlerr = util.urlerr
101 101 urlreq = util.urlreq
102 102
# NOTE(review): appears to match the dirstate-related files and the branch
# file that receive special handling around rollback backups — usage is not
# visible in this chunk, confirm against callers.
RE_SKIP_DIRSTATE_ROLLBACK = re.compile(
    b"^((dirstate|narrowspec.dirstate).*|branch$)"
)

# set of (path, vfs-location) tuples. vfs-location is:
# - 'plain' for vfs relative paths
# - '' for svfs relative paths
_cachedfiles = set()
111 111
112 112
class _basefilecache(scmutil.filecache):
    """All filecache usage on repo are done for logic that should be unfiltered"""

    def __get__(self, repo, type=None):
        # Class-level access returns the descriptor itself.
        if repo is None:
            return self
        # Always consult (and populate) the unfiltered repo's instance dict,
        # so that every filtered view shares a single cached value.
        unfi = repo.unfiltered()
        missing = object()
        cached = unfi.__dict__.get(self.sname, missing)
        if cached is not missing:
            return cached
        return super().__get__(unfi, type)

    def set(self, repo, value):
        # Writes likewise target the unfiltered repo.
        return super().set(repo.unfiltered(), value)
129 129
130 130
class repofilecache(_basefilecache):
    """filecache for files in .hg but outside of .hg/store"""

    def __init__(self, *paths):
        super().__init__(*paths)
        # Register every tracked path as living in the 'plain' (.hg/) vfs.
        _cachedfiles.update((p, b'plain') for p in paths)

    def join(self, obj, fname):
        # Resolve relative to the repository's main vfs (.hg/).
        return obj.vfs.join(fname)
141 141
142 142
class storecache(_basefilecache):
    """filecache for files in the store"""

    def __init__(self, *paths):
        super().__init__(*paths)
        # The empty location marker means "relative to the store vfs".
        _cachedfiles.update((p, b'') for p in paths)

    def join(self, obj, fname):
        # Resolve relative to the store directory.
        return obj.sjoin(fname)
153 153
154 154
class changelogcache(storecache):
    """filecache for the changelog"""

    def __init__(self):
        super().__init__()
        # Track both the changelog index and the persistent-nodemap docket
        # so a change to either invalidates the cached changelog.
        _cachedfiles.update([(b'00changelog.i', b''), (b'00changelog.n', b'')])

    def tracked_paths(self, obj):
        # The nodemap docket only exists when persistent-nodemap is enabled.
        names = [b'00changelog.i']
        if obj.store.opener.options.get(b'persistent-nodemap', False):
            names.append(b'00changelog.n')
        return [self.join(obj, name) for name in names]
168 168
169 169
class manifestlogcache(storecache):
    """filecache for the manifestlog"""

    def __init__(self):
        super().__init__()
        # Track both the manifest index and the persistent-nodemap docket.
        _cachedfiles.update([(b'00manifest.i', b''), (b'00manifest.n', b'')])

    def tracked_paths(self, obj):
        # The nodemap docket only exists when persistent-nodemap is enabled.
        names = [b'00manifest.i']
        if obj.store.opener.options.get(b'persistent-nodemap', False):
            names.append(b'00manifest.n')
        return [self.join(obj, name) for name in names]
183 183
184 184
class mixedrepostorecache(_basefilecache):
    """filecache for a mix files in .hg/store and outside"""

    def __init__(self, *pathsandlocations):
        # scmutil.filecache only uses the path for passing back into our
        # join(), so we can safely pass a list of paths and locations
        super().__init__(*pathsandlocations)
        _cachedfiles.update(pathsandlocations)

    def join(self, obj, fnameandlocation):
        fname, location = fnameandlocation
        # 'plain' → main repo vfs; '' → store vfs; anything else is a bug.
        if location == b'plain':
            return obj.vfs.join(fname)
        if location == b'':
            return obj.sjoin(fname)
        raise error.ProgrammingError(b'unexpected location: %s' % location)
204 204
205 205
def isfilecached(repo, name):
    """check if a repo has already cached "name" filecache-ed property

    This returns (cachedobj-or-None, iscached) tuple.
    """
    entry = repo.unfiltered()._filecache.get(name)
    if entry:
        return entry.obj, True
    return None, False
215 215
216 216
class unfilteredpropertycache(util.propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is not repo:
            # On a filtered view, read the attribute from (and let it be
            # computed and cached on) the unfiltered repo instead.
            return getattr(unfi, self.name)
        return super().__get__(unfi)
225 225
226 226
class filteredpropertycache(util.propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # Cache on the (possibly filtered) instance itself, bypassing any
        # __setattr__ override via object.__setattr__.
        object.__setattr__(obj, self.name, value)
232 232
233 233
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    # A cached property materializes as an instance attribute once computed.
    return name in repo.unfiltered().__dict__
237 237
238 238
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""

    @functools.wraps(orig)
    def inner(repo, *args, **kwargs):
        # Swap the (possibly filtered) repo for its unfiltered view before
        # delegating to the wrapped method.
        return orig(repo.unfiltered(), *args, **kwargs)

    return inner
247 247
248 248
# Wire-protocol capabilities advertised by a modern local peer.
moderncaps = {
    b'lookup',
    b'branchmap',
    b'pushkey',
    b'known',
    b'getbundle',
    b'unbundle',
}
# Legacy peers additionally support the pre-getbundle changegroupsubset call.
legacycaps = moderncaps.union({b'changegroupsubset'})
258 258
259 259
@interfaceutil.implementer(repository.ipeercommandexecutor)
class localcommandexecutor:
    """Command executor that invokes methods on a local peer directly."""

    def __init__(self, peer):
        self._peer = peer
        self._sent = False
        self._closed = False

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        self.close()

    def callcommand(self, command, args):
        # Calls are rejected once the executor has been flushed or closed.
        if self._sent:
            raise error.ProgrammingError(
                b'callcommand() cannot be used after sendcommands()'
            )

        if self._closed:
            raise error.ProgrammingError(
                b'callcommand() cannot be used after close()'
            )

        # No batching is needed locally: resolve the named method on the
        # peer, invoke it synchronously, and hand back a future that is
        # already resolved (or already carries the raised exception).
        method = getattr(self._peer, pycompat.sysstr(command))

        fut = futures.Future()

        try:
            res = method(**pycompat.strkwargs(args))
        except Exception:
            pycompat.future_set_exception_info(fut, sys.exc_info()[1:])
        else:
            fut.set_result(res)

        return fut

    def sendcommands(self):
        self._sent = True

    def close(self):
        self._closed = True
304 304
305 305
@interfaceutil.implementer(repository.ipeercommands)
class localpeer(repository.peer):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=None, path=None):
        super(localpeer, self).__init__(repo.ui, path=path)

        if caps is None:
            caps = moderncaps.copy()
        # The peer always exposes the 'served' filtered view of the repo.
        self._repo = repo.filtered(b'served')

        if repo._wanted_sidedata:
            formatted = bundle2.format_remote_wanted_sidedata(repo)
            caps.add(b'exp-wanted-sidedata=' + formatted)

        self._caps = repo._restrictcapabilities(caps)

    # Begin of _basepeer interface.

    def url(self):
        return self._repo.url()

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def close(self):
        self._repo.close()

    # End of _basepeer interface.

    # Begin of _basewirecommands interface.

    def branchmap(self):
        return self._repo.branchmap()

    def capabilities(self):
        return self._caps

    def clonebundles(self):
        return self._repo.tryread(bundlecaches.CB_MANIFEST_FILE)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        """Used to test argument passing over the wire"""
        return b"%s %s %s %s %s" % (
            one,
            two,
            pycompat.bytestr(three),
            pycompat.bytestr(four),
            pycompat.bytestr(five),
        )

    def getbundle(
        self,
        source,
        heads=None,
        common=None,
        bundlecaps=None,
        remote_sidedata=None,
        **kwargs
    ):
        # Delegate bundle generation to exchange; only the chunk iterator
        # (second element of the returned tuple) is needed here.
        chunks = exchange.getbundlechunks(
            self._repo,
            source,
            heads=heads,
            common=common,
            bundlecaps=bundlecaps,
            remote_sidedata=remote_sidedata,
            **kwargs
        )[1]
        cb = util.chunkbuffer(chunks)

        if exchange.bundle2requested(bundlecaps):
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            return bundle2.getunbundler(self.ui, cb)
        else:
            return changegroup.getunbundler(b'01', cb, None)

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def lookup(self, key):
        return self._repo.lookup(key)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def stream_out(self):
        raise error.Abort(_(b'cannot perform stream clone against local peer'))

    def unbundle(self, bundle, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            try:
                bundle = exchange.readbundle(self.ui, bundle, None)
                ret = exchange.unbundle(self._repo, bundle, heads, b'push', url)
                if util.safehasattr(ret, b'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
                    stream = util.chunkbuffer(ret.getchunks())
                    ret = bundle2.getunbundler(self.ui, stream)
                return ret
            except Exception as exc:
                # If the exception contains output salvaged from a bundle2
                # reply, we need to make sure it is printed before continuing
                # to fail. So we build a bundle2 with such output and consume
                # it directly.
                #
                # This is not very elegant but allows a "simple" solution for
                # issue4594
                output = getattr(exc, '_bundle2salvagedoutput', ())
                if output:
                    bundler = bundle2.bundle20(self._repo.ui)
                    for out in output:
                        bundler.addpart(out)
                    stream = util.chunkbuffer(bundler.getchunks())
                    b = bundle2.getunbundler(self.ui, stream)
                    bundle2.processbundle(self._repo, b)
                raise
        except error.PushRaced as exc:
            raise error.ResponseError(
                _(b'push failed:'), stringutil.forcebytestr(exc)
            )

    # End of _basewirecommands interface.

    # Begin of peer interface.

    def commandexecutor(self):
        return localcommandexecutor(self)

    # End of peer interface.
451 451
452 452
@interfaceutil.implementer(repository.ipeerlegacycommands)
class locallegacypeer(localpeer):
    """peer extension which implements legacy methods too; used for tests with
    restricted capabilities"""

    def __init__(self, repo, path=None):
        super(locallegacypeer, self).__init__(repo, caps=legacycaps, path=path)

    # Begin of baselegacywirecommands interface.

    def between(self, pairs):
        return self._repo.between(pairs)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def changegroup(self, nodes, source):
        # Build an outgoing set rooted at ``nodes`` up to all current heads.
        out = discovery.outgoing(
            self._repo, missingroots=nodes, ancestorsof=self._repo.heads()
        )
        return changegroup.makechangegroup(self._repo, out, b'01', source)

    def changegroupsubset(self, bases, heads, source):
        # Same as changegroup(), but bounded by explicit bases and heads.
        out = discovery.outgoing(
            self._repo, missingroots=bases, ancestorsof=heads
        )
        return changegroup.makechangegroup(self._repo, out, b'01', source)

    # End of baselegacywirecommands interface.
482 482
483 483
# Functions receiving (ui, features) that extensions can register to impact
# the ability to load repositories with custom requirements. Only
# functions defined in loaded extensions are called.
#
# The function receives a set of requirement strings that the repository
# is capable of opening. Functions will typically add elements to the
# set to reflect that the extension knows how to handle that requirements.
# Consumed by gathersupportedrequirements() below.
featuresetupfuncs = set()
492 492
493 493
def _getsharedvfs(hgvfs, requirements):
    """returns the vfs object pointing to root of shared source
    repo for a shared repository

    hgvfs is vfs pointing at .hg/ of current repo (shared one)
    requirements is a set of requirements of current repo (shared one)
    """
    # The ``shared`` or ``relshared`` requirements indicate the
    # store lives in the path contained in the ``.hg/sharedpath`` file.
    # This is an absolute path for ``shared`` and relative to
    # ``.hg/`` for ``relshared``.
    source = hgvfs.read(b'sharedpath').rstrip(b'\n')
    if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements:
        source = util.normpath(hgvfs.join(source))

    sharedvfs = vfsmod.vfs(source, realpath=True)

    # Refuse to operate on a share whose source has disappeared.
    if not sharedvfs.exists():
        raise error.RepoError(
            _(b'.hg/sharedpath points to nonexistent directory %s')
            % sharedvfs.base
        )
    return sharedvfs
517 517
518 518
def _readrequires(vfs, allowmissing):
    """reads the require file present at root of this vfs
    and return a set of requirements

    If allowmissing is True, we suppress FileNotFoundError if raised"""
    # requires file contains a newline-delimited list of
    # features/capabilities the opener (us) must have in order to use
    # the repository. This file was introduced in Mercurial 0.9.2,
    # which means very old repositories may not have one. We assume
    # a missing file translates to no requirements.
    if allowmissing:
        data = vfs.tryread(b'requires')
    else:
        data = vfs.read(b'requires')
    return set(data.splitlines())
531 531
532 532
def makelocalrepository(baseui, path: bytes, intents=None):
    """Create a local repository object.

    Given arguments needed to construct a local repository, this function
    performs various early repository loading functionality (such as
    reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
    the repository can be opened, derives a type suitable for representing
    that repository, and returns an instance of it.

    The returned object conforms to the ``repository.completelocalrepository``
    interface.

    The repository type is derived by calling a series of factory functions
    for each aspect/interface of the final repository. These are defined by
    ``REPO_INTERFACES``.

    Each factory function is called to produce a type implementing a specific
    interface. The cumulative list of returned types will be combined into a
    new type and that type will be instantiated to represent the local
    repository.

    The factory functions each receive various state that may be consulted
    as part of deriving a type.

    Extensions should wrap these factory functions to customize repository type
    creation. Note that an extension's wrapped function may be called even if
    that extension is not loaded for the repo being constructed. Extensions
    should check if their ``__name__`` appears in the
    ``extensionmodulenames`` set passed to the factory function and no-op if
    not.
    """
    ui = baseui.copy()
    # Prevent copying repo configuration.
    ui.copy = baseui.copy

    # Working directory VFS rooted at repository root.
    wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)

    # Main VFS for .hg/ directory.
    hgpath = wdirvfs.join(b'.hg')
    hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)
    # Whether this repository is shared one or not
    shared = False
    # If this repository is shared, vfs pointing to shared repo
    sharedvfs = None

    # The .hg/ path should exist and should be a directory. All other
    # cases are errors.
    if not hgvfs.isdir():
        try:
            hgvfs.stat()
        except FileNotFoundError:
            pass
        except ValueError as e:
            # Can be raised on Python 3.8 when path is invalid.
            raise error.Abort(
                _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e))
            )

        raise error.RepoError(_(b'repository %s not found') % path)

    # Local requirements only; share-safe store requirements are merged in
    # below once the share layout has been determined.
    requirements = _readrequires(hgvfs, True)
    shared = (
        requirementsmod.SHARED_REQUIREMENT in requirements
        or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
    )
    storevfs = None
    if shared:
        # This is a shared repo
        sharedvfs = _getsharedvfs(hgvfs, requirements)
        storevfs = vfsmod.vfs(sharedvfs.join(b'store'))
    else:
        storevfs = vfsmod.vfs(hgvfs.join(b'store'))

    # if .hg/requires contains the sharesafe requirement, it means
    # there exists a `.hg/store/requires` too and we should read it
    # NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
    # is present. We never write SHARESAFE_REQUIREMENT for a repo if store
    # is not present, refer checkrequirementscompat() for that
    #
    # However, if SHARESAFE_REQUIREMENT is not present, it means that the
    # repository was shared the old way. We check the share source .hg/requires
    # for SHARESAFE_REQUIREMENT to detect whether the current repository needs
    # to be reshared
    hint = _(b"see `hg help config.format.use-share-safe` for more information")
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
        if (
            shared
            and requirementsmod.SHARESAFE_REQUIREMENT
            not in _readrequires(sharedvfs, True)
        ):
            mismatch_warn = ui.configbool(
                b'share', b'safe-mismatch.source-not-safe.warn'
            )
            mismatch_config = ui.config(
                b'share', b'safe-mismatch.source-not-safe'
            )
            mismatch_verbose_upgrade = ui.configbool(
                b'share', b'safe-mismatch.source-not-safe:verbose-upgrade'
            )
            if mismatch_config in (
                b'downgrade-allow',
                b'allow',
                b'downgrade-abort',
            ):
                # prevent cyclic import localrepo -> upgrade -> localrepo
                from . import upgrade

                upgrade.downgrade_share_to_non_safe(
                    ui,
                    hgvfs,
                    sharedvfs,
                    requirements,
                    mismatch_config,
                    mismatch_warn,
                    mismatch_verbose_upgrade,
                )
            elif mismatch_config == b'abort':
                raise error.Abort(
                    _(b"share source does not support share-safe requirement"),
                    hint=hint,
                )
            else:
                raise error.Abort(
                    _(
                        b"share-safe mismatch with source.\nUnrecognized"
                        b" value '%s' of `share.safe-mismatch.source-not-safe`"
                        b" set."
                    )
                    % mismatch_config,
                    hint=hint,
                )
        else:
            requirements |= _readrequires(storevfs, False)
    elif shared:
        sourcerequires = _readrequires(sharedvfs, False)
        if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires:
            mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe')
            mismatch_warn = ui.configbool(
                b'share', b'safe-mismatch.source-safe.warn'
            )
            mismatch_verbose_upgrade = ui.configbool(
                b'share', b'safe-mismatch.source-safe:verbose-upgrade'
            )
            if mismatch_config in (
                b'upgrade-allow',
                b'allow',
                b'upgrade-abort',
            ):
                # prevent cyclic import localrepo -> upgrade -> localrepo
                from . import upgrade

                upgrade.upgrade_share_to_safe(
                    ui,
                    hgvfs,
                    storevfs,
                    requirements,
                    mismatch_config,
                    mismatch_warn,
                    mismatch_verbose_upgrade,
                )
            elif mismatch_config == b'abort':
                raise error.Abort(
                    _(
                        b'version mismatch: source uses share-safe'
                        b' functionality while the current share does not'
                    ),
                    hint=hint,
                )
            else:
                raise error.Abort(
                    _(
                        b"share-safe mismatch with source.\nUnrecognized"
                        b" value '%s' of `share.safe-mismatch.source-safe` set."
                    )
                    % mismatch_config,
                    hint=hint,
                )

    # The .hg/hgrc file may load extensions or contain config options
    # that influence repository construction. Attempt to load it and
    # process any new extensions that it may have pulled in.
    if loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs):
        afterhgrcload(ui, wdirvfs, hgvfs, requirements)
        extensions.loadall(ui)
        extensions.populateui(ui)

    # Set of module names of extensions loaded for this repository.
    extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}

    supportedrequirements = gathersupportedrequirements(ui)

    # We first validate the requirements are known.
    ensurerequirementsrecognized(requirements, supportedrequirements)

    # Then we validate that the known set is reasonable to use together.
    ensurerequirementscompatible(ui, requirements)

    # TODO there are unhandled edge cases related to opening repositories with
    # shared storage. If storage is shared, we should also test for requirements
    # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
    # that repo, as that repo may load extensions needed to open it. This is a
    # bit complicated because we don't want the other hgrc to overwrite settings
    # in this hgrc.
    #
    # This bug is somewhat mitigated by the fact that we copy the .hg/requires
    # file when sharing repos. But if a requirement is added after the share is
    # performed, thereby introducing a new requirement for the opener, we may
    # will not see that and could encounter a run-time error interacting with
    # that shared store since it has an unknown-to-us requirement.

    # At this point, we know we should be capable of opening the repository.
    # Now get on with doing that.

    features = set()

    # The "store" part of the repository holds versioned data. How it is
    # accessed is determined by various requirements. If `shared` or
    # `relshared` requirements are present, this indicates current repository
    # is a share and store exists in path mentioned in `.hg/sharedpath`
    if shared:
        storebasepath = sharedvfs.base
        cachepath = sharedvfs.join(b'cache')
        features.add(repository.REPO_FEATURE_SHARED_STORAGE)
    else:
        storebasepath = hgvfs.base
        cachepath = hgvfs.join(b'cache')
    wcachepath = hgvfs.join(b'wcache')

    # The store has changed over time and the exact layout is dictated by
    # requirements. The store interface abstracts differences across all
    # of them.
    store = makestore(
        requirements,
        storebasepath,
        lambda base: vfsmod.vfs(base, cacheaudited=True),
    )
    hgvfs.createmode = store.createmode

    storevfs = store.vfs
    storevfs.options = resolvestorevfsoptions(ui, requirements, features)

    if (
        requirementsmod.REVLOGV2_REQUIREMENT in requirements
        or requirementsmod.CHANGELOGV2_REQUIREMENT in requirements
    ):
        features.add(repository.REPO_FEATURE_SIDE_DATA)
        # the revlogv2 docket introduced race condition that we need to fix
        features.discard(repository.REPO_FEATURE_STREAM_CLONE)

    # The cache vfs is used to manage cache files.
    cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
    cachevfs.createmode = store.createmode
    # The cache vfs is used to manage cache files related to the working copy
    wcachevfs = vfsmod.vfs(wcachepath, cacheaudited=True)
    wcachevfs.createmode = store.createmode

    # Now resolve the type for the repository object. We do this by repeatedly
    # calling a factory function to produces types for specific aspects of the
    # repo's operation. The aggregate returned types are used as base classes
    # for a dynamically-derived type, which will represent our new repository.

    bases = []
    extrastate = {}

    for iface, fn in REPO_INTERFACES:
        # We pass all potentially useful state to give extensions tons of
        # flexibility.
        typ = fn()(
            ui=ui,
            intents=intents,
            requirements=requirements,
            features=features,
            wdirvfs=wdirvfs,
            hgvfs=hgvfs,
            store=store,
            storevfs=storevfs,
            storeoptions=storevfs.options,
            cachevfs=cachevfs,
            wcachevfs=wcachevfs,
            extensionmodulenames=extensionmodulenames,
            extrastate=extrastate,
            baseclasses=bases,
        )

        if not isinstance(typ, type):
            raise error.ProgrammingError(
                b'unable to construct type for %s' % iface
            )

        bases.append(typ)

    # type() allows you to use characters in type names that wouldn't be
    # recognized as Python symbols in source code. We abuse that to add
    # rich information about our constructed repo.
    name = pycompat.sysstr(
        b'derivedrepo:%s<%s>' % (wdirvfs.base, b','.join(sorted(requirements)))
    )

    cls = type(name, tuple(bases), {})

    return cls(
        baseui=baseui,
        ui=ui,
        origroot=path,
        wdirvfs=wdirvfs,
        hgvfs=hgvfs,
        requirements=requirements,
        supportedrequirements=supportedrequirements,
        sharedpath=storebasepath,
        store=store,
        cachevfs=cachevfs,
        wcachevfs=wcachevfs,
        features=features,
        intents=intents,
    )
849 849
850 850
def loadhgrc(
    ui,
    wdirvfs: vfsmod.vfs,
    hgvfs: vfsmod.vfs,
    requirements,
    sharedvfs: Optional[vfsmod.vfs] = None,
):
    """Load hgrc files/content into a ui instance.

    This is called during repository opening to load any additional
    config files or settings relevant to the current repository.

    Returns a bool indicating whether any additional configs were loaded.

    Extensions should monkeypatch this function to modify how per-repo
    configs are loaded. For example, an extension may wish to pull in
    configs from alternate files or sources.

    sharedvfs is vfs object pointing to source repo if the current one is a
    shared one
    """
    if not rcutil.use_repo_hgrc():
        return False

    # Configs are read in priority order: the share source first (only for
    # share-safe shares), then the local .hg/hgrc, then .hg/hgrc-not-shared.
    candidates = []
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements and sharedvfs:
        candidates.append((sharedvfs.join(b'hgrc'), sharedvfs.base))
    candidates.append((hgvfs.join(b'hgrc'), wdirvfs.base))
    candidates.append((hgvfs.join(b'hgrc-not-shared'), wdirvfs.base))

    loaded = False
    for rcfile, root in candidates:
        try:
            ui.readconfig(rcfile, root=root)
            loaded = True
        except IOError:
            # A missing or unreadable file is not an error; just skip it.
            pass

    return loaded
897 897
898 898
def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
    """Perform additional actions after .hg/hgrc is loaded.

    This function is called during repository loading immediately after
    the .hg/hgrc file is loaded and before per-repo extensions are loaded.

    The function can be used to validate configs, automatically add
    options (including extensions) based on requirements, etc.
    """

    # Extensions that are loaded automatically when the matching requirement
    # is present in the repository.
    autoextensions = {
        b'git': [b'git'],
        b'largefiles': [b'largefiles'],
        b'lfs': [b'lfs'],
    }

    for requirement, names in sorted(autoextensions.items()):
        if requirement in requirements:
            for name in names:
                # Respect any explicit user configuration of the extension.
                if not ui.hasconfig(b'extensions', name):
                    ui.setconfig(b'extensions', name, b'', source=b'autoload')
924 924
925 925
def gathersupportedrequirements(ui):
    """Determine the complete set of recognized requirements."""
    # Everything this module supports natively.
    supported = set(localrepository._basesupported)

    # Let ``featuresetupfuncs`` registered by *loaded* extensions extend
    # the set for this ui instance.
    modules = {m.__name__ for n, m in extensions.extensions(ui)}
    for fn in featuresetupfuncs:
        if fn.__module__ in modules:
            fn(ui, supported)

    # Add derived requirements from registered compression engines.
    for name in util.compengines:
        engine = util.compengines[name]
        if not (engine.available() and engine.revlogheader()):
            continue
        supported.add(b'exp-compression-%s' % name)
        if engine.name() == b'zstd':
            supported.add(requirementsmod.REVLOG_COMPRESSION_ZSTD)

    return supported
948 948
949 949
def ensurerequirementsrecognized(requirements, supported):
    """Validate that a set of local requirements is recognized.

    Receives a set of requirements. Raises an ``error.RequirementError``
    if any requirement in that set is not recognized by the currently
    loaded code.
    """
    unknown = {req for req in requirements if req not in supported}

    for req in unknown:
        # a requirement must start with an alphanumeric character;
        # anything else means the .hg/requires file itself is mangled
        if not req or not req[0:1].isalnum():
            raise error.RequirementError(_(b'.hg/requires file is corrupt'))

    if unknown:
        raise error.RequirementError(
            _(b'repository requires features unknown to this Mercurial: %s')
            % b' '.join(sorted(unknown)),
            hint=_(
                b'see https://mercurial-scm.org/wiki/MissingRequirement '
                b'for more information'
            ),
        )
979 979
980 980
def ensurerequirementscompatible(ui, requirements):
    """Validates that a set of recognized requirements is mutually compatible.

    Some requirements may not be compatible with others or require
    config options that aren't enabled. This function is called during
    repository opening to ensure that the set of requirements needed
    to open a repository is sane and compatible with config options.

    Extensions can monkeypatch this function to perform additional
    checking.

    ``error.RepoError`` should be raised on failure.
    """
    uses_sparse = requirementsmod.SPARSE_REQUIREMENT in requirements
    if uses_sparse and not sparse.enabled:
        raise error.RepoError(
            _(
                b'repository is using sparse feature but '
                b'sparse is not enabled; enable the '
                b'"sparse" extensions to access'
            )
        )
1005 1005
1006 1006
def makestore(requirements, path, vfstype):
    """Construct a storage object for a repository."""
    # oldest layout: everything directly under .hg/, no encoding at all
    if requirementsmod.STORE_REQUIREMENT not in requirements:
        return storemod.basicstore(path, vfstype)

    # store without fncache: filename-encoded store
    if requirementsmod.FNCACHE_REQUIREMENT not in requirements:
        return storemod.encodedstore(path, vfstype)

    dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements
    return storemod.fncachestore(path, vfstype, dotencode)
1017 1017
1018 1018
def resolvestorevfsoptions(ui, requirements, features):
    """Resolve the options to pass to the store vfs opener.

    The returned dict is used to influence behavior of the storage layer.
    """
    options = {}

    if requirementsmod.TREEMANIFEST_REQUIREMENT in requirements:
        options[b'treemanifest'] = True

    # experimental config: format.manifestcachesize
    manifest_cache_size = ui.configint(b'format', b'manifestcachesize')
    if manifest_cache_size is not None:
        options[b'manifestcachesize'] = manifest_cache_size

    # In the absence of another requirement superseding a revlog-related
    # requirement, we have to assume the repo is using revlog version 0.
    # This revlog format is super old and we don't bother trying to parse
    # opener options for it because those options wouldn't do anything
    # meaningful on such old repos.
    has_modern_revlog = (
        requirementsmod.REVLOGV1_REQUIREMENT in requirements
        or requirementsmod.REVLOGV2_REQUIREMENT in requirements
    )
    if has_modern_revlog:
        options.update(resolverevlogstorevfsoptions(ui, requirements, features))
    else:
        # explicitly mark repo as using revlogv0
        options[b'revlogv0'] = True

    if requirementsmod.COPIESSDC_REQUIREMENT in requirements:
        options[b'copies-storage'] = b'changeset-sidedata'
    else:
        write_to = ui.config(b'experimental', b'copies.write-to')
        if write_to in (b'changeset-only', b'compatibility'):
            options[b'copies-storage'] = b'extra'

    return options
1056 1056
1057 1057
def resolverevlogstorevfsoptions(ui, requirements, features):
    """Resolve opener options specific to revlogs.

    Reads format/storage/experimental config knobs and the repository
    requirements, and returns a dict of options consumed by the revlog
    storage layer. May emit warnings or abort when a slow-path policy
    forbids opening the repository without a fast implementation.
    """

    options = {}
    options[b'flagprocessors'] = {}

    # revlog version flags derived straight from the requirements set
    if requirementsmod.REVLOGV1_REQUIREMENT in requirements:
        options[b'revlogv1'] = True
    if requirementsmod.REVLOGV2_REQUIREMENT in requirements:
        options[b'revlogv2'] = True
    if requirementsmod.CHANGELOGV2_REQUIREMENT in requirements:
        options[b'changelogv2'] = True
        cmp_rank = ui.configbool(b'experimental', b'changelog-v2.compute-rank')
        options[b'changelogv2.compute-rank'] = cmp_rank

    if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
        options[b'generaldelta'] = True

    # experimental config: format.chunkcachesize
    chunkcachesize = ui.configint(b'format', b'chunkcachesize')
    if chunkcachesize is not None:
        options[b'chunkcachesize'] = chunkcachesize

    deltabothparents = ui.configbool(
        b'storage', b'revlog.optimize-delta-parent-choice'
    )
    options[b'deltabothparents'] = deltabothparents
    dps_cgds = ui.configint(
        b'storage',
        b'revlog.delta-parent-search.candidate-group-chunk-size',
    )
    options[b'delta-parent-search.candidate-group-chunk-size'] = dps_cgds
    options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')

    issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
    options[b'issue6528.fix-incoming'] = issue6528

    lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta')
    lazydeltabase = False
    if lazydelta:
        lazydeltabase = ui.configbool(
            b'storage', b'revlog.reuse-external-delta-parent'
        )
    # configbool may return None (no explicit default); fall back to the
    # generaldelta-derived setting in that case
    if lazydeltabase is None:
        lazydeltabase = not scmutil.gddeltaconfig(ui)
    options[b'lazydelta'] = lazydelta
    options[b'lazydeltabase'] = lazydeltabase

    chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
    if 0 <= chainspan:
        options[b'maxdeltachainspan'] = chainspan

    mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold')
    if mmapindexthreshold is not None:
        options[b'mmapindexthreshold'] = mmapindexthreshold

    withsparseread = ui.configbool(b'experimental', b'sparse-read')
    srdensitythres = float(
        ui.config(b'experimental', b'sparse-read.density-threshold')
    )
    srmingapsize = ui.configbytes(b'experimental', b'sparse-read.min-gap-size')
    options[b'with-sparse-read'] = withsparseread
    options[b'sparse-read-density-threshold'] = srdensitythres
    options[b'sparse-read-min-gap-size'] = srmingapsize

    sparserevlog = requirementsmod.SPARSEREVLOG_REQUIREMENT in requirements
    options[b'sparse-revlog'] = sparserevlog
    if sparserevlog:
        # sparse-revlog implies generaldelta
        options[b'generaldelta'] = True

    maxchainlen = None
    if sparserevlog:
        maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
    # experimental config: format.maxchainlen
    maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
    if maxchainlen is not None:
        options[b'maxchainlen'] = maxchainlen

    for r in requirements:
        # we allow multiple compression engine requirement to co-exist because
        # strickly speaking, revlog seems to support mixed compression style.
        #
        # The compression used for new entries will be "the last one"
        prefix = r.startswith
        if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
            options[b'compengine'] = r.split(b'-', 2)[2]

    # validate the compression-level configs against the engines' ranges
    options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level')
    if options[b'zlib.level'] is not None:
        if not (0 <= options[b'zlib.level'] <= 9):
            msg = _(b'invalid value for `storage.revlog.zlib.level` config: %d')
            raise error.Abort(msg % options[b'zlib.level'])
    options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level')
    if options[b'zstd.level'] is not None:
        if not (0 <= options[b'zstd.level'] <= 22):
            msg = _(b'invalid value for `storage.revlog.zstd.level` config: %d')
            raise error.Abort(msg % options[b'zstd.level'])

    if requirementsmod.NARROW_REQUIREMENT in requirements:
        options[b'enableellipsis'] = True

    if ui.configbool(b'experimental', b'rust.index'):
        options[b'rust.index'] = True
    if requirementsmod.NODEMAP_REQUIREMENT in requirements:
        slow_path = ui.config(
            b'storage', b'revlog.persistent-nodemap.slow-path'
        )
        # unrecognized policy value: warn and fall back to the default
        if slow_path not in (b'allow', b'warn', b'abort'):
            default = ui.config_default(
                b'storage', b'revlog.persistent-nodemap.slow-path'
            )
            msg = _(
                b'unknown value for config '
                b'"storage.revlog.persistent-nodemap.slow-path": "%s"\n'
            )
            ui.warn(msg % slow_path)
            if not ui.quiet:
                ui.warn(_(b'falling back to default value: %s\n') % default)
            slow_path = default

        msg = _(
            b"accessing `persistent-nodemap` repository without associated "
            b"fast implementation."
        )
        hint = _(
            b"check `hg help config.format.use-persistent-nodemap` "
            b"for details"
        )
        if not revlog.HAS_FAST_PERSISTENT_NODEMAP:
            if slow_path == b'warn':
                msg = b"warning: " + msg + b'\n'
                ui.warn(msg)
                if not ui.quiet:
                    hint = b'(' + hint + b')\n'
                    ui.warn(hint)
            if slow_path == b'abort':
                raise error.Abort(msg, hint=hint)
        options[b'persistent-nodemap'] = True
    if requirementsmod.DIRSTATE_V2_REQUIREMENT in requirements:
        slow_path = ui.config(b'storage', b'dirstate-v2.slow-path')
        # same policy validation dance as for persistent-nodemap above
        if slow_path not in (b'allow', b'warn', b'abort'):
            default = ui.config_default(b'storage', b'dirstate-v2.slow-path')
            msg = _(b'unknown value for config "dirstate-v2.slow-path": "%s"\n')
            ui.warn(msg % slow_path)
            if not ui.quiet:
                ui.warn(_(b'falling back to default value: %s\n') % default)
            slow_path = default

        msg = _(
            b"accessing `dirstate-v2` repository without associated "
            b"fast implementation."
        )
        hint = _(
            b"check `hg help config.format.use-dirstate-v2` " b"for details"
        )
        if not dirstate.HAS_FAST_DIRSTATE_V2:
            if slow_path == b'warn':
                msg = b"warning: " + msg + b'\n'
                ui.warn(msg)
                if not ui.quiet:
                    hint = b'(' + hint + b')\n'
                    ui.warn(hint)
            if slow_path == b'abort':
                raise error.Abort(msg, hint=hint)
    if ui.configbool(b'storage', b'revlog.persistent-nodemap.mmap'):
        options[b'persistent-nodemap.mmap'] = True
    if ui.configbool(b'devel', b'persistent-nodemap'):
        options[b'devel-force-nodemap'] = True

    return options
1228 1228
1229 1229
def makemain(**kwargs):
    """Produce a type conforming to ``ilocalrepositorymain``.

    ``kwargs`` are accepted for factory-interface compatibility and are
    ignored by this stock implementation.
    """
    return localrepository
1233 1233
1234 1234
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlogfilestorage:
    """File storage when using revlogs."""

    def file(self, path):
        """Obtain a filelog for the given store path."""
        # tolerate paths given with a single leading slash
        if path[0:1] == b'/':
            path = path[1:]

        return filelog.filelog(self.svfs, path)
1244 1244
1245 1245
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlognarrowfilestorage:
    """File storage when using revlogs and narrow files."""

    def file(self, path):
        """Obtain a narrow-aware filelog for the given store path."""
        # tolerate paths given with a single leading slash
        if path[0:1] == b'/':
            path = path[1:]

        return filelog.narrowfilelog(self.svfs, path, self._storenarrowmatch)
1255 1255
1256 1256
def makefilestorage(requirements, features, **kwargs):
    """Produce a type conforming to ``ilocalrepositoryfilestorage``."""
    features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE)
    features.add(repository.REPO_FEATURE_STREAM_CLONE)

    # narrow repositories need a storage class aware of the narrow matcher
    if requirementsmod.NARROW_REQUIREMENT in requirements:
        return revlognarrowfilestorage
    return revlogfilestorage
1266 1266
1267 1267
# List of repository interfaces and factory functions for them. Each
# will be called in order during ``makelocalrepository()`` to iteratively
# derive the final type for a local repository instance. We capture the
# function as a lambda so we don't hold a reference and the module-level
# functions can be wrapped. (Because the lambda is evaluated at use time,
# monkeypatching ``makemain``/``makefilestorage`` later still takes effect.)
REPO_INTERFACES = [
    (repository.ilocalrepositorymain, lambda: makemain),
    (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
]
1277 1277
1278 1278
1279 1279 @interfaceutil.implementer(repository.ilocalrepositorymain)
1280 1280 class localrepository:
1281 1281 """Main class for representing local repositories.
1282 1282
1283 1283 All local repositories are instances of this class.
1284 1284
1285 1285 Constructed on its own, instances of this class are not usable as
1286 1286 repository objects. To obtain a usable repository object, call
1287 1287 ``hg.repository()``, ``localrepo.instance()``, or
1288 1288 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
1289 1289 ``instance()`` adds support for creating new repositories.
1290 1290 ``hg.repository()`` adds more extension integration, including calling
1291 1291 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
1292 1292 used.
1293 1293 """
1294 1294
    # Requirements this implementation knows how to open natively; used as
    # the seed set by ``gathersupportedrequirements()``.
    _basesupported = {
        requirementsmod.ARCHIVED_PHASE_REQUIREMENT,
        requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT,
        requirementsmod.CHANGELOGV2_REQUIREMENT,
        requirementsmod.COPIESSDC_REQUIREMENT,
        requirementsmod.DIRSTATE_TRACKED_HINT_V1,
        requirementsmod.DIRSTATE_V2_REQUIREMENT,
        requirementsmod.DOTENCODE_REQUIREMENT,
        requirementsmod.FNCACHE_REQUIREMENT,
        requirementsmod.GENERALDELTA_REQUIREMENT,
        requirementsmod.INTERNAL_PHASE_REQUIREMENT,
        requirementsmod.NODEMAP_REQUIREMENT,
        requirementsmod.RELATIVE_SHARED_REQUIREMENT,
        requirementsmod.REVLOGV1_REQUIREMENT,
        requirementsmod.REVLOGV2_REQUIREMENT,
        requirementsmod.SHARED_REQUIREMENT,
        requirementsmod.SHARESAFE_REQUIREMENT,
        requirementsmod.SPARSE_REQUIREMENT,
        requirementsmod.SPARSEREVLOG_REQUIREMENT,
        requirementsmod.STORE_REQUIREMENT,
        requirementsmod.TREEMANIFEST_REQUIREMENT,
    }

    # list of prefix for file which can be written without 'wlock'
    # Extensions should extend this list when needed
    # (consulted by the vfs ward installed in _getvfsward)
    _wlockfreeprefix = {
        # We migh consider requiring 'wlock' for the next
        # two, but pretty much all the existing code assume
        # wlock is not needed so we keep them excluded for
        # now.
        b'hgrc',
        b'requires',
        # XXX cache is a complicatged business someone
        # should investigate this in depth at some point
        b'cache/',
        # XXX bisect was still a bit too messy at the time
        # this changeset was introduced. Someone should fix
        # the remainig bit and drop this line
        b'bisect.state',
    }
1335 1335
    def __init__(
        self,
        baseui,
        ui,
        origroot: bytes,
        wdirvfs: vfsmod.vfs,
        hgvfs: vfsmod.vfs,
        requirements,
        supportedrequirements,
        sharedpath: bytes,
        store,
        cachevfs: vfsmod.vfs,
        wcachevfs: vfsmod.vfs,
        features,
        intents=None,
    ):
        """Create a new local repository instance.

        Most callers should use ``hg.repository()``, ``localrepo.instance()``,
        or ``localrepo.makelocalrepository()`` for obtaining a new repository
        object.

        Arguments:

        baseui
           ``ui.ui`` instance that ``ui`` argument was based off of.

        ui
           ``ui.ui`` instance for use by the repository.

        origroot
           ``bytes`` path to working directory root of this repository.

        wdirvfs
           ``vfs.vfs`` rooted at the working directory.

        hgvfs
           ``vfs.vfs`` rooted at .hg/

        requirements
           ``set`` of bytestrings representing repository opening requirements.

        supportedrequirements
           ``set`` of bytestrings representing repository requirements that we
           know how to open. May be a superset of ``requirements``.

        sharedpath
           ``bytes`` Defining path to storage base directory. Points to a
           ``.hg/`` directory somewhere.

        store
           ``store.basicstore`` (or derived) instance providing access to
           versioned storage.

        cachevfs
           ``vfs.vfs`` used for cache files.

        wcachevfs
           ``vfs.vfs`` used for cache files related to the working copy.

        features
           ``set`` of bytestrings defining features/capabilities of this
           instance.

        intents
           ``set`` of system strings indicating what this repo will be used
           for.
        """
        self.baseui = baseui
        self.ui = ui
        self.origroot = origroot
        # vfs rooted at working directory.
        self.wvfs = wdirvfs
        self.root = wdirvfs.base
        # vfs rooted at .hg/. Used to access most non-store paths.
        self.vfs = hgvfs
        self.path = hgvfs.base
        self.requirements = requirements
        self.nodeconstants = sha1nodeconstants
        self.nullid = self.nodeconstants.nullid
        self.supported = supportedrequirements
        self.sharedpath = sharedpath
        self.store = store
        self.cachevfs = cachevfs
        self.wcachevfs = wcachevfs
        self.features = features

        # name of the repoview filter in effect; None on the unfiltered repo
        self.filtername = None

        if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
            b'devel', b'check-locks'
        ):
            self.vfs.audit = self._getvfsward(self.vfs.audit)
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []

        color.setup(self.ui)

        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sjoin = self.store.join
        if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
            b'devel', b'check-locks'
        ):
            if util.safehasattr(self.svfs, b'vfs'):  # this is filtervfs
                self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
            else:  # standard vfs
                self.svfs.audit = self._getsvfsward(self.svfs.audit)

        self._dirstatevalidatewarned = False

        self._branchcaches = branchmap.BranchMapCache()
        self._revbranchcache = None
        self._filterpats = {}
        self._datafilters = {}
        # references to the current transaction, lock and wlock
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}

        # lazily created by the ``dirstate`` property
        self._dirstate = None
        # post-dirstate-status hooks
        self._postdsstatus = []

        self._pending_narrow_pats = None
        self._pending_narrow_pats_dirstate = None

        # generic mapping between names and nodes
        self.names = namespaces.namespaces()

        # Key to signature value.
        self._sparsesignaturecache = {}
        # Signature to cached matcher instance.
        self._sparsematchercache = {}

        self._extrafilterid = repoview.extrafilter(ui)

        self.filecopiesmode = None
        if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements:
            self.filecopiesmode = b'changeset-sidedata'

        self._wanted_sidedata = set()
        self._sidedata_computers = {}
        sidedatamod.set_sidedata_spec_for_repo(self)
1494 1494
    def _getvfsward(self, origfunc):
        """build a ward for self.vfs

        Wraps the vfs audit function so that, in developer-warning mode,
        writes under .hg/ performed without the appropriate lock emit a
        ``develwarn``. The wrapped function's return value is passed
        through unchanged.
        """
        # weakref so the ward does not keep the repository alive
        rref = weakref.ref(self)

        def checkvfs(path, mode=None):
            ret = origfunc(path, mode=mode)
            repo = rref()
            # bail out if the repo is gone or not fully constructed yet
            if (
                repo is None
                or not util.safehasattr(repo, b'_wlockref')
                or not util.safehasattr(repo, b'_lockref')
            ):
                return
            # read-only access never requires a lock
            if mode in (None, b'r', b'rb'):
                return
            if path.startswith(repo.path):
                # truncate name relative to the repository (.hg)
                path = path[len(repo.path) + 1 :]
            if path.startswith(b'cache/'):
                msg = b'accessing cache with vfs instead of cachevfs: "%s"'
                repo.ui.develwarn(msg % path, stacklevel=3, config=b"cache-vfs")
            # path prefixes covered by 'lock'
            vfs_path_prefixes = (
                b'journal.',
                b'undo.',
                b'strip-backup/',
                b'cache/',
            )
            if any(path.startswith(prefix) for prefix in vfs_path_prefixes):
                if repo._currentlock(repo._lockref) is None:
                    repo.ui.develwarn(
                        b'write with no lock: "%s"' % path,
                        stacklevel=3,
                        config=b'check-locks',
                    )
            elif repo._currentlock(repo._wlockref) is None:
                # rest of vfs files are covered by 'wlock'
                #
                # exclude special files
                for prefix in self._wlockfreeprefix:
                    if path.startswith(prefix):
                        return
                repo.ui.develwarn(
                    b'write with no wlock: "%s"' % path,
                    stacklevel=3,
                    config=b'check-locks',
                )
            return ret

        return checkvfs
1545 1545
1546 1546 def _getsvfsward(self, origfunc):
1547 1547 """build a ward for self.svfs"""
1548 1548 rref = weakref.ref(self)
1549 1549
1550 1550 def checksvfs(path, mode=None):
1551 1551 ret = origfunc(path, mode=mode)
1552 1552 repo = rref()
1553 1553 if repo is None or not util.safehasattr(repo, b'_lockref'):
1554 1554 return
1555 1555 if mode in (None, b'r', b'rb'):
1556 1556 return
1557 1557 if path.startswith(repo.sharedpath):
1558 1558 # truncate name relative to the repository (.hg)
1559 1559 path = path[len(repo.sharedpath) + 1 :]
1560 1560 if repo._currentlock(repo._lockref) is None:
1561 1561 repo.ui.develwarn(
1562 1562 b'write with no lock: "%s"' % path, stacklevel=4
1563 1563 )
1564 1564 return ret
1565 1565
1566 1566 return checksvfs
1567 1567
1568 1568 @property
1569 1569 def vfs_map(self):
1570 1570 return {
1571 1571 b'': self.svfs,
1572 1572 b'plain': self.vfs,
1573 1573 b'store': self.svfs,
1574 1574 }
1575 1575
    def close(self):
        """Close the repository, flushing pending in-memory caches to disk."""
        self._writecaches()
1578 1578
1579 1579 def _writecaches(self):
1580 1580 if self._revbranchcache:
1581 1581 self._revbranchcache.write()
1582 1582
1583 1583 def _restrictcapabilities(self, caps):
1584 1584 if self.ui.configbool(b'experimental', b'bundle2-advertise'):
1585 1585 caps = set(caps)
1586 1586 capsblob = bundle2.encodecaps(
1587 1587 bundle2.getrepocaps(self, role=b'client')
1588 1588 )
1589 1589 caps.add(b'bundle2=' + urlreq.quote(capsblob))
1590 1590 if self.ui.configbool(b'experimental', b'narrow'):
1591 1591 caps.add(wireprototypes.NARROWCAP)
1592 1592 return caps
1593 1593
1594 1594 # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
1595 1595 # self -> auditor -> self._checknested -> self
1596 1596
    @property
    def auditor(self):
        # This is only used by context.workingctx.match in order to
        # detect files in subrepos.
        # A fresh auditor is built on each access; caching it would create
        # a reference cycle through self._checknested.
        return pathutil.pathauditor(self.root, callback=self._checknested)
1602 1602
    @property
    def nofsauditor(self):
        # This is only used by context.basectx.match in order to detect
        # files in subrepos.
        # realfs=False: filesystem state is not consulted for this auditor.
        return pathutil.pathauditor(
            self.root, callback=self._checknested, realfs=False, cached=True
        )
1610 1610
1611 1611 def _checknested(self, path):
1612 1612 """Determine if path is a legal nested repository."""
1613 1613 if not path.startswith(self.root):
1614 1614 return False
1615 1615 subpath = path[len(self.root) + 1 :]
1616 1616 normsubpath = util.pconvert(subpath)
1617 1617
1618 1618 # XXX: Checking against the current working copy is wrong in
1619 1619 # the sense that it can reject things like
1620 1620 #
1621 1621 # $ hg cat -r 10 sub/x.txt
1622 1622 #
1623 1623 # if sub/ is no longer a subrepository in the working copy
1624 1624 # parent revision.
1625 1625 #
1626 1626 # However, it can of course also allow things that would have
1627 1627 # been rejected before, such as the above cat command if sub/
1628 1628 # is a subrepository now, but was a normal directory before.
1629 1629 # The old path auditor would have rejected by mistake since it
1630 1630 # panics when it sees sub/.hg/.
1631 1631 #
1632 1632 # All in all, checking against the working copy seems sensible
1633 1633 # since we want to prevent access to nested repositories on
1634 1634 # the filesystem *now*.
1635 1635 ctx = self[None]
1636 1636 parts = util.splitpath(subpath)
1637 1637 while parts:
1638 1638 prefix = b'/'.join(parts)
1639 1639 if prefix in ctx.substate:
1640 1640 if prefix == normsubpath:
1641 1641 return True
1642 1642 else:
1643 1643 sub = ctx.sub(prefix)
1644 1644 return sub.checknested(subpath[len(prefix) + 1 :])
1645 1645 else:
1646 1646 parts.pop()
1647 1647 return False
1648 1648
    def peer(self, path=None):
        """Obtain a local peer object wrapping this repository."""
        return localpeer(self, path=path)  # not cached to avoid reference cycle
1651 1651
    def unfiltered(self):
        """Return unfiltered version of the repository

        Intended to be overwritten by filtered repo.

        On this base class the repository is already unfiltered, so the
        instance itself is returned.
        """
        return self
1657 1657
1658 1658 def filtered(self, name, visibilityexceptions=None):
1659 1659 """Return a filtered version of a repository
1660 1660
1661 1661 The `name` parameter is the identifier of the requested view. This
1662 1662 will return a repoview object set "exactly" to the specified view.
1663 1663
1664 1664 This function does not apply recursive filtering to a repository. For
1665 1665 example calling `repo.filtered("served")` will return a repoview using
1666 1666 the "served" view, regardless of the initial view used by `repo`.
1667 1667
1668 1668 In other word, there is always only one level of `repoview` "filtering".
1669 1669 """
1670 1670 if self._extrafilterid is not None and b'%' not in name:
1671 1671 name = name + b'%' + self._extrafilterid
1672 1672
1673 1673 cls = repoview.newtype(self.unfiltered().__class__)
1674 1674 return cls(self, name, visibilityexceptions)
1675 1675
    @mixedrepostorecache(
        (b'bookmarks', b'plain'),
        (b'bookmarks.current', b'plain'),
        (b'bookmarks', b''),
        (b'00changelog.i', b''),
    )
    def _bookmarks(self):
        """The bookmark store for this repository (a ``bookmarks.bmstore``)."""
        # Since the multiple files involved in the transaction cannot be
        # written atomically (with current repository format), there is a race
        # condition here.
        #
        # 1) changelog content A is read
        # 2) outside transaction update changelog to content B
        # 3) outside transaction update bookmark file referring to content B
        # 4) bookmarks file content is read and filtered against changelog-A
        #
        # When this happens, bookmarks against nodes missing from A are dropped.
        #
        # Having this happening during read is not great, but it become worse
        # when this happen during write because the bookmarks to the "unknown"
        # nodes will be dropped for good. However, writes happen within locks.
        # This locking makes it possible to have a race free consistent read.
        # For this purpose data read from disc before locking are
        # "invalidated" right after the locks are taken. This invalidations are
        # "light", the `filecache` mechanism keep the data in memory and will
        # reuse them if the underlying files did not changed. Not parsing the
        # same data multiple times helps performances.
        #
        # Unfortunately in the case describe above, the files tracked by the
        # bookmarks file cache might not have changed, but the in-memory
        # content is still "wrong" because we used an older changelog content
        # to process the on-disk data. So after locking, the changelog would be
        # refreshed but `_bookmarks` would be preserved.
        # Adding `00changelog.i` to the list of tracked file is not
        # enough, because at the time we build the content for `_bookmarks` in
        # (4), the changelog file has already diverged from the content used
        # for loading `changelog` in (1)
        #
        # To prevent the issue, we force the changelog to be explicitly
        # reloaded while computing `_bookmarks`. The data race can still happen
        # without the lock (with a narrower window), but it would no longer go
        # undetected during the lock time refresh.
        #
        # The new schedule is as follow
        #
        # 1) filecache logic detect that `_bookmarks` needs to be computed
        # 2) cachestat for `bookmarks` and `changelog` are captured (for book)
        # 3) We force `changelog` filecache to be tested
        # 4) cachestat for `changelog` are captured (for changelog)
        # 5) `_bookmarks` is computed and cached
        #
        # The step in (3) ensure we have a changelog at least as recent as the
        # cache stat computed in (1). As a result at locking time:
        #  * if the changelog did not changed since (1) -> we can reuse the data
        #  * otherwise -> the bookmarks get refreshed.
        self._refreshchangelog()
        return bookmarks.bmstore(self)
1733 1733
1734 1734 def _refreshchangelog(self):
1735 1735 """make sure the in memory changelog match the on-disk one"""
1736 1736 if 'changelog' in vars(self) and self.currenttransaction() is None:
1737 1737 del self.changelog
1738 1738
    @property
    def _activebookmark(self):
        """The active bookmark (delegates to the bookmark store)."""
        return self._bookmarks.active
1742 1742
    # _phasesets depend on changelog. what we need is to call
    # _phasecache.invalidate() if '00changelog.i' was changed, but it
    # can't be easily expressed in filecache mechanism.
    @storecache(b'phaseroots', b'00changelog.i')
    def _phasecache(self):
        """A phasecache object holding phase data for this repository."""
        return phases.phasecache(self, self._phasedefaults)
1749 1749
    @storecache(b'obsstore')
    def obsstore(self):
        """The obsolescence-marker store for this repository."""
        return obsolete.makestore(self.ui, self)
1753 1753
    @changelogcache()
    def changelog(repo):
        """The changelog for this repository.

        Note: the argument is named ``repo`` (not ``self``) because this
        function is wrapped by the ``changelogcache`` decorator.
        """
        # load dirstate before changelog to avoid race see issue6303
        repo.dirstate.prefetch_parents()
        return repo.store.changelog(
            txnutil.mayhavepending(repo.root),
            concurrencychecker=revlogchecker.get_checker(repo.ui, b'changelog'),
        )
1762 1762
    @manifestlogcache()
    def manifestlog(self):
        """The manifest log, narrowed by the store-level narrow matcher."""
        return self.store.manifestlog(self, self._storenarrowmatch)
1766 1766
1767 1767 @unfilteredpropertycache
1768 1768 def dirstate(self):
1769 1769 if self._dirstate is None:
1770 1770 self._dirstate = self._makedirstate()
1771 1771 else:
1772 1772 self._dirstate.refresh()
1773 1773 return self._dirstate
1774 1774
1775 1775 def _makedirstate(self):
1776 1776 """Extension point for wrapping the dirstate per-repo."""
1777 1777 sparsematchfn = None
1778 1778 if sparse.use_sparse(self):
1779 1779 sparsematchfn = lambda: sparse.matcher(self)
1780 1780 v2_req = requirementsmod.DIRSTATE_V2_REQUIREMENT
1781 1781 th = requirementsmod.DIRSTATE_TRACKED_HINT_V1
1782 1782 use_dirstate_v2 = v2_req in self.requirements
1783 1783 use_tracked_hint = th in self.requirements
1784 1784
1785 1785 return dirstate.dirstate(
1786 1786 self.vfs,
1787 1787 self.ui,
1788 1788 self.root,
1789 1789 self._dirstatevalidate,
1790 1790 sparsematchfn,
1791 1791 self.nodeconstants,
1792 1792 use_dirstate_v2,
1793 1793 use_tracked_hint=use_tracked_hint,
1794 1794 )
1795 1795
1796 1796 def _dirstatevalidate(self, node):
1797 1797 try:
1798 1798 self.changelog.rev(node)
1799 1799 return node
1800 1800 except error.LookupError:
1801 1801 if not self._dirstatevalidatewarned:
1802 1802 self._dirstatevalidatewarned = True
1803 1803 self.ui.warn(
1804 1804 _(b"warning: ignoring unknown working parent %s!\n")
1805 1805 % short(node)
1806 1806 )
1807 1807 return self.nullid
1808 1808
1809 1809 @storecache(narrowspec.FILENAME)
1810 1810 def narrowpats(self):
1811 1811 """matcher patterns for this repository's narrowspec
1812 1812
1813 1813 A tuple of (includes, excludes).
1814 1814 """
1815 1815 # the narrow management should probably move into its own object
1816 1816 val = self._pending_narrow_pats
1817 1817 if val is None:
1818 1818 val = narrowspec.load(self)
1819 1819 return val
1820 1820
1821 1821 @storecache(narrowspec.FILENAME)
1822 1822 def _storenarrowmatch(self):
1823 1823 if requirementsmod.NARROW_REQUIREMENT not in self.requirements:
1824 1824 return matchmod.always()
1825 1825 include, exclude = self.narrowpats
1826 1826 return narrowspec.match(self.root, include=include, exclude=exclude)
1827 1827
1828 1828 @storecache(narrowspec.FILENAME)
1829 1829 def _narrowmatch(self):
1830 1830 if requirementsmod.NARROW_REQUIREMENT not in self.requirements:
1831 1831 return matchmod.always()
1832 1832 narrowspec.checkworkingcopynarrowspec(self)
1833 1833 include, exclude = self.narrowpats
1834 1834 return narrowspec.match(self.root, include=include, exclude=exclude)
1835 1835
1836 1836 def narrowmatch(self, match=None, includeexact=False):
1837 1837 """matcher corresponding the the repo's narrowspec
1838 1838
1839 1839 If `match` is given, then that will be intersected with the narrow
1840 1840 matcher.
1841 1841
1842 1842 If `includeexact` is True, then any exact matches from `match` will
1843 1843 be included even if they're outside the narrowspec.
1844 1844 """
1845 1845 if match:
1846 1846 if includeexact and not self._narrowmatch.always():
1847 1847 # do not exclude explicitly-specified paths so that they can
1848 1848 # be warned later on
1849 1849 em = matchmod.exact(match.files())
1850 1850 nm = matchmod.unionmatcher([self._narrowmatch, em])
1851 1851 return matchmod.intersectmatchers(match, nm)
1852 1852 return matchmod.intersectmatchers(match, self._narrowmatch)
1853 1853 return self._narrowmatch
1854 1854
    def setnarrowpats(self, newincludes, newexcludes):
        """Persist a new narrowspec and invalidate all cached data."""
        narrowspec.save(self, newincludes, newexcludes)
        self.invalidate(clearfilecache=True)
1858 1858
    @unfilteredpropertycache
    def _quick_access_changeid_null(self):
        """Fast-path lookup table for the spellings of the null revision."""
        return {
            b'null': (nullrev, self.nodeconstants.nullid),
            nullrev: (nullrev, self.nodeconstants.nullid),
            self.nullid: (nullrev, self.nullid),
        }
1866 1866
    @unfilteredpropertycache
    def _quick_access_changeid_wc(self):
        """Fast-path lookup table that also covers the working-copy parents.

        Extends the null-revision table with entries for the dirstate
        parents, their parents, and the ``.`` symbol.
        """
        # also fast path access to the working copy parents
        # however, only do it for filter that ensure wc is visible.
        quick = self._quick_access_changeid_null.copy()
        cl = self.unfiltered().changelog
        for node in self.dirstate.parents():
            if node == self.nullid:
                continue
            rev = cl.index.get_rev(node)
            if rev is None:
                # unknown working copy parent case:
                #
                # skip the fast path and let higher code deal with it
                continue
            pair = (rev, node)
            quick[rev] = pair
            quick[node] = pair
            # also add the parents of the parents
            for r in cl.parentrevs(rev):
                if r == nullrev:
                    continue
                n = cl.node(r)
                pair = (r, n)
                quick[r] = pair
                quick[n] = pair
        p1node = self.dirstate.p1()
        if p1node != self.nullid:
            # '.' resolves to the first parent when one exists
            quick[b'.'] = quick[p1node]
        return quick
1897 1897
1898 1898 @unfilteredmethod
1899 1899 def _quick_access_changeid_invalidate(self):
1900 1900 if '_quick_access_changeid_wc' in vars(self):
1901 1901 del self.__dict__['_quick_access_changeid_wc']
1902 1902
    @property
    def _quick_access_changeid(self):
        """a helper dictionary for __getitem__ calls

        This contains a list of symbols we can recognise right away without
        further processing.
        """
        # the working-copy table is only valid for filters where wc is visible
        if self.filtername in repoview.filter_has_wc:
            return self._quick_access_changeid_wc
        return self._quick_access_changeid_null
1913 1913
    def __getitem__(self, changeid):
        """Return the context object for ``changeid``.

        ``changeid`` may be None (working context), an existing context
        (returned as-is), a slice of revisions, an integer revision, a
        binary or hex node, or a symbol known to ``_quick_access_changeid``.
        """
        # dealing with special cases
        if changeid is None:
            return context.workingctx(self)
        if isinstance(changeid, context.basectx):
            return changeid

        # dealing with multiple revisions
        if isinstance(changeid, slice):
            # wdirrev isn't contiguous so the slice shouldn't include it
            return [
                self[i]
                for i in range(*changeid.indices(len(self)))
                if i not in self.changelog.filteredrevs
            ]

        # dealing with some special values
        quick_access = self._quick_access_changeid.get(changeid)
        if quick_access is not None:
            rev, node = quick_access
            return context.changectx(self, rev, node, maybe_filtered=False)
        if changeid == b'tip':
            node = self.changelog.tip()
            rev = self.changelog.rev(node)
            return context.changectx(self, rev, node)

        # dealing with arbitrary values
        try:
            if isinstance(changeid, int):
                node = self.changelog.node(changeid)
                rev = changeid
            elif changeid == b'.':
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                node = self.dirstate.p1()
                rev = self.unfiltered().changelog.rev(node)
            elif len(changeid) == self.nodeconstants.nodelen:
                # binary node of full length
                try:
                    node = changeid
                    rev = self.changelog.rev(changeid)
                except error.FilteredLookupError:
                    changeid = hex(changeid)  # for the error message
                    raise
                except LookupError:
                    # check if it might have come from damaged dirstate
                    #
                    # XXX we could avoid the unfiltered if we had a recognizable
                    # exception for filtered changeset access
                    if (
                        self.local()
                        and changeid in self.unfiltered().dirstate.parents()
                    ):
                        msg = _(b"working directory has unknown parent '%s'!")
                        raise error.Abort(msg % short(changeid))
                    changeid = hex(changeid)  # for the error message
                    raise

            elif len(changeid) == 2 * self.nodeconstants.nodelen:
                # hexadecimal node
                node = bin(changeid)
                rev = self.changelog.rev(node)
            else:
                raise error.ProgrammingError(
                    b"unsupported changeid '%s' of type %s"
                    % (changeid, pycompat.bytestr(type(changeid)))
                )

            return context.changectx(self, rev, node)

        except (error.FilteredIndexError, error.FilteredLookupError):
            raise error.FilteredRepoLookupError(
                _(b"filtered revision '%s'") % pycompat.bytestr(changeid)
            )
        except (IndexError, LookupError):
            raise error.RepoLookupError(
                _(b"unknown revision '%s'") % pycompat.bytestr(changeid)
            )
        except error.WdirUnsupported:
            # the working directory pseudo-revision maps to a workingctx
            return context.workingctx(self)
1992 1992
1993 1993 def __contains__(self, changeid):
1994 1994 """True if the given changeid exists"""
1995 1995 try:
1996 1996 self[changeid]
1997 1997 return True
1998 1998 except error.RepoLookupError:
1999 1999 return False
2000 2000
    def __nonzero__(self):
        # a repository object is always truthy, even when it has no revisions
        return True

    __bool__ = __nonzero__
2005 2005
2006 2006 def __len__(self):
2007 2007 # no need to pay the cost of repoview.changelog
2008 2008 unfi = self.unfiltered()
2009 2009 return len(unfi.changelog)
2010 2010
    def __iter__(self):
        """Iterate over this repository's changelog."""
        return iter(self.changelog)
2013 2013
2014 2014 def revs(self, expr: bytes, *args):
2015 2015 """Find revisions matching a revset.
2016 2016
2017 2017 The revset is specified as a string ``expr`` that may contain
2018 2018 %-formatting to escape certain types. See ``revsetlang.formatspec``.
2019 2019
2020 2020 Revset aliases from the configuration are not expanded. To expand
2021 2021 user aliases, consider calling ``scmutil.revrange()`` or
2022 2022 ``repo.anyrevs([expr], user=True)``.
2023 2023
2024 2024 Returns a smartset.abstractsmartset, which is a list-like interface
2025 2025 that contains integer revisions.
2026 2026 """
2027 2027 tree = revsetlang.spectree(expr, *args)
2028 2028 return revset.makematcher(tree)(self)
2029 2029
2030 2030 def set(self, expr: bytes, *args):
2031 2031 """Find revisions matching a revset and emit changectx instances.
2032 2032
2033 2033 This is a convenience wrapper around ``revs()`` that iterates the
2034 2034 result and is a generator of changectx instances.
2035 2035
2036 2036 Revset aliases from the configuration are not expanded. To expand
2037 2037 user aliases, consider calling ``scmutil.revrange()``.
2038 2038 """
2039 2039 for r in self.revs(expr, *args):
2040 2040 yield self[r]
2041 2041
    def anyrevs(self, specs: bytes, user=False, localalias=None):
        """Find revisions matching one of the given revsets.

        Revset aliases from the configuration are not expanded by default. To
        expand user aliases, specify ``user=True``. To provide some local
        definitions overriding user aliases, set ``localalias`` to
        ``{name: definitionstring}``.
        """
        # fast paths for trivial specs that avoid the revset machinery
        if specs == [b'null']:
            return revset.baseset([nullrev])
        if specs == [b'.']:
            quick_data = self._quick_access_changeid.get(b'.')
            if quick_data is not None:
                return revset.baseset([quick_data[0]])
        if user:
            m = revset.matchany(
                self.ui,
                specs,
                lookup=revset.lookupfn(self),
                localalias=localalias,
            )
        else:
            m = revset.matchany(None, specs, localalias=localalias)
        return m(self)
2066 2066
2067 2067 def url(self) -> bytes:
2068 2068 return b'file:' + self.root
2069 2069
    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This is a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        # delegate lookup and execution to the hook module
        return hook.hook(self.ui, self, name, throw, **args)
2078 2078
    @filteredpropertycache
    def _tagscache(self):
        """Returns a tagscache object that contains various tags related
        caches."""

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache:
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # lazily-built secondary indexes; populated by nodetags()
                # and tagslist() respectively
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
2101 2101
2102 2102 def tags(self):
2103 2103 '''return a mapping of tag to node'''
2104 2104 t = {}
2105 2105 if self.changelog.filteredrevs:
2106 2106 tags, tt = self._findtags()
2107 2107 else:
2108 2108 tags = self._tagscache.tags
2109 2109 rev = self.changelog.rev
2110 2110 for k, v in tags.items():
2111 2111 try:
2112 2112 # ignore tags to unknown nodes
2113 2113 rev(v)
2114 2114 t[k] = v
2115 2115 except (error.LookupError, ValueError):
2116 2116 pass
2117 2117 return t
2118 2118
    def _findtags(self):
        """Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object."""

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        # map tag name to (node, hist)
        alltags = tagsmod.findglobaltags(self.ui, self)
        # map tag name to tag type
        tagtypes = {tag: b'global' for tag in alltags}

        # local tags are merged in and may override global entries
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for name, (node, hist) in alltags.items():
            # tags pointing at the null node are considered deleted
            if node != self.nullid:
                tags[encoding.tolocal(name)] = node
        tags[b'tip'] = self.changelog.tip()
        tagtypes = {
            encoding.tolocal(name): value for (name, value) in tagtypes.items()
        }
        return (tags, tagtypes)
2153 2153
2154 2154 def tagtype(self, tagname):
2155 2155 """
2156 2156 return the type of the given tag. result can be:
2157 2157
2158 2158 'local' : a local tag
2159 2159 'global' : a global tag
2160 2160 None : tag does not exist
2161 2161 """
2162 2162
2163 2163 return self._tagscache.tagtypes.get(tagname)
2164 2164
2165 2165 def tagslist(self):
2166 2166 '''return a list of tags ordered by revision'''
2167 2167 if not self._tagscache.tagslist:
2168 2168 l = []
2169 2169 for t, n in self.tags().items():
2170 2170 l.append((self.changelog.rev(n), t, n))
2171 2171 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
2172 2172
2173 2173 return self._tagscache.tagslist
2174 2174
2175 2175 def nodetags(self, node):
2176 2176 '''return the tags associated with a node'''
2177 2177 if not self._tagscache.nodetagscache:
2178 2178 nodetagscache = {}
2179 2179 for t, n in self._tagscache.tags.items():
2180 2180 nodetagscache.setdefault(n, []).append(t)
2181 2181 for tags in nodetagscache.values():
2182 2182 tags.sort()
2183 2183 self._tagscache.nodetagscache = nodetagscache
2184 2184 return self._tagscache.nodetagscache.get(node, [])
2185 2185
    def nodebookmarks(self, node):
        """return the list of bookmarks pointing to the specified node"""
        # delegates to the bookmark store's reverse mapping
        return self._bookmarks.names(node)
2189 2189
2190 2190 def branchmap(self):
2191 2191 """returns a dictionary {branch: [branchheads]} with branchheads
2192 2192 ordered by increasing revision number"""
2193 2193 return self._branchcaches[self]
2194 2194
    @unfilteredmethod
    def revbranchcache(self):
        """Return the rev-to-branch cache, creating it on first access."""
        if not self._revbranchcache:
            self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
        return self._revbranchcache
2200 2200
    def register_changeset(self, rev, changelogrevision):
        """Record branch data for ``rev`` in the rev-branch cache."""
        self.revbranchcache().setdata(rev, changelogrevision)
2203 2203
2204 2204 def branchtip(self, branch, ignoremissing=False):
2205 2205 """return the tip node for a given branch
2206 2206
2207 2207 If ignoremissing is True, then this method will not raise an error.
2208 2208 This is helpful for callers that only expect None for a missing branch
2209 2209 (e.g. namespace).
2210 2210
2211 2211 """
2212 2212 try:
2213 2213 return self.branchmap().branchtip(branch)
2214 2214 except KeyError:
2215 2215 if not ignoremissing:
2216 2216 raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
2217 2217 else:
2218 2218 pass
2219 2219
    def lookup(self, key):
        """Resolve the revision symbol ``key`` to a changeset node.

        Raises RepoLookupError when the symbol resolves to no node.
        """
        node = scmutil.revsymbol(self, key).node()
        if node is None:
            raise error.RepoLookupError(_(b"unknown revision '%s'") % key)
        return node
2225 2225
    def lookupbranch(self, key):
        """Return ``key`` if it names a branch, else the branch of that rev."""
        if self.branchmap().hasbranch(key):
            return key

        return scmutil.revsymbol(self, key).branch()
2231 2231
2232 2232 def known(self, nodes):
2233 2233 cl = self.changelog
2234 2234 get_rev = cl.index.get_rev
2235 2235 filtered = cl.filteredrevs
2236 2236 result = []
2237 2237 for n in nodes:
2238 2238 r = get_rev(n)
2239 2239 resp = not (r is None or r in filtered)
2240 2240 result.append(resp)
2241 2241 return result
2242 2242
    def local(self):
        """Return self; callers test the result's truthiness (see cancopy)."""
        return self
2245 2245
    def publishing(self):
        """Return whether this repository is a publishing repository."""
        # it's safe (and desirable) to trust the publish flag unconditionally
        # so that we don't finalize changes shared between users via ssh or nfs
        return self.ui.configbool(b'phases', b'publish', untrusted=True)
2250 2250
    def cancopy(self):
        """Return whether it is safe to copy this repository's files as-is."""
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        if not self.publishing():
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered(b'visible').changelog.filteredrevs
2259 2259
2260 2260 def shared(self):
2261 2261 '''the type of shared repository (None if not shared)'''
2262 2262 if self.sharedpath != self.path:
2263 2263 return b'store'
2264 2264 return None
2265 2265
    def wjoin(self, f: bytes, *insidef: bytes) -> bytes:
        """Join ``f`` (and extra segments) onto the repository root."""
        return self.vfs.reljoin(self.root, f, *insidef)
2268 2268
2269 2269 def setparents(self, p1, p2=None):
2270 2270 if p2 is None:
2271 2271 p2 = self.nullid
2272 2272 self[None].setparents(p1, p2)
2273 2273 self._quick_access_changeid_invalidate()
2274 2274
    def filectx(self, path: bytes, changeid=None, fileid=None, changectx=None):
        """Return a file context for ``path``.

        changeid must be a changeset revision, if specified.
        fileid can be a file revision or node."""
        return context.filectx(
            self, path, changeid, fileid, changectx=changectx
        )
2281 2281
    def getcwd(self) -> bytes:
        """Return the current working directory (delegates to the dirstate)."""
        return self.dirstate.getcwd()
2284 2284
    def pathto(self, f: bytes, cwd: Optional[bytes] = None) -> bytes:
        """Return a path for ``f`` suitable for display (via the dirstate)."""
        return self.dirstate.pathto(f, cwd)
2287 2287
    def _loadfilter(self, filter):
        """Return the (matcher, fn, params) triples for config section ``filter``.

        The list is computed once per section name and cached in
        ``self._filterpats``.
        """
        if filter not in self._filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == b'!':
                    # a '!' entry disables filtering for this pattern
                    continue
                mf = matchmod.match(self.root, b'', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.items():
                    if cmd.startswith(name):
                        # a registered data filter handles this command
                        fn = filterfn
                        params = cmd[len(name) :].lstrip()
                        break
                if not fn:
                    # no registered filter: treat the command as an external
                    # filter executed through procutil.filter
                    fn = lambda s, c, **kwargs: procutil.filter(s, c)
                    fn.__name__ = 'commandfilter'
                # Wrap old filters not supporting keyword arguments
                if not pycompat.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, oldfn=oldfn, **kwargs: oldfn(s, c)
                    fn.__name__ = 'compat-' + oldfn.__name__
                l.append((mf, fn, params))
            self._filterpats[filter] = l
        return self._filterpats[filter]
2313 2313
2314 2314 def _filter(self, filterpats, filename, data):
2315 2315 for mf, fn, cmd in filterpats:
2316 2316 if mf(filename):
2317 2317 self.ui.debug(
2318 2318 b"filtering %s through %s\n"
2319 2319 % (filename, cmd or pycompat.sysbytes(fn.__name__))
2320 2320 )
2321 2321 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
2322 2322 break
2323 2323
2324 2324 return data
2325 2325
    @unfilteredpropertycache
    def _encodefilterpats(self):
        # cached filter patterns from the [encode] config section
        return self._loadfilter(b'encode')
2329 2329
    @unfilteredpropertycache
    def _decodefilterpats(self):
        # cached filter patterns from the [decode] config section
        return self._loadfilter(b'decode')
2333 2333
    def adddatafilter(self, name, filter):
        """Register ``filter`` under ``name`` for use by ``_loadfilter``."""
        self._datafilters[name] = filter
2336 2336
2337 2337 def wread(self, filename: bytes) -> bytes:
2338 2338 if self.wvfs.islink(filename):
2339 2339 data = self.wvfs.readlink(filename)
2340 2340 else:
2341 2341 data = self.wvfs.read(filename)
2342 2342 return self._filter(self._encodefilterpats, filename, data)
2343 2343
2344 2344 def wwrite(
2345 2345 self,
2346 2346 filename: bytes,
2347 2347 data: bytes,
2348 2348 flags: bytes,
2349 2349 backgroundclose=False,
2350 2350 **kwargs
2351 2351 ) -> int:
2352 2352 """write ``data`` into ``filename`` in the working directory
2353 2353
2354 2354 This returns length of written (maybe decoded) data.
2355 2355 """
2356 2356 data = self._filter(self._decodefilterpats, filename, data)
2357 2357 if b'l' in flags:
2358 2358 self.wvfs.symlink(data, filename)
2359 2359 else:
2360 2360 self.wvfs.write(
2361 2361 filename, data, backgroundclose=backgroundclose, **kwargs
2362 2362 )
2363 2363 if b'x' in flags:
2364 2364 self.wvfs.setflags(filename, False, True)
2365 2365 else:
2366 2366 self.wvfs.setflags(filename, False, False)
2367 2367 return len(data)
2368 2368
    def wwritedata(self, filename: bytes, data: bytes) -> bytes:
        """Return ``data`` run through the decode filters for ``filename``."""
        return self._filter(self._decodefilterpats, filename, data)
2371 2371
2372 2372 def currenttransaction(self):
2373 2373 """return the current transaction or None if non exists"""
2374 2374 if self._transref:
2375 2375 tr = self._transref()
2376 2376 else:
2377 2377 tr = None
2378 2378
2379 2379 if tr and tr.running():
2380 2380 return tr
2381 2381 return None
2382 2382
2383 2383 def transaction(self, desc, report=None):
2384 2384 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
2385 2385 b'devel', b'check-locks'
2386 2386 ):
2387 2387 if self._currentlock(self._lockref) is None:
2388 2388 raise error.ProgrammingError(b'transaction requires locking')
2389 2389 tr = self.currenttransaction()
2390 2390 if tr is not None:
2391 2391 return tr.nest(name=desc)
2392 2392
2393 2393 # abort here if the journal already exists
2394 2394 if self.svfs.exists(b"journal"):
2395 2395 raise error.RepoError(
2396 2396 _(b"abandoned transaction found"),
2397 2397 hint=_(b"run 'hg recover' to clean up transaction"),
2398 2398 )
2399 2399
2400 2400 # At that point your dirstate should be clean:
2401 2401 #
2402 2402 # - If you don't have the wlock, why would you still have a dirty
2403 2403 # dirstate ?
2404 2404 #
2405 2405 # - If you hold the wlock, you should not be opening a transaction in
2406 2406 # the middle of a `distate.changing_*` block. The transaction needs to
2407 2407 # be open before that and wrap the change-context.
2408 2408 #
2409 2409 # - If you are not within a `dirstate.changing_*` context, why is our
2410 2410 # dirstate dirty?
2411 2411 if self.dirstate._dirty:
2412 2412 m = "cannot open a transaction with a dirty dirstate"
2413 2413 raise error.ProgrammingError(m)
2414 2414
2415 2415 idbase = b"%.40f#%f" % (random.random(), time.time())
2416 2416 ha = hex(hashutil.sha1(idbase).digest())
2417 2417 txnid = b'TXN:' + ha
2418 2418 self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
2419 2419
2420 2420 self._writejournal(desc)
2421 2421 if report:
2422 2422 rp = report
2423 2423 else:
2424 2424 rp = self.ui.warn
2425 2425 vfsmap = self.vfs_map
2426 2426 # we must avoid cyclic reference between repo and transaction.
2427 2427 reporef = weakref.ref(self)
2428 2428 # Code to track tag movement
2429 2429 #
2430 2430 # Since tags are all handled as file content, it is actually quite hard
2431 2431 # to track these movement from a code perspective. So we fallback to a
2432 2432 # tracking at the repository level. One could envision to track changes
2433 2433 # to the '.hgtags' file through changegroup apply but that fails to
2434 2434 # cope with case where transaction expose new heads without changegroup
2435 2435 # being involved (eg: phase movement).
2436 2436 #
2437 2437 # For now, We gate the feature behind a flag since this likely comes
2438 2438 # with performance impacts. The current code run more often than needed
2439 2439 # and do not use caches as much as it could. The current focus is on
2440 2440 # the behavior of the feature so we disable it by default. The flag
2441 2441 # will be removed when we are happy with the performance impact.
2442 2442 #
2443 2443 # Once this feature is no longer experimental move the following
2444 2444 # documentation to the appropriate help section:
2445 2445 #
2446 2446 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
2447 2447 # tags (new or changed or deleted tags). In addition the details of
2448 2448 # these changes are made available in a file at:
2449 2449 # ``REPOROOT/.hg/changes/tags.changes``.
2450 2450 # Make sure you check for HG_TAG_MOVED before reading that file as it
2451 2451 # might exist from a previous transaction even if no tag were touched
2452 2452 # in this one. Changes are recorded in a line base format::
2453 2453 #
2454 2454 # <action> <hex-node> <tag-name>\n
2455 2455 #
2456 2456 # Actions are defined as follow:
2457 2457 # "-R": tag is removed,
2458 2458 # "+A": tag is added,
2459 2459 # "-M": tag is moved (old value),
2460 2460 # "+M": tag is moved (new value),
2461 2461 tracktags = lambda x: None
2462 2462 # experimental config: experimental.hook-track-tags
2463 2463 shouldtracktags = self.ui.configbool(
2464 2464 b'experimental', b'hook-track-tags'
2465 2465 )
2466 2466 if desc != b'strip' and shouldtracktags:
2467 2467 oldheads = self.changelog.headrevs()
2468 2468
2469 2469 def tracktags(tr2):
2470 2470 repo = reporef()
2471 2471 assert repo is not None # help pytype
2472 2472 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
2473 2473 newheads = repo.changelog.headrevs()
2474 2474 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
2475 2475 # notes: we compare lists here.
2476 2476 # As we do it only once buiding set would not be cheaper
2477 2477 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
2478 2478 if changes:
2479 2479 tr2.hookargs[b'tag_moved'] = b'1'
2480 2480 with repo.vfs(
2481 2481 b'changes/tags.changes', b'w', atomictemp=True
2482 2482 ) as changesfile:
2483 2483 # note: we do not register the file to the transaction
2484 2484 # because we needs it to still exist on the transaction
2485 2485 # is close (for txnclose hooks)
2486 2486 tagsmod.writediff(changesfile, changes)
2487 2487
2488 2488 def validate(tr2):
2489 2489 """will run pre-closing hooks"""
2490 2490 # XXX the transaction API is a bit lacking here so we take a hacky
2491 2491 # path for now
2492 2492 #
2493 2493 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
2494 2494 # dict is copied before these run. In addition we needs the data
2495 2495 # available to in memory hooks too.
2496 2496 #
2497 2497 # Moreover, we also need to make sure this runs before txnclose
2498 2498 # hooks and there is no "pending" mechanism that would execute
2499 2499 # logic only if hooks are about to run.
2500 2500 #
2501 2501 # Fixing this limitation of the transaction is also needed to track
2502 2502 # other families of changes (bookmarks, phases, obsolescence).
2503 2503 #
2504 2504 # This will have to be fixed before we remove the experimental
2505 2505 # gating.
2506 2506 tracktags(tr2)
2507 2507 repo = reporef()
2508 2508 assert repo is not None # help pytype
2509 2509
2510 2510 singleheadopt = (b'experimental', b'single-head-per-branch')
2511 2511 singlehead = repo.ui.configbool(*singleheadopt)
2512 2512 if singlehead:
2513 2513 singleheadsub = repo.ui.configsuboptions(*singleheadopt)[1]
2514 2514 accountclosed = singleheadsub.get(
2515 2515 b"account-closed-heads", False
2516 2516 )
2517 2517 if singleheadsub.get(b"public-changes-only", False):
2518 2518 filtername = b"immutable"
2519 2519 else:
2520 2520 filtername = b"visible"
2521 2521 scmutil.enforcesinglehead(
2522 2522 repo, tr2, desc, accountclosed, filtername
2523 2523 )
2524 2524 if hook.hashook(repo.ui, b'pretxnclose-bookmark'):
2525 2525 for name, (old, new) in sorted(
2526 2526 tr.changes[b'bookmarks'].items()
2527 2527 ):
2528 2528 args = tr.hookargs.copy()
2529 2529 args.update(bookmarks.preparehookargs(name, old, new))
2530 2530 repo.hook(
2531 2531 b'pretxnclose-bookmark',
2532 2532 throw=True,
2533 2533 **pycompat.strkwargs(args)
2534 2534 )
2535 2535 if hook.hashook(repo.ui, b'pretxnclose-phase'):
2536 2536 cl = repo.unfiltered().changelog
2537 2537 for revs, (old, new) in tr.changes[b'phases']:
2538 2538 for rev in revs:
2539 2539 args = tr.hookargs.copy()
2540 2540 node = hex(cl.node(rev))
2541 2541 args.update(phases.preparehookargs(node, old, new))
2542 2542 repo.hook(
2543 2543 b'pretxnclose-phase',
2544 2544 throw=True,
2545 2545 **pycompat.strkwargs(args)
2546 2546 )
2547 2547
2548 2548 repo.hook(
2549 2549 b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
2550 2550 )
2551 2551
2552 2552 def releasefn(tr, success):
2553 2553 repo = reporef()
2554 2554 if repo is None:
2555 2555 # If the repo has been GC'd (and this release function is being
2556 2556 # called from transaction.__del__), there's not much we can do,
2557 2557 # so just leave the unfinished transaction there and let the
2558 2558 # user run `hg recover`.
2559 2559 return
2560 2560 if success:
2561 2561 # this should be explicitly invoked here, because
2562 2562 # in-memory changes aren't written out at closing
2563 2563 # transaction, if tr.addfilegenerator (via
2564 2564 # dirstate.write or so) isn't invoked while
2565 2565 # transaction running
2566 2566 repo.dirstate.write(None)
2567 2567 else:
2568 2568 # discard all changes (including ones already written
2569 2569 # out) in this transaction
2570 2570 repo.invalidate(clearfilecache=True)
2571 2571
2572 2572 tr = transaction.transaction(
2573 2573 rp,
2574 2574 self.svfs,
2575 2575 vfsmap,
2576 2576 b"journal",
2577 2577 b"undo",
2578 2578 lambda: None,
2579 2579 self.store.createmode,
2580 2580 validator=validate,
2581 2581 releasefn=releasefn,
2582 2582 checkambigfiles=_cachedfiles,
2583 2583 name=desc,
2584 2584 )
2585 2585 for vfs_id, path in self._journalfiles():
2586 2586 tr.add_journal(vfs_id, path)
2587 2587 tr.changes[b'origrepolen'] = len(self)
2588 2588 tr.changes[b'obsmarkers'] = set()
2589 2589 tr.changes[b'phases'] = []
2590 2590 tr.changes[b'bookmarks'] = {}
2591 2591
2592 2592 tr.hookargs[b'txnid'] = txnid
2593 2593 tr.hookargs[b'txnname'] = desc
2594 2594 tr.hookargs[b'changes'] = tr.changes
2595 2595 # note: writing the fncache only during finalize mean that the file is
2596 2596 # outdated when running hooks. As fncache is used for streaming clone,
2597 2597 # this is not expected to break anything that happen during the hooks.
2598 2598 tr.addfinalize(b'flush-fncache', self.store.write)
2599 2599
2600 2600 def txnclosehook(tr2):
2601 2601 """To be run if transaction is successful, will schedule a hook run"""
2602 2602 # Don't reference tr2 in hook() so we don't hold a reference.
2603 2603 # This reduces memory consumption when there are multiple
2604 2604 # transactions per lock. This can likely go away if issue5045
2605 2605 # fixes the function accumulation.
2606 2606 hookargs = tr2.hookargs
2607 2607
2608 2608 def hookfunc(unused_success):
2609 2609 repo = reporef()
2610 2610 assert repo is not None # help pytype
2611 2611
2612 2612 if hook.hashook(repo.ui, b'txnclose-bookmark'):
2613 2613 bmchanges = sorted(tr.changes[b'bookmarks'].items())
2614 2614 for name, (old, new) in bmchanges:
2615 2615 args = tr.hookargs.copy()
2616 2616 args.update(bookmarks.preparehookargs(name, old, new))
2617 2617 repo.hook(
2618 2618 b'txnclose-bookmark',
2619 2619 throw=False,
2620 2620 **pycompat.strkwargs(args)
2621 2621 )
2622 2622
2623 2623 if hook.hashook(repo.ui, b'txnclose-phase'):
2624 2624 cl = repo.unfiltered().changelog
2625 2625 phasemv = sorted(
2626 2626 tr.changes[b'phases'], key=lambda r: r[0][0]
2627 2627 )
2628 2628 for revs, (old, new) in phasemv:
2629 2629 for rev in revs:
2630 2630 args = tr.hookargs.copy()
2631 2631 node = hex(cl.node(rev))
2632 2632 args.update(phases.preparehookargs(node, old, new))
2633 2633 repo.hook(
2634 2634 b'txnclose-phase',
2635 2635 throw=False,
2636 2636 **pycompat.strkwargs(args)
2637 2637 )
2638 2638
2639 2639 repo.hook(
2640 2640 b'txnclose', throw=False, **pycompat.strkwargs(hookargs)
2641 2641 )
2642 2642
2643 2643 repo = reporef()
2644 2644 assert repo is not None # help pytype
2645 2645 repo._afterlock(hookfunc)
2646 2646
2647 2647 tr.addfinalize(b'txnclose-hook', txnclosehook)
2648 2648 # Include a leading "-" to make it happen before the transaction summary
2649 2649 # reports registered via scmutil.registersummarycallback() whose names
2650 2650 # are 00-txnreport etc. That way, the caches will be warm when the
2651 2651 # callbacks run.
2652 2652 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2653 2653
2654 2654 def txnaborthook(tr2):
2655 2655 """To be run if transaction is aborted"""
2656 2656 repo = reporef()
2657 2657 assert repo is not None # help pytype
2658 2658 repo.hook(
2659 2659 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2660 2660 )
2661 2661
2662 2662 tr.addabort(b'txnabort-hook', txnaborthook)
2663 2663 # avoid eager cache invalidation. in-memory data should be identical
2664 2664 # to stored data if transaction has no error.
2665 2665 tr.addpostclose(b'refresh-filecachestats', self._refreshfilecachestats)
2666 2666 self._transref = weakref.ref(tr)
2667 2667 scmutil.registersummarycallback(self, tr, desc)
2668 2668 # This only exist to deal with the need of rollback to have viable
2669 2669 # parents at the end of the operation. So backup viable parents at the
2670 2670 # time of this operation.
2671 2671 #
2672 2672 # We only do it when the `wlock` is taken, otherwise other might be
2673 2673 # altering the dirstate under us.
2674 2674 #
2675 2675 # This is really not a great way to do this (first, because we cannot
2676 2676 # always do it). There are more viable alternative that exists
2677 2677 #
2678 2678 # - backing only the working copy parent in a dedicated files and doing
2679 2679 # a clean "keep-update" to them on `hg rollback`.
2680 2680 #
2681 2681 # - slightly changing the behavior an applying a logic similar to "hg
2682 2682 # strip" to pick a working copy destination on `hg rollback`
2683 2683 if self.currentwlock() is not None:
2684 2684 ds = self.dirstate
2685 2685 if not self.vfs.exists(b'branch'):
2686 2686 # force a file to be written if None exist
2687 2687 ds.setbranch(b'default', None)
2688 2688
2689 2689 def backup_dirstate(tr):
2690 2690 for f in ds.all_file_names():
2691 2691 # hardlink backup is okay because `dirstate` is always
2692 2692 # atomically written and possible data file are append only
2693 2693 # and resistant to trailing data.
2694 2694 tr.addbackup(f, hardlink=True, location=b'plain')
2695 2695
2696 2696 tr.addvalidator(b'dirstate-backup', backup_dirstate)
2697 2697 return tr
2698 2698
2699 2699 def _journalfiles(self):
2700 2700 return (
2701 2701 (self.svfs, b'journal'),
2702 2702 (self.vfs, b'journal.desc'),
2703 2703 )
2704 2704
2705 2705 def undofiles(self):
2706 2706 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
2707 2707
2708 2708 @unfilteredmethod
2709 2709 def _writejournal(self, desc):
2710 2710 self.vfs.write(b"journal.desc", b"%d\n%s\n" % (len(self), desc))
2711 2711
2712 2712 def recover(self):
2713 2713 with self.lock():
2714 2714 if self.svfs.exists(b"journal"):
2715 2715 self.ui.status(_(b"rolling back interrupted transaction\n"))
2716 2716 vfsmap = self.vfs_map
2717 2717 transaction.rollback(
2718 2718 self.svfs,
2719 2719 vfsmap,
2720 2720 b"journal",
2721 2721 self.ui.warn,
2722 2722 checkambigfiles=_cachedfiles,
2723 2723 )
2724 2724 self.invalidate()
2725 2725 return True
2726 2726 else:
2727 2727 self.ui.warn(_(b"no interrupted transaction available\n"))
2728 2728 return False
2729 2729
2730 2730 def rollback(self, dryrun=False, force=False):
2731 2731 wlock = lock = None
2732 2732 try:
2733 2733 wlock = self.wlock()
2734 2734 lock = self.lock()
2735 2735 if self.svfs.exists(b"undo"):
2736 2736 return self._rollback(dryrun, force)
2737 2737 else:
2738 2738 self.ui.warn(_(b"no rollback information available\n"))
2739 2739 return 1
2740 2740 finally:
2741 2741 release(lock, wlock)
2742 2742
    @unfilteredmethod  # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Roll the repository back to its state before the last transaction.

        Reads ``undo.desc`` to report what is being undone, refuses to roll
        back a bare commit while not checked out on it (unless ``force``),
        then replays the ``undo`` journal and refreshes in-memory state.
        Returns 0 (also for a dry run).
        """
        ui = self.ui

        parents = self.dirstate.parents()
        try:
            args = self.vfs.read(b'undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = _(
                    b'repository tip rolled back to revision %d'
                    b' (undo %s: %s)\n'
                ) % (oldtip, desc, detail)
            else:
                msg = _(
                    b'repository tip rolled back to revision %d (undo %s)\n'
                ) % (oldtip, desc)
            # will a working directory parent be stripped by this rollback?
            parentgone = any(self[p].rev() > oldtip for p in parents)
        except IOError:
            # no readable undo.desc: be conservative and assume the
            # working directory parents may be going away
            msg = _(b'rolling back unknown transaction\n')
            desc = None
            parentgone = True

        if not force and self[b'.'] != self[b'tip'] and desc == b'commit':
            raise error.Abort(
                _(
                    b'rollback of last commit while not checked out '
                    b'may lose data'
                ),
                hint=_(b'use -f to force'),
            )

        ui.status(msg)
        if dryrun:
            return 0

        self.destroying()
        vfsmap = self.vfs_map
        skip_journal_pattern = None
        if not parentgone:
            # the current dirstate stays valid, so do not restore its backup
            skip_journal_pattern = RE_SKIP_DIRSTATE_ROLLBACK
        transaction.rollback(
            self.svfs,
            vfsmap,
            b'undo',
            ui.warn,
            checkambigfiles=_cachedfiles,
            skip_journal_pattern=skip_journal_pattern,
        )
        self.invalidate()
        self.dirstate.invalidate()

        if parentgone:
            # replace this with some explicit parent update in the future.
            has_node = self.changelog.index.has_node
            if not all(has_node(p) for p in self.dirstate._pl):
                # There was no dirstate to backup initially, we need to drop
                # the existing one.
                with self.dirstate.changing_parents(self):
                    self.dirstate.setparents(self.nullid)
                    self.dirstate.clear()

            parents = tuple([p.rev() for p in self[None].parents()])
            if len(parents) > 1:
                ui.status(
                    _(
                        b'working directory now based on '
                        b'revisions %d and %d\n'
                    )
                    % parents
                )
            else:
                ui.status(
                    _(b'working directory now based on revision %d\n') % parents
                )
        mergestatemod.mergestate.clean(self)

        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
2829 2829
2830 2830 def _buildcacheupdater(self, newtransaction):
2831 2831 """called during transaction to build the callback updating cache
2832 2832
2833 2833 Lives on the repository to help extension who might want to augment
2834 2834 this logic. For this purpose, the created transaction is passed to the
2835 2835 method.
2836 2836 """
2837 2837 # we must avoid cyclic reference between repo and transaction.
2838 2838 reporef = weakref.ref(self)
2839 2839
2840 2840 def updater(tr):
2841 2841 repo = reporef()
2842 2842 assert repo is not None # help pytype
2843 2843 repo.updatecaches(tr)
2844 2844
2845 2845 return updater
2846 2846
2847 2847 @unfilteredmethod
2848 2848 def updatecaches(self, tr=None, full=False, caches=None):
2849 2849 """warm appropriate caches
2850 2850
2851 2851 If this function is called after a transaction closed. The transaction
2852 2852 will be available in the 'tr' argument. This can be used to selectively
2853 2853 update caches relevant to the changes in that transaction.
2854 2854
2855 2855 If 'full' is set, make sure all caches the function knows about have
2856 2856 up-to-date data. Even the ones usually loaded more lazily.
2857 2857
2858 2858 The `full` argument can take a special "post-clone" value. In this case
2859 2859 the cache warming is made after a clone and of the slower cache might
2860 2860 be skipped, namely the `.fnodetags` one. This argument is 5.8 specific
2861 2861 as we plan for a cleaner way to deal with this for 5.9.
2862 2862 """
2863 2863 if tr is not None and tr.hookargs.get(b'source') == b'strip':
2864 2864 # During strip, many caches are invalid but
2865 2865 # later call to `destroyed` will refresh them.
2866 2866 return
2867 2867
2868 2868 unfi = self.unfiltered()
2869 2869
2870 2870 if full:
2871 2871 msg = (
2872 2872 "`full` argument for `repo.updatecaches` is deprecated\n"
2873 2873 "(use `caches=repository.CACHE_ALL` instead)"
2874 2874 )
2875 2875 self.ui.deprecwarn(msg, b"5.9")
2876 2876 caches = repository.CACHES_ALL
2877 2877 if full == b"post-clone":
2878 2878 caches = repository.CACHES_POST_CLONE
2879 2879 caches = repository.CACHES_ALL
2880 2880 elif caches is None:
2881 2881 caches = repository.CACHES_DEFAULT
2882 2882
2883 2883 if repository.CACHE_BRANCHMAP_SERVED in caches:
2884 2884 if tr is None or tr.changes[b'origrepolen'] < len(self):
2885 2885 # accessing the 'served' branchmap should refresh all the others,
2886 2886 self.ui.debug(b'updating the branch cache\n')
2887 2887 self.filtered(b'served').branchmap()
2888 2888 self.filtered(b'served.hidden').branchmap()
2889 2889 # flush all possibly delayed write.
2890 2890 self._branchcaches.write_delayed(self)
2891 2891
2892 2892 if repository.CACHE_CHANGELOG_CACHE in caches:
2893 2893 self.changelog.update_caches(transaction=tr)
2894 2894
2895 2895 if repository.CACHE_MANIFESTLOG_CACHE in caches:
2896 2896 self.manifestlog.update_caches(transaction=tr)
2897 2897
2898 2898 if repository.CACHE_REV_BRANCH in caches:
2899 2899 rbc = unfi.revbranchcache()
2900 2900 for r in unfi.changelog:
2901 2901 rbc.branchinfo(r)
2902 2902 rbc.write()
2903 2903
2904 2904 if repository.CACHE_FULL_MANIFEST in caches:
2905 2905 # ensure the working copy parents are in the manifestfulltextcache
2906 2906 for ctx in self[b'.'].parents():
2907 2907 ctx.manifest() # accessing the manifest is enough
2908 2908
2909 2909 if repository.CACHE_FILE_NODE_TAGS in caches:
2910 2910 # accessing fnode cache warms the cache
2911 2911 tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs())
2912 2912
2913 2913 if repository.CACHE_TAGS_DEFAULT in caches:
2914 2914 # accessing tags warm the cache
2915 2915 self.tags()
2916 2916 if repository.CACHE_TAGS_SERVED in caches:
2917 2917 self.filtered(b'served').tags()
2918 2918
2919 2919 if repository.CACHE_BRANCHMAP_ALL in caches:
2920 2920 # The CACHE_BRANCHMAP_ALL updates lazily-loaded caches immediately,
2921 2921 # so we're forcing a write to cause these caches to be warmed up
2922 2922 # even if they haven't explicitly been requested yet (if they've
2923 2923 # never been used by hg, they won't ever have been written, even if
2924 2924 # they're a subset of another kind of cache that *has* been used).
2925 2925 for filt in repoview.filtertable.keys():
2926 2926 filtered = self.filtered(filt)
2927 2927 filtered.branchmap().write(filtered)
2928 2928
2929 2929 def invalidatecaches(self):
2930 2930 if '_tagscache' in vars(self):
2931 2931 # can't use delattr on proxy
2932 2932 del self.__dict__['_tagscache']
2933 2933
2934 2934 self._branchcaches.clear()
2935 2935 self.invalidatevolatilesets()
2936 2936 self._sparsesignaturecache.clear()
2937 2937
    def invalidatevolatilesets(self):
        """Drop caches tied to repo filtering and obsolescence data.

        Clears the filtered-revision cache, the obsolescence caches kept by
        the ``obsolete`` module, and the quick-access changeid lookup cache.
        """
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)
        self._quick_access_changeid_invalidate()
2942 2942
2943 2943 def invalidatedirstate(self):
2944 2944 """Invalidates the dirstate, causing the next call to dirstate
2945 2945 to check if it was modified since the last time it was read,
2946 2946 rereading it if it has.
2947 2947
2948 2948 This is different to dirstate.invalidate() that it doesn't always
2949 2949 rereads the dirstate. Use dirstate.invalidate() if you want to
2950 2950 explicitly read the dirstate again (i.e. restoring it to a previous
2951 2951 known good state)."""
2952 2952 unfi = self.unfiltered()
2953 2953 if 'dirstate' in unfi.__dict__:
2954 assert not self.dirstate.is_changing_any
2954 2955 del unfi.__dict__['dirstate']
2955 2956
    def invalidate(self, clearfilecache=False):
        """Invalidates both store and non-store parts other than dirstate

        If a transaction is running, invalidation of store is omitted,
        because discarding in-memory changes might cause inconsistency
        (e.g. incomplete fncache causes unintentional failure, but
        redundant one doesn't).

        When ``clearfilecache`` is true, the filecache entries themselves
        are dropped in addition to the cached property values.
        """
        unfiltered = self.unfiltered()  # all file caches are stored unfiltered
        for k in list(self._filecache.keys()):
            if (
                k == b'changelog'
                and self.currenttransaction()
                and self.changelog._delayed
            ):
                # The changelog object may store unwritten revisions. We don't
                # want to lose them.
                # TODO: Solve the problem instead of working around it.
                continue

            if clearfilecache:
                del self._filecache[k]
            try:
                # drop the cached property so the next access re-reads it
                delattr(unfiltered, k)
            except AttributeError:
                # the property was never loaded; nothing to drop
                pass
        self.invalidatecaches()
        if not self.currenttransaction():
            # TODO: Changing contents of store outside transaction
            # causes inconsistency. We should make in-memory store
            # changes detectable, and abort if changed.
            self.store.invalidatecaches()
2988 2989
2989 2990 def invalidateall(self):
2990 2991 """Fully invalidates both store and non-store parts, causing the
2991 2992 subsequent operation to reread any outside changes."""
2992 2993 # extension should hook this to invalidate its caches
2993 2994 self.invalidate()
2994 2995 self.invalidatedirstate()
2995 2996
2996 2997 @unfilteredmethod
2997 2998 def _refreshfilecachestats(self, tr):
2998 2999 """Reload stats of cached files so that they are flagged as valid"""
2999 3000 for k, ce in self._filecache.items():
3000 3001 k = pycompat.sysstr(k)
3001 3002 if k == 'dirstate' or k not in self.__dict__:
3002 3003 continue
3003 3004 ce.refresh()
3004 3005
3005 3006 def _lock(
3006 3007 self,
3007 3008 vfs,
3008 3009 lockname,
3009 3010 wait,
3010 3011 releasefn,
3011 3012 acquirefn,
3012 3013 desc,
3013 3014 ):
3014 3015 timeout = 0
3015 3016 warntimeout = 0
3016 3017 if wait:
3017 3018 timeout = self.ui.configint(b"ui", b"timeout")
3018 3019 warntimeout = self.ui.configint(b"ui", b"timeout.warn")
3019 3020 # internal config: ui.signal-safe-lock
3020 3021 signalsafe = self.ui.configbool(b'ui', b'signal-safe-lock')
3021 3022
3022 3023 l = lockmod.trylock(
3023 3024 self.ui,
3024 3025 vfs,
3025 3026 lockname,
3026 3027 timeout,
3027 3028 warntimeout,
3028 3029 releasefn=releasefn,
3029 3030 acquirefn=acquirefn,
3030 3031 desc=desc,
3031 3032 signalsafe=signalsafe,
3032 3033 )
3033 3034 return l
3034 3035
3035 3036 def _afterlock(self, callback):
3036 3037 """add a callback to be run when the repository is fully unlocked
3037 3038
3038 3039 The callback will be executed when the outermost lock is released
3039 3040 (with wlock being higher level than 'lock')."""
3040 3041 for ref in (self._wlockref, self._lockref):
3041 3042 l = ref and ref()
3042 3043 if l and l.held:
3043 3044 l.postrelease.append(callback)
3044 3045 break
3045 3046 else: # no lock have been found.
3046 3047 callback(True)
3047 3048
3048 3049 def lock(self, wait=True):
3049 3050 """Lock the repository store (.hg/store) and return a weak reference
3050 3051 to the lock. Use this before modifying the store (e.g. committing or
3051 3052 stripping). If you are opening a transaction, get a lock as well.)
3052 3053
3053 3054 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
3054 3055 'wlock' first to avoid a dead-lock hazard."""
3055 3056 l = self._currentlock(self._lockref)
3056 3057 if l is not None:
3057 3058 l.lock()
3058 3059 return l
3059 3060
3060 3061 l = self._lock(
3061 3062 vfs=self.svfs,
3062 3063 lockname=b"lock",
3063 3064 wait=wait,
3064 3065 releasefn=None,
3065 3066 acquirefn=self.invalidate,
3066 3067 desc=_(b'repository %s') % self.origroot,
3067 3068 )
3068 3069 self._lockref = weakref.ref(l)
3069 3070 return l
3070 3071
    def wlock(self, wait=True):
        """Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.

        Use this before modifying files in .hg.

        If both 'lock' and 'wlock' must be acquired, ensure you always acquires
        'wlock' first to avoid a dead-lock hazard."""
        l = self._wlockref() if self._wlockref else None
        if l is not None and l.held:
            # reentrant acquisition: bump the depth of the already-held lock
            l.lock()
            return l

        # We do not need to check for non-waiting lock acquisition. Such
        # acquisition would not cause dead-lock as they would just fail.
        if wait and (
            self.ui.configbool(b'devel', b'all-warnings')
            or self.ui.configbool(b'devel', b'check-locks')
        ):
            if self._currentlock(self._lockref) is not None:
                self.ui.develwarn(b'"wlock" acquired after "lock"')

        def unlock():
            # on release: flush a dirty dirstate, or discard it if the lock
            # is dropped in the middle of a parent change (developer error)
            if self.dirstate.is_changing_any:
                msg = b"wlock release in the middle of a changing parents"
                self.ui.develwarn(msg)
                self.dirstate.invalidate()
            else:
                if self.dirstate._dirty:
                    msg = b"dirty dirstate on wlock release"
                    self.ui.develwarn(msg)
                    self.dirstate.write(None)

            # drop the cached dirstate so the next access re-reads it
            unfi = self.unfiltered()
            if 'dirstate' in unfi.__dict__:
                del unfi.__dict__['dirstate']

        l = self._lock(
            self.vfs,
            b"wlock",
            wait,
            unlock,
            self.invalidatedirstate,
            _(b'working directory of %s') % self.origroot,
        )
        self._wlockref = weakref.ref(l)
        return l
3118 3119
3119 3120 def _currentlock(self, lockref):
3120 3121 """Returns the lock if it's held, or None if it's not."""
3121 3122 if lockref is None:
3122 3123 return None
3123 3124 l = lockref()
3124 3125 if l is None or not l.held:
3125 3126 return None
3126 3127 return l
3127 3128
3128 3129 def currentwlock(self):
3129 3130 """Returns the wlock if it's held, or None if it's not."""
3130 3131 return self._currentlock(self._wlockref)
3131 3132
    def checkcommitpatterns(self, wctx, match, status, fail):
        """check for commit arguments that aren't committable

        Only explicit patterns (exact paths or prefix matches) are checked.
        Every file named by ``match`` that is not modified/added/removed,
        not a subrepo, and not a directory containing a matched file is
        reported through ``fail(filename, message)``.
        """
        if match.isexact() or match.prefix():
            # files that this commit will actually touch
            matched = set(status.modified + status.added + status.removed)

            for f in match.files():
                f = self.dirstate.normalize(f)
                if f == b'.' or f in matched or f in wctx.substate:
                    # repo root, a committed file, or a subrepo: nothing to do
                    continue
                if f in status.deleted:
                    fail(f, _(b'file not found!'))
                # Is it a directory that exists or used to exist?
                if self.wvfs.isdir(f) or wctx.p1().hasdir(f):
                    d = f + b'/'
                    for mf in matched:
                        if mf.startswith(d):
                            break
                    else:
                        # a directory pattern with no committable file below it
                        fail(f, _(b"no match under directory!"))
                elif f not in self.dirstate:
                    fail(f, _(b"file not tracked!"))
3153 3154
    @unfilteredmethod
    def commit(
        self,
        text=b"",
        user=None,
        date=None,
        match=None,
        force=False,
        editor=None,
        extra=None,
    ):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the node of the new revision, or None when there was
        nothing to commit.  Raises ``error.Abort`` for a partial merge
        commit or a merge with missing files, and ``error.InputError``
        (via ``fail``) for explicit patterns that cannot be committed.
        """
        if extra is None:
            extra = {}

        def fail(f, msg):
            # used both as match.bad and by checkcommitpatterns below
            raise error.InputError(b'%s: %s' % (f, msg))

        if not match:
            match = matchmod.always()

        if not force:
            match.bad = fail

        # lock() for recent changelog (see issue4368)
        with self.wlock(), self.lock():
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if not force and merge and not match.always():
                raise error.Abort(
                    _(
                        b'cannot partially commit a merge '
                        b'(do not specify files or patterns)'
                    )
                )

            status = self.status(match=match, clean=force)
            if force:
                status.modified.extend(
                    status.clean
                )  # mq may commit clean files

            # check subrepos
            subs, commitsubs, newstate = subrepoutil.precommit(
                self.ui, wctx, status, match, force=force
            )

            # make sure all explicit patterns are matched
            if not force:
                self.checkcommitpatterns(wctx, match, status, fail)

            cctx = context.workingcommitctx(
                self, status, text, user, date, extra
            )

            ms = mergestatemod.mergestate.read(self)
            mergeutil.checkunresolved(ms)

            # internal config: ui.allowemptycommit
            if cctx.isempty() and not self.ui.configbool(
                b'ui', b'allowemptycommit'
            ):
                self.ui.debug(b'nothing to commit, clearing merge state\n')
                ms.reset()
                return None

            if merge and cctx.deleted():
                raise error.Abort(_(b"cannot commit merge with missing files"))

            if editor:
                cctx._text = editor(self, cctx, subs)
            # remember whether the user edited the message, to decide whether
            # to point them at the saved copy on failure below
            edited = text != cctx._text

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msg_path = self.savecommitmessage(cctx._text)

            # commit subs and write new state
            if subs:
                uipathfn = scmutil.getuipathfn(self)
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(
                        _(b'committing subrepository %s\n')
                        % uipathfn(subrepoutil.subrelpath(sub))
                    )
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepoutil.writestate(self, newstate)

            p1, p2 = self.dirstate.parents()
            # second hook parent is empty for a non-merge working directory
            hookp1, hookp2 = hex(p1), (p2 != self.nullid and hex(p2) or b'')
            try:
                self.hook(
                    b"precommit", throw=True, parent1=hookp1, parent2=hookp2
                )
                with self.transaction(b'commit'):
                    ret = self.commitctx(cctx, True)
                    # update bookmarks, dirstate and mergestate
                    bookmarks.update(self, [p1, p2], ret)
                    cctx.markcommitted(ret)
                    ms.reset()
            except:  # re-raises
                if edited:
                    self.ui.write(
                        _(b'note: commit message saved in %s\n') % msg_path
                    )
                    self.ui.write(
                        _(
                            b"note: use 'hg commit --logfile "
                            b"%s --edit' to reuse it\n"
                        )
                        % msg_path
                    )
                raise

            def commithook(unused_success):
                # hack for command that use a temporary commit (eg: histedit)
                # temporary commit got stripped before hook release
                if self.changelog.hasnode(ret):
                    self.hook(
                        b"commit", node=hex(ret), parent1=hookp1, parent2=hookp2
                    )

            self._afterlock(commithook)
            return ret
3287 3288
    @unfilteredmethod
    def commitctx(self, ctx, error=False, origctx=None):
        """Create a new revision from ``ctx``.

        Thin delegation to ``commit.commitctx`` with the same arguments;
        see that function for the full contract.
        """
        return commit.commitctx(self, ctx, error=error, origctx=origctx)
3291 3292
    @unfilteredmethod
    def destroying(self):
        """Inform the repository that nodes are about to be destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done before destroying history.

        This is mostly useful for saving state that is in memory and waiting
        to be flushed when the current lock is released. Because a call to
        destroyed is imminent, the repo will be invalidated causing those
        changes to stay in memory (waiting for the next unlock), or vanish
        completely.
        """
        # When using the same lock to commit and strip, the phasecache is left
        # dirty after committing. Then when we strip, the repo is invalidated,
        # causing those changes to disappear.
        if '_phasecache' in vars(self):
            # persist pending phase moves before the upcoming invalidation
            self._phasecache.write()
3309 3310
    @unfilteredmethod
    def destroyed(self):
        """Inform the repository that nodes have been destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done after destroying history.
        """
        # When one tries to:
        # 1) destroy nodes thus calling this method (e.g. strip)
        # 2) use phasecache somewhere (e.g. commit)
        #
        # then 2) will fail because the phasecache contains nodes that were
        # removed. We can either remove phasecache from the filecache,
        # causing it to reload next time it is accessed, or simply filter
        # the removed nodes now and write the updated cache.
        self._phasecache.filterunknown(self)
        self._phasecache.write()

        # refresh all repository caches
        self.updatecaches()

        # Ensure the persistent tag cache is updated. Doing it now
        # means that the tag cache only has to worry about destroyed
        # heads immediately after a strip/rollback. That in turn
        # guarantees that "cachetip == currenttip" (comparing both rev
        # and node) always means no nodes have been added or destroyed.

        # XXX this is suboptimal when qrefresh'ing: we strip the current
        # head, refresh the tag cache, then immediately add a new head.
        # But I think doing it this way is necessary for the "instant
        # tag cache retrieval" case to work.
        self.invalidate()
3341 3342
3342 3343 def status(
3343 3344 self,
3344 3345 node1=b'.',
3345 3346 node2=None,
3346 3347 match=None,
3347 3348 ignored=False,
3348 3349 clean=False,
3349 3350 unknown=False,
3350 3351 listsubrepos=False,
3351 3352 ):
3352 3353 '''a convenience method that calls node1.status(node2)'''
3353 3354 return self[node1].status(
3354 3355 node2, match, ignored, clean, unknown, listsubrepos
3355 3356 )
3356 3357
3357 3358 def addpostdsstatus(self, ps):
3358 3359 """Add a callback to run within the wlock, at the point at which status
3359 3360 fixups happen.
3360 3361
3361 3362 On status completion, callback(wctx, status) will be called with the
3362 3363 wlock held, unless the dirstate has changed from underneath or the wlock
3363 3364 couldn't be grabbed.
3364 3365
3365 3366 Callbacks should not capture and use a cached copy of the dirstate --
3366 3367 it might change in the meanwhile. Instead, they should access the
3367 3368 dirstate via wctx.repo().dirstate.
3368 3369
3369 3370 This list is emptied out after each status run -- extensions should
3370 3371 make sure it adds to this list each time dirstate.status is called.
3371 3372 Extensions should also make sure they don't call this for statuses
3372 3373 that don't involve the dirstate.
3373 3374 """
3374 3375
3375 3376 # The list is located here for uniqueness reasons -- it is actually
3376 3377 # managed by the workingctx, but that isn't unique per-repo.
3377 3378 self._postdsstatus.append(ps)
3378 3379
3379 3380 def postdsstatus(self):
3380 3381 """Used by workingctx to get the list of post-dirstate-status hooks."""
3381 3382 return self._postdsstatus
3382 3383
3383 3384 def clearpostdsstatus(self):
3384 3385 """Used by workingctx to clear post-dirstate-status hooks."""
3385 3386 del self._postdsstatus[:]
3386 3387
3387 3388 def heads(self, start=None):
3388 3389 if start is None:
3389 3390 cl = self.changelog
3390 3391 headrevs = reversed(cl.headrevs())
3391 3392 return [cl.node(rev) for rev in headrevs]
3392 3393
3393 3394 heads = self.changelog.heads(start)
3394 3395 # sort the output in rev descending order
3395 3396 return sorted(heads, key=self.changelog.rev, reverse=True)
3396 3397
3397 3398 def branchheads(self, branch=None, start=None, closed=False):
3398 3399 """return a (possibly filtered) list of heads for the given branch
3399 3400
3400 3401 Heads are returned in topological order, from newest to oldest.
3401 3402 If branch is None, use the dirstate branch.
3402 3403 If start is not None, return only heads reachable from start.
3403 3404 If closed is True, return heads that are marked as closed as well.
3404 3405 """
3405 3406 if branch is None:
3406 3407 branch = self[None].branch()
3407 3408 branches = self.branchmap()
3408 3409 if not branches.hasbranch(branch):
3409 3410 return []
3410 3411 # the cache returns heads ordered lowest to highest
3411 3412 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
3412 3413 if start is not None:
3413 3414 # filter out the heads that cannot be reached from startrev
3414 3415 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
3415 3416 bheads = [h for h in bheads if h in fbheads]
3416 3417 return bheads
3417 3418
3418 3419 def branches(self, nodes):
3419 3420 if not nodes:
3420 3421 nodes = [self.changelog.tip()]
3421 3422 b = []
3422 3423 for n in nodes:
3423 3424 t = n
3424 3425 while True:
3425 3426 p = self.changelog.parents(n)
3426 3427 if p[1] != self.nullid or p[0] == self.nullid:
3427 3428 b.append((t, n, p[0], p[1]))
3428 3429 break
3429 3430 n = p[0]
3430 3431 return b
3431 3432
3432 3433 def between(self, pairs):
3433 3434 r = []
3434 3435
3435 3436 for top, bottom in pairs:
3436 3437 n, l, i = top, [], 0
3437 3438 f = 1
3438 3439
3439 3440 while n != bottom and n != self.nullid:
3440 3441 p = self.changelog.parents(n)[0]
3441 3442 if i == f:
3442 3443 l.append(n)
3443 3444 f = f * 2
3444 3445 n = p
3445 3446 i += 1
3446 3447
3447 3448 r.append(l)
3448 3449
3449 3450 return r
3450 3451
3451 3452 def checkpush(self, pushop):
3452 3453 """Extensions can override this function if additional checks have
3453 3454 to be performed before pushing, or call it if they override push
3454 3455 command.
3455 3456 """
3456 3457
3457 3458 @unfilteredpropertycache
3458 3459 def prepushoutgoinghooks(self):
3459 3460 """Return util.hooks consists of a pushop with repo, remote, outgoing
3460 3461 methods, which are called before pushing changesets.
3461 3462 """
3462 3463 return util.hooks()
3463 3464
3464 3465 def pushkey(self, namespace, key, old, new):
3465 3466 try:
3466 3467 tr = self.currenttransaction()
3467 3468 hookargs = {}
3468 3469 if tr is not None:
3469 3470 hookargs.update(tr.hookargs)
3470 3471 hookargs = pycompat.strkwargs(hookargs)
3471 3472 hookargs['namespace'] = namespace
3472 3473 hookargs['key'] = key
3473 3474 hookargs['old'] = old
3474 3475 hookargs['new'] = new
3475 3476 self.hook(b'prepushkey', throw=True, **hookargs)
3476 3477 except error.HookAbort as exc:
3477 3478 self.ui.write_err(_(b"pushkey-abort: %s\n") % exc)
3478 3479 if exc.hint:
3479 3480 self.ui.write_err(_(b"(%s)\n") % exc.hint)
3480 3481 return False
3481 3482 self.ui.debug(b'pushing key for "%s:%s"\n' % (namespace, key))
3482 3483 ret = pushkey.push(self, namespace, key, old, new)
3483 3484
3484 3485 def runhook(unused_success):
3485 3486 self.hook(
3486 3487 b'pushkey',
3487 3488 namespace=namespace,
3488 3489 key=key,
3489 3490 old=old,
3490 3491 new=new,
3491 3492 ret=ret,
3492 3493 )
3493 3494
3494 3495 self._afterlock(runhook)
3495 3496 return ret
3496 3497
3497 3498 def listkeys(self, namespace):
3498 3499 self.hook(b'prelistkeys', throw=True, namespace=namespace)
3499 3500 self.ui.debug(b'listing keys for "%s"\n' % namespace)
3500 3501 values = pushkey.list(self, namespace)
3501 3502 self.hook(b'listkeys', namespace=namespace, values=values)
3502 3503 return values
3503 3504
3504 3505 def debugwireargs(self, one, two, three=None, four=None, five=None):
3505 3506 '''used to test argument passing over the wire'''
3506 3507 return b"%s %s %s %s %s" % (
3507 3508 one,
3508 3509 two,
3509 3510 pycompat.bytestr(three),
3510 3511 pycompat.bytestr(four),
3511 3512 pycompat.bytestr(five),
3512 3513 )
3513 3514
3514 3515 def savecommitmessage(self, text):
3515 3516 fp = self.vfs(b'last-message.txt', b'wb')
3516 3517 try:
3517 3518 fp.write(text)
3518 3519 finally:
3519 3520 fp.close()
3520 3521 return self.pathto(fp.name[len(self.root) + 1 :])
3521 3522
3522 3523 def register_wanted_sidedata(self, category):
3523 3524 if repository.REPO_FEATURE_SIDE_DATA not in self.features:
3524 3525 # Only revlogv2 repos can want sidedata.
3525 3526 return
3526 3527 self._wanted_sidedata.add(pycompat.bytestr(category))
3527 3528
3528 3529 def register_sidedata_computer(
3529 3530 self, kind, category, keys, computer, flags, replace=False
3530 3531 ):
3531 3532 if kind not in revlogconst.ALL_KINDS:
3532 3533 msg = _(b"unexpected revlog kind '%s'.")
3533 3534 raise error.ProgrammingError(msg % kind)
3534 3535 category = pycompat.bytestr(category)
3535 3536 already_registered = category in self._sidedata_computers.get(kind, [])
3536 3537 if already_registered and not replace:
3537 3538 msg = _(
3538 3539 b"cannot register a sidedata computer twice for category '%s'."
3539 3540 )
3540 3541 raise error.ProgrammingError(msg % category)
3541 3542 if replace and not already_registered:
3542 3543 msg = _(
3543 3544 b"cannot replace a sidedata computer that isn't registered "
3544 3545 b"for category '%s'."
3545 3546 )
3546 3547 raise error.ProgrammingError(msg % category)
3547 3548 self._sidedata_computers.setdefault(kind, {})
3548 3549 self._sidedata_computers[kind][category] = (keys, computer, flags)
3549 3550
3550 3551
3551 3552 def undoname(fn: bytes) -> bytes:
3552 3553 base, name = os.path.split(fn)
3553 3554 assert name.startswith(b'journal')
3554 3555 return os.path.join(base, name.replace(b'journal', b'undo', 1))
3555 3556
3556 3557
3557 3558 def instance(ui, path: bytes, create, intents=None, createopts=None):
3558 3559 # prevent cyclic import localrepo -> upgrade -> localrepo
3559 3560 from . import upgrade
3560 3561
3561 3562 localpath = urlutil.urllocalpath(path)
3562 3563 if create:
3563 3564 createrepository(ui, localpath, createopts=createopts)
3564 3565
3565 3566 def repo_maker():
3566 3567 return makelocalrepository(ui, localpath, intents=intents)
3567 3568
3568 3569 repo = repo_maker()
3569 3570 repo = upgrade.may_auto_upgrade(repo, repo_maker)
3570 3571 return repo
3571 3572
3572 3573
3573 3574 def islocal(path: bytes) -> bool:
3574 3575 return True
3575 3576
3576 3577
3577 3578 def defaultcreateopts(ui, createopts=None):
3578 3579 """Populate the default creation options for a repository.
3579 3580
3580 3581 A dictionary of explicitly requested creation options can be passed
3581 3582 in. Missing keys will be populated.
3582 3583 """
3583 3584 createopts = dict(createopts or {})
3584 3585
3585 3586 if b'backend' not in createopts:
3586 3587 # experimental config: storage.new-repo-backend
3587 3588 createopts[b'backend'] = ui.config(b'storage', b'new-repo-backend')
3588 3589
3589 3590 return createopts
3590 3591
3591 3592
3592 3593 def clone_requirements(ui, createopts, srcrepo):
3593 3594 """clone the requirements of a local repo for a local clone
3594 3595
3595 3596 The store requirements are unchanged while the working copy requirements
3596 3597 depends on the configuration
3597 3598 """
3598 3599 target_requirements = set()
3599 3600 if not srcrepo.requirements:
3600 3601 # this is a legacy revlog "v0" repository, we cannot do anything fancy
3601 3602 # with it.
3602 3603 return target_requirements
3603 3604 createopts = defaultcreateopts(ui, createopts=createopts)
3604 3605 for r in newreporequirements(ui, createopts):
3605 3606 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
3606 3607 target_requirements.add(r)
3607 3608
3608 3609 for r in srcrepo.requirements:
3609 3610 if r not in requirementsmod.WORKING_DIR_REQUIREMENTS:
3610 3611 target_requirements.add(r)
3611 3612 return target_requirements
3612 3613
3613 3614
3614 3615 def newreporequirements(ui, createopts):
3615 3616 """Determine the set of requirements for a new local repository.
3616 3617
3617 3618 Extensions can wrap this function to specify custom requirements for
3618 3619 new repositories.
3619 3620 """
3620 3621
3621 3622 if b'backend' not in createopts:
3622 3623 raise error.ProgrammingError(
3623 3624 b'backend key not present in createopts; '
3624 3625 b'was defaultcreateopts() called?'
3625 3626 )
3626 3627
3627 3628 if createopts[b'backend'] != b'revlogv1':
3628 3629 raise error.Abort(
3629 3630 _(
3630 3631 b'unable to determine repository requirements for '
3631 3632 b'storage backend: %s'
3632 3633 )
3633 3634 % createopts[b'backend']
3634 3635 )
3635 3636
3636 3637 requirements = {requirementsmod.REVLOGV1_REQUIREMENT}
3637 3638 if ui.configbool(b'format', b'usestore'):
3638 3639 requirements.add(requirementsmod.STORE_REQUIREMENT)
3639 3640 if ui.configbool(b'format', b'usefncache'):
3640 3641 requirements.add(requirementsmod.FNCACHE_REQUIREMENT)
3641 3642 if ui.configbool(b'format', b'dotencode'):
3642 3643 requirements.add(requirementsmod.DOTENCODE_REQUIREMENT)
3643 3644
3644 3645 compengines = ui.configlist(b'format', b'revlog-compression')
3645 3646 for compengine in compengines:
3646 3647 if compengine in util.compengines:
3647 3648 engine = util.compengines[compengine]
3648 3649 if engine.available() and engine.revlogheader():
3649 3650 break
3650 3651 else:
3651 3652 raise error.Abort(
3652 3653 _(
3653 3654 b'compression engines %s defined by '
3654 3655 b'format.revlog-compression not available'
3655 3656 )
3656 3657 % b', '.join(b'"%s"' % e for e in compengines),
3657 3658 hint=_(
3658 3659 b'run "hg debuginstall" to list available '
3659 3660 b'compression engines'
3660 3661 ),
3661 3662 )
3662 3663
3663 3664 # zlib is the historical default and doesn't need an explicit requirement.
3664 3665 if compengine == b'zstd':
3665 3666 requirements.add(b'revlog-compression-zstd')
3666 3667 elif compengine != b'zlib':
3667 3668 requirements.add(b'exp-compression-%s' % compengine)
3668 3669
3669 3670 if scmutil.gdinitconfig(ui):
3670 3671 requirements.add(requirementsmod.GENERALDELTA_REQUIREMENT)
3671 3672 if ui.configbool(b'format', b'sparse-revlog'):
3672 3673 requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT)
3673 3674
3674 3675 # experimental config: format.use-dirstate-v2
3675 3676 # Keep this logic in sync with `has_dirstate_v2()` in `tests/hghave.py`
3676 3677 if ui.configbool(b'format', b'use-dirstate-v2'):
3677 3678 requirements.add(requirementsmod.DIRSTATE_V2_REQUIREMENT)
3678 3679
3679 3680 # experimental config: format.exp-use-copies-side-data-changeset
3680 3681 if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'):
3681 3682 requirements.add(requirementsmod.CHANGELOGV2_REQUIREMENT)
3682 3683 requirements.add(requirementsmod.COPIESSDC_REQUIREMENT)
3683 3684 if ui.configbool(b'experimental', b'treemanifest'):
3684 3685 requirements.add(requirementsmod.TREEMANIFEST_REQUIREMENT)
3685 3686
3686 3687 changelogv2 = ui.config(b'format', b'exp-use-changelog-v2')
3687 3688 if changelogv2 == b'enable-unstable-format-and-corrupt-my-data':
3688 3689 requirements.add(requirementsmod.CHANGELOGV2_REQUIREMENT)
3689 3690
3690 3691 revlogv2 = ui.config(b'experimental', b'revlogv2')
3691 3692 if revlogv2 == b'enable-unstable-format-and-corrupt-my-data':
3692 3693 requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
3693 3694 requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
3694 3695 # experimental config: format.internal-phase
3695 3696 if ui.configbool(b'format', b'use-internal-phase'):
3696 3697 requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)
3697 3698
3698 3699 # experimental config: format.exp-archived-phase
3699 3700 if ui.configbool(b'format', b'exp-archived-phase'):
3700 3701 requirements.add(requirementsmod.ARCHIVED_PHASE_REQUIREMENT)
3701 3702
3702 3703 if createopts.get(b'narrowfiles'):
3703 3704 requirements.add(requirementsmod.NARROW_REQUIREMENT)
3704 3705
3705 3706 if createopts.get(b'lfs'):
3706 3707 requirements.add(b'lfs')
3707 3708
3708 3709 if ui.configbool(b'format', b'bookmarks-in-store'):
3709 3710 requirements.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)
3710 3711
3711 3712 if ui.configbool(b'format', b'use-persistent-nodemap'):
3712 3713 requirements.add(requirementsmod.NODEMAP_REQUIREMENT)
3713 3714
3714 3715 # if share-safe is enabled, let's create the new repository with the new
3715 3716 # requirement
3716 3717 if ui.configbool(b'format', b'use-share-safe'):
3717 3718 requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
3718 3719
3719 3720 # if we are creating a share-repoΒΉ we have to handle requirement
3720 3721 # differently.
3721 3722 #
3722 3723 # [1] (i.e. reusing the store from another repository, just having a
3723 3724 # working copy)
3724 3725 if b'sharedrepo' in createopts:
3725 3726 source_requirements = set(createopts[b'sharedrepo'].requirements)
3726 3727
3727 3728 if requirementsmod.SHARESAFE_REQUIREMENT not in source_requirements:
3728 3729 # share to an old school repository, we have to copy the
3729 3730 # requirements and hope for the best.
3730 3731 requirements = source_requirements
3731 3732 else:
3732 3733 # We have control on the working copy only, so "copy" the non
3733 3734 # working copy part over, ignoring previous logic.
3734 3735 to_drop = set()
3735 3736 for req in requirements:
3736 3737 if req in requirementsmod.WORKING_DIR_REQUIREMENTS:
3737 3738 continue
3738 3739 if req in source_requirements:
3739 3740 continue
3740 3741 to_drop.add(req)
3741 3742 requirements -= to_drop
3742 3743 requirements |= source_requirements
3743 3744
3744 3745 if createopts.get(b'sharedrelative'):
3745 3746 requirements.add(requirementsmod.RELATIVE_SHARED_REQUIREMENT)
3746 3747 else:
3747 3748 requirements.add(requirementsmod.SHARED_REQUIREMENT)
3748 3749
3749 3750 if ui.configbool(b'format', b'use-dirstate-tracked-hint'):
3750 3751 version = ui.configint(b'format', b'use-dirstate-tracked-hint.version')
3751 3752 msg = _(b"ignoring unknown tracked key version: %d\n")
3752 3753 hint = _(
3753 3754 b"see `hg help config.format.use-dirstate-tracked-hint-version"
3754 3755 )
3755 3756 if version != 1:
3756 3757 ui.warn(msg % version, hint=hint)
3757 3758 else:
3758 3759 requirements.add(requirementsmod.DIRSTATE_TRACKED_HINT_V1)
3759 3760
3760 3761 return requirements
3761 3762
3762 3763
3763 3764 def checkrequirementscompat(ui, requirements):
3764 3765 """Checks compatibility of repository requirements enabled and disabled.
3765 3766
3766 3767 Returns a set of requirements which needs to be dropped because dependend
3767 3768 requirements are not enabled. Also warns users about it"""
3768 3769
3769 3770 dropped = set()
3770 3771
3771 3772 if requirementsmod.STORE_REQUIREMENT not in requirements:
3772 3773 if requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT in requirements:
3773 3774 ui.warn(
3774 3775 _(
3775 3776 b'ignoring enabled \'format.bookmarks-in-store\' config '
3776 3777 b'beacuse it is incompatible with disabled '
3777 3778 b'\'format.usestore\' config\n'
3778 3779 )
3779 3780 )
3780 3781 dropped.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)
3781 3782
3782 3783 if (
3783 3784 requirementsmod.SHARED_REQUIREMENT in requirements
3784 3785 or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
3785 3786 ):
3786 3787 raise error.Abort(
3787 3788 _(
3788 3789 b"cannot create shared repository as source was created"
3789 3790 b" with 'format.usestore' config disabled"
3790 3791 )
3791 3792 )
3792 3793
3793 3794 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
3794 3795 if ui.hasconfig(b'format', b'use-share-safe'):
3795 3796 msg = _(
3796 3797 b"ignoring enabled 'format.use-share-safe' config because "
3797 3798 b"it is incompatible with disabled 'format.usestore'"
3798 3799 b" config\n"
3799 3800 )
3800 3801 ui.warn(msg)
3801 3802 dropped.add(requirementsmod.SHARESAFE_REQUIREMENT)
3802 3803
3803 3804 return dropped
3804 3805
3805 3806
3806 3807 def filterknowncreateopts(ui, createopts):
3807 3808 """Filters a dict of repo creation options against options that are known.
3808 3809
3809 3810 Receives a dict of repo creation options and returns a dict of those
3810 3811 options that we don't know how to handle.
3811 3812
3812 3813 This function is called as part of repository creation. If the
3813 3814 returned dict contains any items, repository creation will not
3814 3815 be allowed, as it means there was a request to create a repository
3815 3816 with options not recognized by loaded code.
3816 3817
3817 3818 Extensions can wrap this function to filter out creation options
3818 3819 they know how to handle.
3819 3820 """
3820 3821 known = {
3821 3822 b'backend',
3822 3823 b'lfs',
3823 3824 b'narrowfiles',
3824 3825 b'sharedrepo',
3825 3826 b'sharedrelative',
3826 3827 b'shareditems',
3827 3828 b'shallowfilestore',
3828 3829 }
3829 3830
3830 3831 return {k: v for k, v in createopts.items() if k not in known}
3831 3832
3832 3833
3833 3834 def createrepository(ui, path: bytes, createopts=None, requirements=None):
3834 3835 """Create a new repository in a vfs.
3835 3836
3836 3837 ``path`` path to the new repo's working directory.
3837 3838 ``createopts`` options for the new repository.
3838 3839 ``requirement`` predefined set of requirements.
3839 3840 (incompatible with ``createopts``)
3840 3841
3841 3842 The following keys for ``createopts`` are recognized:
3842 3843
3843 3844 backend
3844 3845 The storage backend to use.
3845 3846 lfs
3846 3847 Repository will be created with ``lfs`` requirement. The lfs extension
3847 3848 will automatically be loaded when the repository is accessed.
3848 3849 narrowfiles
3849 3850 Set up repository to support narrow file storage.
3850 3851 sharedrepo
3851 3852 Repository object from which storage should be shared.
3852 3853 sharedrelative
3853 3854 Boolean indicating if the path to the shared repo should be
3854 3855 stored as relative. By default, the pointer to the "parent" repo
3855 3856 is stored as an absolute path.
3856 3857 shareditems
3857 3858 Set of items to share to the new repository (in addition to storage).
3858 3859 shallowfilestore
3859 3860 Indicates that storage for files should be shallow (not all ancestor
3860 3861 revisions are known).
3861 3862 """
3862 3863
3863 3864 if requirements is not None:
3864 3865 if createopts is not None:
3865 3866 msg = b'cannot specify both createopts and requirements'
3866 3867 raise error.ProgrammingError(msg)
3867 3868 createopts = {}
3868 3869 else:
3869 3870 createopts = defaultcreateopts(ui, createopts=createopts)
3870 3871
3871 3872 unknownopts = filterknowncreateopts(ui, createopts)
3872 3873
3873 3874 if not isinstance(unknownopts, dict):
3874 3875 raise error.ProgrammingError(
3875 3876 b'filterknowncreateopts() did not return a dict'
3876 3877 )
3877 3878
3878 3879 if unknownopts:
3879 3880 raise error.Abort(
3880 3881 _(
3881 3882 b'unable to create repository because of unknown '
3882 3883 b'creation option: %s'
3883 3884 )
3884 3885 % b', '.join(sorted(unknownopts)),
3885 3886 hint=_(b'is a required extension not loaded?'),
3886 3887 )
3887 3888
3888 3889 requirements = newreporequirements(ui, createopts=createopts)
3889 3890 requirements -= checkrequirementscompat(ui, requirements)
3890 3891
3891 3892 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
3892 3893
3893 3894 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
3894 3895 if hgvfs.exists():
3895 3896 raise error.RepoError(_(b'repository %s already exists') % path)
3896 3897
3897 3898 if b'sharedrepo' in createopts:
3898 3899 sharedpath = createopts[b'sharedrepo'].sharedpath
3899 3900
3900 3901 if createopts.get(b'sharedrelative'):
3901 3902 try:
3902 3903 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
3903 3904 sharedpath = util.pconvert(sharedpath)
3904 3905 except (IOError, ValueError) as e:
3905 3906 # ValueError is raised on Windows if the drive letters differ
3906 3907 # on each path.
3907 3908 raise error.Abort(
3908 3909 _(b'cannot calculate relative path'),
3909 3910 hint=stringutil.forcebytestr(e),
3910 3911 )
3911 3912
3912 3913 if not wdirvfs.exists():
3913 3914 wdirvfs.makedirs()
3914 3915
3915 3916 hgvfs.makedir(notindexed=True)
3916 3917 if b'sharedrepo' not in createopts:
3917 3918 hgvfs.mkdir(b'cache')
3918 3919 hgvfs.mkdir(b'wcache')
3919 3920
3920 3921 has_store = requirementsmod.STORE_REQUIREMENT in requirements
3921 3922 if has_store and b'sharedrepo' not in createopts:
3922 3923 hgvfs.mkdir(b'store')
3923 3924
3924 3925 # We create an invalid changelog outside the store so very old
3925 3926 # Mercurial versions (which didn't know about the requirements
3926 3927 # file) encounter an error on reading the changelog. This
3927 3928 # effectively locks out old clients and prevents them from
3928 3929 # mucking with a repo in an unknown format.
3929 3930 #
3930 3931 # The revlog header has version 65535, which won't be recognized by
3931 3932 # such old clients.
3932 3933 hgvfs.append(
3933 3934 b'00changelog.i',
3934 3935 b'\0\0\xFF\xFF dummy changelog to prevent using the old repo '
3935 3936 b'layout',
3936 3937 )
3937 3938
3938 3939 # Filter the requirements into working copy and store ones
3939 3940 wcreq, storereq = scmutil.filterrequirements(requirements)
3940 3941 # write working copy ones
3941 3942 scmutil.writerequires(hgvfs, wcreq)
3942 3943 # If there are store requirements and the current repository
3943 3944 # is not a shared one, write stored requirements
3944 3945 # For new shared repository, we don't need to write the store
3945 3946 # requirements as they are already present in store requires
3946 3947 if storereq and b'sharedrepo' not in createopts:
3947 3948 storevfs = vfsmod.vfs(hgvfs.join(b'store'), cacheaudited=True)
3948 3949 scmutil.writerequires(storevfs, storereq)
3949 3950
3950 3951 # Write out file telling readers where to find the shared store.
3951 3952 if b'sharedrepo' in createopts:
3952 3953 hgvfs.write(b'sharedpath', sharedpath)
3953 3954
3954 3955 if createopts.get(b'shareditems'):
3955 3956 shared = b'\n'.join(sorted(createopts[b'shareditems'])) + b'\n'
3956 3957 hgvfs.write(b'shared', shared)
3957 3958
3958 3959
3959 3960 def poisonrepository(repo):
3960 3961 """Poison a repository instance so it can no longer be used."""
3961 3962 # Perform any cleanup on the instance.
3962 3963 repo.close()
3963 3964
3964 3965 # Our strategy is to replace the type of the object with one that
3965 3966 # has all attribute lookups result in error.
3966 3967 #
3967 3968 # But we have to allow the close() method because some constructors
3968 3969 # of repos call close() on repo references.
3969 3970 class poisonedrepository:
3970 3971 def __getattribute__(self, item):
3971 3972 if item == 'close':
3972 3973 return object.__getattribute__(self, item)
3973 3974
3974 3975 raise error.ProgrammingError(
3975 3976 b'repo instances should not be used after unshare'
3976 3977 )
3977 3978
3978 3979 def close(self):
3979 3980 pass
3980 3981
3981 3982 # We may have a repoview, which intercepts __setattr__. So be sure
3982 3983 # we operate at the lowest level possible.
3983 3984 object.__setattr__(repo, '__class__', poisonedrepository)
@@ -1,497 +1,520 b''
1 1 setup
2 2
3 3 $ cat > myextension.py <<EOF
4 4 > from mercurial import error, registrar
5 5 > cmdtable = {}
6 6 > command = registrar.command(cmdtable)
7 7 > @command(b'crash', [], b'hg crash')
8 8 > def crash(ui, *args, **kwargs):
9 9 > raise Exception("oops")
10 10 > @command(b'abortcmd', [], b'hg abortcmd')
11 11 > def abort(ui, *args, **kwargs):
12 12 > raise error.Abort(b"oops")
13 13 > EOF
14 14 $ abspath=`pwd`/myextension.py
15 15
16 16 $ cat >> $HGRCPATH <<EOF
17 17 > [extensions]
18 18 > blackbox=
19 19 > mock=$TESTDIR/mockblackbox.py
20 20 > mq=
21 21 > myextension=$TESTTMP/myextension.py
22 22 > [alias]
23 23 > confuse = log --limit 3
24 24 > so-confusing = confuse --style compact
25 25 > EOF
26 26
27 27 $ hg init blackboxtest
28 28 $ cd blackboxtest
29 29
30 30 command, exit codes, and duration
31 31
32 32 $ echo a > a
33 33 $ hg add a
34 34 $ hg blackbox --config blackbox.dirty=True
35 35 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest exited 0 after * seconds (glob)
36 36 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a
37 37 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
38 38 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000+ (5000)> blackbox --config *blackbox.dirty=True* (glob)
39 39
40 40 failure exit code
41 41 $ rm ./.hg/blackbox.log
42 42 $ hg add non-existent
43 43 non-existent: $ENOENT$
44 44 [1]
45 45 $ hg blackbox
46 46 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent
47 47 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent exited 1 after * seconds (glob)
48 48 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
49 49
50 50 abort exit code
51 51 $ rm ./.hg/blackbox.log
52 52 $ hg abortcmd 2> /dev/null
53 53 [255]
54 54 $ hg blackbox -l 2
55 55 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> abortcmd exited 255 after * seconds (glob)
56 56 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
57 57
58 58 unhandled exception
59 59 $ rm ./.hg/blackbox.log
60 60 #if chg
61 61 (chg exits 255 because it fails to receive an exit code)
62 62 $ hg crash 2>/dev/null
63 63 [255]
64 64 #else
65 65 (hg exits 1 because Python default exit code for uncaught exception is 1)
66 66 $ hg crash 2>/dev/null
67 67 [1]
68 68 #endif
69 69 $ hg blackbox -l 2
70 70 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> crash exited 1 after * seconds (glob)
71 71 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
72 72
73 73 alias expansion is logged
74 74 $ rm ./.hg/blackbox.log
75 75 $ hg confuse
76 76 $ hg blackbox
77 77 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse
78 78 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
79 79 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse exited 0 after * seconds (glob)
80 80 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
81 81
82 82 recursive aliases work correctly
83 83 $ rm ./.hg/blackbox.log
84 84 $ hg so-confusing
85 85 $ hg blackbox
86 86 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing
87 87 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'so-confusing' expands to 'confuse --style compact'
88 88 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
89 89 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing exited 0 after * seconds (glob)
90 90 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
91 91
92 92 custom date format
93 93 $ rm ./.hg/blackbox.log
94 94 $ hg --config blackbox.date-format='%Y-%m-%d @ %H:%M:%S' \
95 95 > --config devel.default-date='1334347993 0' --traceback status
96 96 A a
97 97 $ hg blackbox
98 98 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status (glob)
99 99 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status exited 0 after * seconds (glob)
100 100 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
101 101
102 102 incoming change tracking
103 103
104 104 create two heads to verify that we only see one change in the log later
105 105 $ hg commit -ma
106 106 $ hg up null
107 107 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
108 108 $ echo b > b
109 109 $ hg commit -Amb
110 110 adding b
111 111 created new head
112 112
113 113 clone, commit, pull
114 114 $ hg clone . ../blackboxtest2
115 115 updating to branch default
116 116 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
117 117 $ echo c > c
118 118 $ hg commit -Amc
119 119 adding c
120 120 $ cd ../blackboxtest2
121 121 $ hg pull
122 122 pulling from $TESTTMP/blackboxtest
123 123 searching for changes
124 124 adding changesets
125 125 adding manifests
126 126 adding file changes
127 127 added 1 changesets with 1 changes to 1 files
128 128 new changesets d02f48003e62
129 129 (run 'hg update' to get a working copy)
130 130 $ hg blackbox -l 6
131 131 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served) with 1 labels and 2 nodes
132 132 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served.hidden) in * seconds (glob)
133 133 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served.hidden) with 1 labels and 2 nodes
134 134 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62
135 135 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob)
136 136 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
137 137
138 138 we must not cause a failure if we cannot write to the log
139 139
140 140 $ hg rollback
141 141 repository tip rolled back to revision 1 (undo pull)
142 142
143 143 $ mv .hg/blackbox.log .hg/blackbox.log-
144 144 $ mkdir .hg/blackbox.log
145 145 $ hg --debug incoming
146 146 warning: cannot write to blackbox.log: * (glob)
147 147 comparing with $TESTTMP/blackboxtest
148 148 query 1; heads
149 149 searching for changes
150 150 all local changesets known remotely
151 151 changeset: 2:d02f48003e62c24e2659d97d30f2a83abe5d5d51
152 152 tag: tip
153 153 phase: draft
154 154 parent: 1:6563da9dcf87b1949716e38ff3e3dfaa3198eb06
155 155 parent: -1:0000000000000000000000000000000000000000
156 156 manifest: 2:ab9d46b053ebf45b7996f2922b9893ff4b63d892
157 157 user: test
158 158 date: Thu Jan 01 00:00:00 1970 +0000
159 159 files+: c
160 160 extra: branch=default
161 161 description:
162 162 c
163 163
164 164
165 165 $ hg pull
166 166 pulling from $TESTTMP/blackboxtest
167 167 searching for changes
168 168 adding changesets
169 169 adding manifests
170 170 adding file changes
171 171 added 1 changesets with 1 changes to 1 files
172 172 new changesets d02f48003e62
173 173 (run 'hg update' to get a working copy)
174 174
175 175 a failure reading from the log is fatal
176 176
177 177 $ hg blackbox -l 3
178 178 abort: *$TESTTMP/blackboxtest2/.hg/blackbox.log* (glob)
179 179 [255]
180 180
181 181 $ rmdir .hg/blackbox.log
182 182 $ mv .hg/blackbox.log- .hg/blackbox.log
183 183
184 184 backup bundles get logged
185 185
186 186 $ touch d
187 187 $ hg commit -Amd
188 188 adding d
189 189 created new head
190 190 $ hg strip tip
191 191 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
192 192 saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
193 193 $ hg blackbox -l 6
194 194 1970-01-01 00:00:00.000 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
195 195 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
196 196 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (immutable) in * seconds (glob)
197 197 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (immutable) with 1 labels and 2 nodes
198 198 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
199 199 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
200 200
201 201 extension and python hooks - use the eol extension for a pythonhook
202 202
203 203 $ echo '[extensions]' >> .hg/hgrc
204 204 $ echo 'eol=' >> .hg/hgrc
205 205 $ echo '[hooks]' >> .hg/hgrc
206 206 $ echo 'update = echo hooked' >> .hg/hgrc
207 207 $ hg update
208 208 The fsmonitor extension is incompatible with the eol extension and has been disabled. (fsmonitor !)
209 209 hooked
210 210 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
211 211 updated to "d02f48003e62: c"
212 212 1 other heads for branch "default"
213 213 $ cat >> .hg/hgrc <<EOF
214 214 > [extensions]
215 215 > # disable eol, because it is not needed for subsequent tests
216 216 > # (in addition, keeping it requires extra care for fsmonitor)
217 217 > eol=!
218 218 > EOF
219 219 $ hg blackbox -l 5
220 220 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update (no-chg !)
221 221 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
222 222 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> exthook-update: echo hooked finished in * seconds (glob)
223 223 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> update exited 0 after * seconds (glob)
224 224 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> serve --no-profile --cmdserver chgunix --address $TESTTMP.chgsock/server.* --daemon-postexec 'chdir:/' (glob) (chg !)
225 225 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> blackbox -l 5
226 226
227 227 log rotation
228 228
229 229 $ echo '[blackbox]' >> .hg/hgrc
230 230 $ echo 'maxsize = 20 b' >> .hg/hgrc
231 231 $ echo 'maxfiles = 3' >> .hg/hgrc
232 232 $ hg status
233 233 $ hg status
234 234 $ hg status
235 235 $ hg tip -q
236 236 2:d02f48003e62
237 237 $ ls .hg/blackbox.log*
238 238 .hg/blackbox.log
239 239 .hg/blackbox.log.1
240 240 .hg/blackbox.log.2
241 241 $ cd ..
242 242
243 243 $ hg init blackboxtest3
244 244 $ cd blackboxtest3
245 245 $ hg blackbox
246 246 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest3 exited 0 after * seconds (glob)
247 247 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
248 248 $ mv .hg/blackbox.log .hg/blackbox.log-
249 249 $ mkdir .hg/blackbox.log
250 250 $ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\
251 251 > os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
252 252 > \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
253 253 $ "$PYTHON" $TESTDIR/blackbox-readonly-dispatch.py
254 254 running: --debug add foo
255 255 warning: cannot write to blackbox.log: Is a directory (no-windows !)
256 256 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
257 257 adding foo
258 258 result: 0
259 259 running: --debug commit -m commit1 -d 2000-01-01 foo
260 260 warning: cannot write to blackbox.log: Is a directory (no-windows !)
261 261 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
262 262 committing files:
263 263 foo
264 264 committing manifest
265 265 committing changelog
266 266 updating the branch cache
267 267 committed changeset 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
268 268 result: 0
269 269 running: --debug commit -m commit2 -d 2000-01-02 foo
270 270 committing files:
271 271 foo
272 272 committing manifest
273 273 committing changelog
274 274 updating the branch cache
275 275 committed changeset 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
276 276 result: 0
277 277 running: --debug log -r 0
278 278 changeset: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
279 279 phase: draft
280 280 parent: -1:0000000000000000000000000000000000000000
281 281 parent: -1:0000000000000000000000000000000000000000
282 282 manifest: 0:9091aa5df980aea60860a2e39c95182e68d1ddec
283 283 user: test
284 284 date: Sat Jan 01 00:00:00 2000 +0000
285 285 files+: foo
286 286 extra: branch=default
287 287 description:
288 288 commit1
289 289
290 290
291 291 result: 0
292 292 running: --debug log -r tip
293 293 changeset: 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
294 294 tag: tip
295 295 phase: draft
296 296 parent: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
297 297 parent: -1:0000000000000000000000000000000000000000
298 298 manifest: 1:895aa9b7886f89dd017a6d62524e1f9180b04df9
299 299 user: test
300 300 date: Sun Jan 02 00:00:00 2000 +0000
301 301 files: foo
302 302 extra: branch=default
303 303 description:
304 304 commit2
305 305
306 306
307 307 result: 0
308 308 $ hg blackbox
309 309 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
310 310 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated branch cache (served) in * seconds (glob)
311 311 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote branch cache (served) with 1 labels and 1 nodes
312 312 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug commit -m commit2 -d 2000-01-02 foo exited 0 after *.?? seconds (glob)
313 313 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0
314 314 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
315 315 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0 exited 0 after *.?? seconds (glob)
316 316 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip
317 317 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob)
318 318 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox
319 319
320 320 Skip rotation if the .hg is read-only
321 321
322 322 #if unix-permissions
323 323 $ chmod -w .hg
324 324 $ hg log -r. -T '{rev}\n' --config blackbox.maxsize=1 --debug
325 325 warning: cannot rename '$TESTTMP/blackboxtest3/.hg/blackbox.log.1' to '$TESTTMP/blackboxtest3/.hg/blackbox.log': $EACCES$
326 326 warning: cannot write to blackbox.log: $EACCES$
327 327 1
328 328 $ chmod +w .hg
329 329 #endif
330 330
331 331 Test log recursion from dirty status check
332 332
333 333 $ cat > ../r.py <<EOF
334 334 > from mercurial import context, error, extensions
335 335 > x=[False]
336 336 > def status(orig, *args, **opts):
337 337 > args[0].repo().ui.log(b"broken", b"recursion?")
338 338 > return orig(*args, **opts)
339 339 > def reposetup(ui, repo):
340 340 > extensions.wrapfunction(context.basectx, 'status', status)
341 341 > EOF
342 342 $ hg id --config extensions.x=../r.py --config blackbox.dirty=True
343 343 45589e459b2e tip
344 344
345 345 cleanup
346 346 $ cd ..
347 347
348 348 Test missing log directory, which shouldn't be created automatically
349 349
350 350 $ cat <<'EOF' > closeremove.py
351 351 > def reposetup(ui, repo):
352 352 > class rmrepo(repo.__class__):
353 353 > def close(self):
354 354 > super(rmrepo, self).close()
355 355 > self.ui.debug(b'removing %s\n' % self.vfs.base)
356 356 > self.vfs.rmtree()
357 357 > repo.__class__ = rmrepo
358 358 > EOF
359 359
360 360 $ hg init gone
361 361 $ cd gone
362 362 $ cat <<'EOF' > .hg/hgrc
363 363 > [extensions]
364 364 > closeremove = ../closeremove.py
365 365 > EOF
366 366 $ hg log --debug
367 367 removing $TESTTMP/gone/.hg
368 368 warning: cannot write to blackbox.log: $ENOENT$ (no-windows !)
369 369 warning: cannot write to blackbox.log: $TESTTMP/gone/.hg/blackbox.log: $ENOTDIR$ (windows !)
370 370 $ cd ..
371 371
372 372 blackbox should disable itself if track is empty
373 373
374 374 $ hg --config blackbox.track= init nothing_tracked
375 375 $ cd nothing_tracked
376 376 $ cat >> .hg/hgrc << EOF
377 377 > [blackbox]
378 378 > track =
379 379 > EOF
380 380 $ hg blackbox
381 381 $ cd $TESTTMP
382 382
383 383 a '*' entry in blackbox.track is interpreted as log everything
384 384
385 385 $ hg --config blackbox.track='*' \
386 386 > --config blackbox.logsource=True \
387 387 > init track_star
388 388 $ cd track_star
389 389 $ cat >> .hg/hgrc << EOF
390 390 > [blackbox]
391 391 > logsource = True
392 392 > track = *
393 393 > EOF
394 394 (only look for entries with specific logged sources, otherwise this test is
395 395 pretty brittle)
396 396 $ hg blackbox | egrep '\[command(finish)?\]'
397 397 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [commandfinish]> --config *blackbox.track=* --config *blackbox.logsource=True* init track_star exited 0 after * seconds (glob)
398 398 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [command]> blackbox
399 399 $ cd $TESTTMP
400 400
401 401 #if chg
402 402
403 403 when using chg, blackbox.log should get rotated correctly
404 404
405 405 $ cat > $TESTTMP/noop.py << EOF
406 406 > import time
407 407 > from mercurial import registrar, scmutil
408 408 > cmdtable = {}
409 409 > command = registrar.command(cmdtable)
410 410 > @command(b'noop')
411 411 > def noop(ui, repo):
412 412 > pass
413 413 > EOF
414 414
415 415 $ hg init blackbox-chg
416 416 $ cd blackbox-chg
417 417
418 418 $ cat > .hg/hgrc << EOF
419 419 > [blackbox]
420 420 > maxsize = 500B
421 421 > [extensions]
422 422 > # extension change forces chg to restart
423 423 > noop=$TESTTMP/noop.py
424 424 > EOF
425 425
426 426 $ "$PYTHON" -c 'print("a" * 400)' > .hg/blackbox.log
427 427 $ chg noop
428 428 $ chg noop
429 429 $ chg noop
430 430 $ chg noop
431 431 $ chg noop
432 432
433 433 $ cat > showsize.py << 'EOF'
434 434 > import os
435 435 > import sys
436 436 > limit = 500
437 437 > for p in sys.argv[1:]:
438 438 > size = os.stat(p).st_size
439 439 > if size >= limit:
440 440 > desc = '>='
441 441 > else:
442 442 > desc = '<'
443 443 > print('%s: %s %d' % (p, desc, limit))
444 444 > EOF
445 445
446 446 $ "$PYTHON" showsize.py .hg/blackbox*
447 447 .hg/blackbox.log: < 500
448 448 .hg/blackbox.log.1: >= 500
449 449 .hg/blackbox.log.2: >= 500
450 450
451 451 $ cd ..
452 452
453 453 With chg, blackbox should not create the log file if the repo is gone
454 454
455 455 $ hg init repo1
456 456 $ hg --config extensions.a=! -R repo1 log
457 457 $ rm -rf $TESTTMP/repo1
458 458 $ hg --config extensions.a=! init repo1
459 459
460 460 #endif
461 461
462 462 blackbox should work if repo.ui.log is not called (issue5518)
463 463
464 464 $ cat > $TESTTMP/raise.py << EOF
465 465 > from mercurial import registrar, scmutil
466 466 > cmdtable = {}
467 467 > command = registrar.command(cmdtable)
468 468 > @command(b'raise')
469 469 > def raisecmd(*args):
470 470 > raise RuntimeError('raise')
471 471 > EOF
472 472
473 $ cat >> $HGRCPATH << EOF
473
474 $ hg init $TESTTMP/blackbox-exception-only --config blackbox.track=commandexception
475 $ cat >> $TESTTMP/blackbox-exception-only/.hg/hgrc << EOF
474 476 > [blackbox]
475 477 > track = commandexception
476 478 > [extensions]
477 479 > raise=$TESTTMP/raise.py
478 480 > EOF
481 $ cd $TESTTMP/blackbox-exception-only
479 482
480 $ hg init $TESTTMP/blackbox-exception-only
481 $ cd $TESTTMP/blackbox-exception-only
482 483
483 484 #if chg
484 485 (chg exits 255 because it fails to receive an exit code)
485 486 $ hg raise 2>/dev/null
486 487 [255]
487 488 #else
488 489 (hg exits 1 because Python default exit code for uncaught exception is 1)
489 490 $ hg raise 2>/dev/null
490 491 [1]
491 492 #endif
492 493
493 494 $ head -1 .hg/blackbox.log
494 495 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension "mock" (version N/A)
495 496 $ tail -2 .hg/blackbox.log
496 497 RuntimeError: raise
497 498
499 $ cd ..
500
501 Check we did not break `hg mv`
502 ------------------------------
503 (we did in 6.4rc)
504
505 basic setup
506
507 $ hg init blackbox-file-move
508 $ cd blackbox-file-move
509 $ echo foo > foo
510 $ hg add foo
511 $ hg commit -m 'foo'
512
513 copy a file
514
515 $ hg copy foo bar
516
517 move a file
518
519 $ hg mv foo goo
520
General Comments 0
You need to be logged in to leave comments. Login now