files: speed up `hg files` when no flags change display...
Valentin Gatien-Baron
r45388:065421e1 default
@@ -1,4200 +1,4213 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy as copymod
11 11 import errno
12 12 import os
13 13 import re
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 hex,
18 18 nullid,
19 19 nullrev,
20 20 short,
21 21 )
22 22 from .pycompat import (
23 23 getattr,
24 24 open,
25 25 setattr,
26 26 )
27 27 from .thirdparty import attr
28 28
29 29 from . import (
30 30 bookmarks,
31 31 changelog,
32 32 copies,
33 33 crecord as crecordmod,
34 34 dirstateguard,
35 35 encoding,
36 36 error,
37 37 formatter,
38 38 logcmdutil,
39 39 match as matchmod,
40 40 merge as mergemod,
41 41 mergestate as mergestatemod,
42 42 mergeutil,
43 43 obsolete,
44 44 patch,
45 45 pathutil,
46 46 phases,
47 47 pycompat,
48 48 repair,
49 49 revlog,
50 50 rewriteutil,
51 51 scmutil,
52 52 smartset,
53 53 state as statemod,
54 54 subrepoutil,
55 55 templatekw,
56 56 templater,
57 57 util,
58 58 vfs as vfsmod,
59 59 )
60 60
61 61 from .utils import (
62 62 dateutil,
63 63 stringutil,
64 64 )
65 65
66 66 if pycompat.TYPE_CHECKING:
67 67 from typing import (
68 68 Any,
69 69 Dict,
70 70 )
71 71
72 72 for t in (Any, Dict):
73 73 assert t
74 74
75 75 stringio = util.stringio
76 76
77 77 # templates of common command options
78 78
79 79 dryrunopts = [
80 80 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
81 81 ]
82 82
83 83 confirmopts = [
84 84 (b'', b'confirm', None, _(b'ask before applying actions')),
85 85 ]
86 86
87 87 remoteopts = [
88 88 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
89 89 (
90 90 b'',
91 91 b'remotecmd',
92 92 b'',
93 93 _(b'specify hg command to run on the remote side'),
94 94 _(b'CMD'),
95 95 ),
96 96 (
97 97 b'',
98 98 b'insecure',
99 99 None,
100 100 _(b'do not verify server certificate (ignoring web.cacerts config)'),
101 101 ),
102 102 ]
103 103
104 104 walkopts = [
105 105 (
106 106 b'I',
107 107 b'include',
108 108 [],
109 109 _(b'include names matching the given patterns'),
110 110 _(b'PATTERN'),
111 111 ),
112 112 (
113 113 b'X',
114 114 b'exclude',
115 115 [],
116 116 _(b'exclude names matching the given patterns'),
117 117 _(b'PATTERN'),
118 118 ),
119 119 ]
120 120
121 121 commitopts = [
122 122 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
123 123 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
124 124 ]
125 125
126 126 commitopts2 = [
127 127 (
128 128 b'd',
129 129 b'date',
130 130 b'',
131 131 _(b'record the specified date as commit date'),
132 132 _(b'DATE'),
133 133 ),
134 134 (
135 135 b'u',
136 136 b'user',
137 137 b'',
138 138 _(b'record the specified user as committer'),
139 139 _(b'USER'),
140 140 ),
141 141 ]
142 142
143 143 commitopts3 = [
144 144 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
145 145 (b'U', b'currentuser', None, _(b'record the current user as committer')),
146 146 ]
147 147
148 148 formatteropts = [
149 149 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
150 150 ]
151 151
152 152 templateopts = [
153 153 (
154 154 b'',
155 155 b'style',
156 156 b'',
157 157 _(b'display using template map file (DEPRECATED)'),
158 158 _(b'STYLE'),
159 159 ),
160 160 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
161 161 ]
162 162
163 163 logopts = [
164 164 (b'p', b'patch', None, _(b'show patch')),
165 165 (b'g', b'git', None, _(b'use git extended diff format')),
166 166 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
167 167 (b'M', b'no-merges', None, _(b'do not show merges')),
168 168 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
169 169 (b'G', b'graph', None, _(b"show the revision DAG")),
170 170 ] + templateopts
171 171
172 172 diffopts = [
173 173 (b'a', b'text', None, _(b'treat all files as text')),
174 174 (
175 175 b'g',
176 176 b'git',
177 177 None,
178 178 _(b'use git extended diff format (DEFAULT: diff.git)'),
179 179 ),
180 180 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
181 181 (b'', b'nodates', None, _(b'omit dates from diff headers')),
182 182 ]
183 183
184 184 diffwsopts = [
185 185 (
186 186 b'w',
187 187 b'ignore-all-space',
188 188 None,
189 189 _(b'ignore white space when comparing lines'),
190 190 ),
191 191 (
192 192 b'b',
193 193 b'ignore-space-change',
194 194 None,
195 195 _(b'ignore changes in the amount of white space'),
196 196 ),
197 197 (
198 198 b'B',
199 199 b'ignore-blank-lines',
200 200 None,
201 201 _(b'ignore changes whose lines are all blank'),
202 202 ),
203 203 (
204 204 b'Z',
205 205 b'ignore-space-at-eol',
206 206 None,
207 207 _(b'ignore changes in whitespace at EOL'),
208 208 ),
209 209 ]
210 210
211 211 diffopts2 = (
212 212 [
213 213 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
214 214 (
215 215 b'p',
216 216 b'show-function',
217 217 None,
218 218 _(
219 219 b'show which function each change is in (DEFAULT: diff.showfunc)'
220 220 ),
221 221 ),
222 222 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
223 223 ]
224 224 + diffwsopts
225 225 + [
226 226 (
227 227 b'U',
228 228 b'unified',
229 229 b'',
230 230 _(b'number of lines of context to show'),
231 231 _(b'NUM'),
232 232 ),
233 233 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
234 234 (
235 235 b'',
236 236 b'root',
237 237 b'',
238 238 _(b'produce diffs relative to subdirectory'),
239 239 _(b'DIR'),
240 240 ),
241 241 ]
242 242 )
243 243
244 244 mergetoolopts = [
245 245 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
246 246 ]
247 247
248 248 similarityopts = [
249 249 (
250 250 b's',
251 251 b'similarity',
252 252 b'',
253 253 _(b'guess renamed files by similarity (0<=s<=100)'),
254 254 _(b'SIMILARITY'),
255 255 )
256 256 ]
257 257
258 258 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
259 259
260 260 debugrevlogopts = [
261 261 (b'c', b'changelog', False, _(b'open changelog')),
262 262 (b'm', b'manifest', False, _(b'open manifest')),
263 263 (b'', b'dir', b'', _(b'open directory manifest')),
264 264 ]
265 265
266 266 # special string such that everything below this line will be ignored in the
267 267 # editor text
268 268 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
269 269
270 270
271 271 def check_at_most_one_arg(opts, *args):
272 272 """abort if more than one of the arguments are in opts
273 273
274 274 Returns the unique argument or None if none of them were specified.
275 275 """
276 276
277 277 def to_display(name):
278 278 return pycompat.sysbytes(name).replace(b'_', b'-')
279 279
280 280 previous = None
281 281 for x in args:
282 282 if opts.get(x):
283 283 if previous:
284 284 raise error.Abort(
285 285 _(b'cannot specify both --%s and --%s')
286 286 % (to_display(previous), to_display(x))
287 287 )
288 288 previous = x
289 289 return previous
290 290
291 291
292 292 def check_incompatible_arguments(opts, first, others):
293 293 """abort if the first argument is given along with any of the others
294 294
295 295 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
296 296 among themselves, and they're passed as a single collection.
297 297 """
298 298 for other in others:
299 299 check_at_most_one_arg(opts, first, other)
300 300
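
As a rough, standalone sketch of how these two guards compose (it substitutes plain str keys and ValueError for the byte-string option names and error.Abort used above, so it can run outside Mercurial):

def at_most_one(opts, *args):
    # mirror of check_at_most_one_arg(): remember the first truthy option seen
    previous = None
    for x in args:
        if opts.get(x):
            if previous:
                raise ValueError('cannot specify both --%s and --%s' % (previous, x))
            previous = x
    return previous

def incompatible(opts, first, others):
    # mirror of check_incompatible_arguments(): the options in 'others' may be
    # combined with each other, but none of them with 'first'
    for other in others:
        at_most_one(opts, first, other)

opts = {'forget': True, 'dry_run': False, 'after': True}
incompatible(opts, 'forget', ['dry_run'])  # passes: dry_run is falsy
try:
    at_most_one(opts, 'forget', 'after')   # both truthy, so this is rejected
except ValueError as e:
    print(e)                               # cannot specify both --forget and --after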
301 301
302 302 def resolvecommitoptions(ui, opts):
303 303 """modify commit options dict to handle related options
304 304
305 305 The return value indicates that ``rewrite.update-timestamp`` is the reason
306 306 the ``date`` option is set.
307 307 """
308 308 check_at_most_one_arg(opts, b'date', b'currentdate')
309 309 check_at_most_one_arg(opts, b'user', b'currentuser')
310 310
311 311 datemaydiffer = False # date-only change should be ignored?
312 312
313 313 if opts.get(b'currentdate'):
314 314 opts[b'date'] = b'%d %d' % dateutil.makedate()
315 315 elif (
316 316 not opts.get(b'date')
317 317 and ui.configbool(b'rewrite', b'update-timestamp')
318 318 and opts.get(b'currentdate') is None
319 319 ):
320 320 opts[b'date'] = b'%d %d' % dateutil.makedate()
321 321 datemaydiffer = True
322 322
323 323 if opts.get(b'currentuser'):
324 324 opts[b'user'] = ui.username()
325 325
326 326 return datemaydiffer
327 327
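The date handling above has three outcomes: an explicit --currentdate, an explicit --date, or a timestamp injected because rewrite.update-timestamp is set, in which case the function returns True so that a date-only difference can be ignored later. A minimal standalone sketch of that branching, with the config lookup replaced by a plain boolean argument and a simplified date format (both assumptions for the example):

import time

def resolve_date(opts, update_timestamp_config):
    # mirrors the branching in resolvecommitoptions() above
    datemaydiffer = False
    now = '%d 0' % int(time.time())  # stand-in for dateutil.makedate()
    if opts.get('currentdate'):
        opts['date'] = now
    elif (
        not opts.get('date')
        and update_timestamp_config
        and opts.get('currentdate') is None
    ):
        opts['date'] = now
        datemaydiffer = True  # the date was injected, not requested
    return opts.get('date'), datemaydiffer

print(resolve_date({'currentdate': True}, False))   # (<now>, False)
print(resolve_date({}, True))                        # (<now>, True)
print(resolve_date({'date': '1 0'}, True))           # ('1 0', False)
print(resolve_date({'currentdate': False}, True))    # (None, False)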
328 328
329 329 def checknotesize(ui, opts):
330 330 """ make sure the note is of a valid format """
331 331
332 332 note = opts.get(b'note')
333 333 if not note:
334 334 return
335 335
336 336 if len(note) > 255:
337 337 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
338 338 if b'\n' in note:
339 339 raise error.Abort(_(b"note cannot contain a newline"))
340 340
341 341
342 342 def ishunk(x):
343 343 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
344 344 return isinstance(x, hunkclasses)
345 345
346 346
347 347 def newandmodified(chunks, originalchunks):
348 348 newlyaddedandmodifiedfiles = set()
349 349 alsorestore = set()
350 350 for chunk in chunks:
351 351 if (
352 352 ishunk(chunk)
353 353 and chunk.header.isnewfile()
354 354 and chunk not in originalchunks
355 355 ):
356 356 newlyaddedandmodifiedfiles.add(chunk.header.filename())
357 357 alsorestore.update(
358 358 set(chunk.header.files()) - {chunk.header.filename()}
359 359 )
360 360 return newlyaddedandmodifiedfiles, alsorestore
361 361
362 362
363 363 def parsealiases(cmd):
364 364 return cmd.split(b"|")
365 365
366 366
367 367 def setupwrapcolorwrite(ui):
368 368 # wrap ui.write so diff output can be labeled/colorized
369 369 def wrapwrite(orig, *args, **kw):
370 370 label = kw.pop('label', b'')
371 371 for chunk, l in patch.difflabel(lambda: args):
372 372 orig(chunk, label=label + l)
373 373
374 374 oldwrite = ui.write
375 375
376 376 def wrap(*args, **kwargs):
377 377 return wrapwrite(oldwrite, *args, **kwargs)
378 378
379 379 setattr(ui, 'write', wrap)
380 380 return oldwrite
381 381
382 382
383 383 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
384 384 try:
385 385 if usecurses:
386 386 if testfile:
387 387 recordfn = crecordmod.testdecorator(
388 388 testfile, crecordmod.testchunkselector
389 389 )
390 390 else:
391 391 recordfn = crecordmod.chunkselector
392 392
393 393 return crecordmod.filterpatch(
394 394 ui, originalhunks, recordfn, operation
395 395 )
396 396 except crecordmod.fallbackerror as e:
397 397 ui.warn(b'%s\n' % e)
398 398 ui.warn(_(b'falling back to text mode\n'))
399 399
400 400 return patch.filterpatch(ui, originalhunks, match, operation)
401 401
402 402
403 403 def recordfilter(ui, originalhunks, match, operation=None):
404 404 """ Prompts the user to filter the originalhunks and return a list of
405 405 selected hunks.
406 406 *operation* is used to build ui messages indicating to the user what
407 407 kind of filtering they are doing: reverting, committing, shelving, etc.
408 408 (see patch.filterpatch).
409 409 """
410 410 usecurses = crecordmod.checkcurses(ui)
411 411 testfile = ui.config(b'experimental', b'crecordtest')
412 412 oldwrite = setupwrapcolorwrite(ui)
413 413 try:
414 414 newchunks, newopts = filterchunks(
415 415 ui, originalhunks, usecurses, testfile, match, operation
416 416 )
417 417 finally:
418 418 ui.write = oldwrite
419 419 return newchunks, newopts
420 420
421 421
422 422 def dorecord(
423 423 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
424 424 ):
425 425 opts = pycompat.byteskwargs(opts)
426 426 if not ui.interactive():
427 427 if cmdsuggest:
428 428 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
429 429 else:
430 430 msg = _(b'running non-interactively')
431 431 raise error.Abort(msg)
432 432
433 433 # make sure username is set before going interactive
434 434 if not opts.get(b'user'):
435 435 ui.username() # raise exception, username not provided
436 436
437 437 def recordfunc(ui, repo, message, match, opts):
438 438 """This is the generic record driver.
439 439
440 440 Its job is to interactively filter local changes, and
441 441 accordingly prepare working directory into a state in which the
442 442 job can be delegated to a non-interactive commit command such as
443 443 'commit' or 'qrefresh'.
444 444
445 445 After the actual job is done by the non-interactive command, the
446 446 working directory is restored to its original state.
447 447
448 448 In the end we'll record interesting changes, and everything else
449 449 will be left in place, so the user can continue working.
450 450 """
451 451 if not opts.get(b'interactive-unshelve'):
452 452 checkunfinished(repo, commit=True)
453 453 wctx = repo[None]
454 454 merge = len(wctx.parents()) > 1
455 455 if merge:
456 456 raise error.Abort(
457 457 _(
458 458 b'cannot partially commit a merge '
459 459 b'(use "hg commit" instead)'
460 460 )
461 461 )
462 462
463 463 def fail(f, msg):
464 464 raise error.Abort(b'%s: %s' % (f, msg))
465 465
466 466 force = opts.get(b'force')
467 467 if not force:
468 468 match = matchmod.badmatch(match, fail)
469 469
470 470 status = repo.status(match=match)
471 471
472 472 overrides = {(b'ui', b'commitsubrepos'): True}
473 473
474 474 with repo.ui.configoverride(overrides, b'record'):
475 475 # subrepoutil.precommit() modifies the status
476 476 tmpstatus = scmutil.status(
477 477 copymod.copy(status.modified),
478 478 copymod.copy(status.added),
479 479 copymod.copy(status.removed),
480 480 copymod.copy(status.deleted),
481 481 copymod.copy(status.unknown),
482 482 copymod.copy(status.ignored),
483 483 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
484 484 )
485 485
486 486 # Force allows -X subrepo to skip the subrepo.
487 487 subs, commitsubs, newstate = subrepoutil.precommit(
488 488 repo.ui, wctx, tmpstatus, match, force=True
489 489 )
490 490 for s in subs:
491 491 if s in commitsubs:
492 492 dirtyreason = wctx.sub(s).dirtyreason(True)
493 493 raise error.Abort(dirtyreason)
494 494
495 495 if not force:
496 496 repo.checkcommitpatterns(wctx, match, status, fail)
497 497 diffopts = patch.difffeatureopts(
498 498 ui,
499 499 opts=opts,
500 500 whitespace=True,
501 501 section=b'commands',
502 502 configprefix=b'commit.interactive.',
503 503 )
504 504 diffopts.nodates = True
505 505 diffopts.git = True
506 506 diffopts.showfunc = True
507 507 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
508 508 originalchunks = patch.parsepatch(originaldiff)
509 509 match = scmutil.match(repo[None], pats)
510 510
511 511 # 1. filter patch, since we are intending to apply subset of it
512 512 try:
513 513 chunks, newopts = filterfn(ui, originalchunks, match)
514 514 except error.PatchError as err:
515 515 raise error.Abort(_(b'error parsing patch: %s') % err)
516 516 opts.update(newopts)
517 517
518 518 # We need to keep a backup of files that have been newly added and
519 519 # modified during the recording process because there is a previous
520 520 # version without the edit in the workdir. We also will need to restore
521 521 # files that were the sources of renames so that the patch application
522 522 # works.
523 523 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
524 524 chunks, originalchunks
525 525 )
526 526 contenders = set()
527 527 for h in chunks:
528 528 try:
529 529 contenders.update(set(h.files()))
530 530 except AttributeError:
531 531 pass
532 532
533 533 changed = status.modified + status.added + status.removed
534 534 newfiles = [f for f in changed if f in contenders]
535 535 if not newfiles:
536 536 ui.status(_(b'no changes to record\n'))
537 537 return 0
538 538
539 539 modified = set(status.modified)
540 540
541 541 # 2. backup changed files, so we can restore them in the end
542 542
543 543 if backupall:
544 544 tobackup = changed
545 545 else:
546 546 tobackup = [
547 547 f
548 548 for f in newfiles
549 549 if f in modified or f in newlyaddedandmodifiedfiles
550 550 ]
551 551 backups = {}
552 552 if tobackup:
553 553 backupdir = repo.vfs.join(b'record-backups')
554 554 try:
555 555 os.mkdir(backupdir)
556 556 except OSError as err:
557 557 if err.errno != errno.EEXIST:
558 558 raise
559 559 try:
560 560 # backup continues
561 561 for f in tobackup:
562 562 fd, tmpname = pycompat.mkstemp(
563 563 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
564 564 )
565 565 os.close(fd)
566 566 ui.debug(b'backup %r as %r\n' % (f, tmpname))
567 567 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
568 568 backups[f] = tmpname
569 569
570 570 fp = stringio()
571 571 for c in chunks:
572 572 fname = c.filename()
573 573 if fname in backups:
574 574 c.write(fp)
575 575 dopatch = fp.tell()
576 576 fp.seek(0)
577 577
578 578 # 2.5 optionally review / modify patch in text editor
579 579 if opts.get(b'review', False):
580 580 patchtext = (
581 581 crecordmod.diffhelptext
582 582 + crecordmod.patchhelptext
583 583 + fp.read()
584 584 )
585 585 reviewedpatch = ui.edit(
586 586 patchtext, b"", action=b"diff", repopath=repo.path
587 587 )
588 588 fp.truncate(0)
589 589 fp.write(reviewedpatch)
590 590 fp.seek(0)
591 591
592 592 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
593 593 # 3a. apply filtered patch to clean repo (clean)
594 594 if backups:
595 595 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
596 596 mergemod.revert_to(repo[b'.'], matcher=m)
597 597
598 598 # 3b. (apply)
599 599 if dopatch:
600 600 try:
601 601 ui.debug(b'applying patch\n')
602 602 ui.debug(fp.getvalue())
603 603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
604 604 except error.PatchError as err:
605 605 raise error.Abort(pycompat.bytestr(err))
606 606 del fp
607 607
608 608 # 4. We prepared working directory according to filtered
609 609 # patch. Now is the time to delegate the job to
610 610 # commit/qrefresh or the like!
611 611
612 612 # Make all of the pathnames absolute.
613 613 newfiles = [repo.wjoin(nf) for nf in newfiles]
614 614 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
615 615 finally:
616 616 # 5. finally restore backed-up files
617 617 try:
618 618 dirstate = repo.dirstate
619 619 for realname, tmpname in pycompat.iteritems(backups):
620 620 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
621 621
622 622 if dirstate[realname] == b'n':
623 623 # without normallookup, restoring timestamp
624 624 # may cause partially committed files
625 625 # to be treated as unmodified
626 626 dirstate.normallookup(realname)
627 627
628 628 # copystat=True here and above are a hack to trick any
629 629 # editors that have f open into thinking we haven't modified them.
630 630 #
631 631 # Also note that this is racy, as an editor could notice the
632 632 # file's mtime before we've finished writing it.
633 633 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
634 634 os.unlink(tmpname)
635 635 if tobackup:
636 636 os.rmdir(backupdir)
637 637 except OSError:
638 638 pass
639 639
640 640 def recordinwlock(ui, repo, message, match, opts):
641 641 with repo.wlock():
642 642 return recordfunc(ui, repo, message, match, opts)
643 643
644 644 return commit(ui, repo, recordinwlock, pats, opts)
645 645
646 646
647 647 class dirnode(object):
648 648 """
649 649 Represent a directory in user working copy with information required for
650 650 the purpose of tersing its status.
651 651
652 652 path is the path to the directory, without a trailing '/'
653 653
654 654 statuses is a set of statuses of all files in this directory (this includes
655 655 all the files in all the subdirectories too)
656 656
657 657 files is a list of files which are direct children of this directory
658 658
659 659 subdirs is a dictionary with the sub-directory name as the key and its own
660 660 dirnode object as the value
661 661 """
662 662
663 663 def __init__(self, dirpath):
664 664 self.path = dirpath
665 665 self.statuses = set()
666 666 self.files = []
667 667 self.subdirs = {}
668 668
669 669 def _addfileindir(self, filename, status):
670 670 """Add a file in this directory as a direct child."""
671 671 self.files.append((filename, status))
672 672
673 673 def addfile(self, filename, status):
674 674 """
675 675 Add a file to this directory or to its direct parent directory.
676 676
678 678 If the file is not a direct child of this directory, we traverse to the
679 679 directory of which this file is a direct child and add the file
679 679 there.
680 680 """
681 681
682 682 # if the filename contains a path separator, it means it's not a direct
683 683 # child of this directory
684 684 if b'/' in filename:
685 685 subdir, filep = filename.split(b'/', 1)
686 686
687 687 # does the dirnode object for subdir exist
688 688 if subdir not in self.subdirs:
689 689 subdirpath = pathutil.join(self.path, subdir)
690 690 self.subdirs[subdir] = dirnode(subdirpath)
691 691
692 692 # try adding the file in subdir
693 693 self.subdirs[subdir].addfile(filep, status)
694 694
695 695 else:
696 696 self._addfileindir(filename, status)
697 697
698 698 if status not in self.statuses:
699 699 self.statuses.add(status)
700 700
701 701 def iterfilepaths(self):
702 702 """Yield (status, path) for files directly under this directory."""
703 703 for f, st in self.files:
704 704 yield st, pathutil.join(self.path, f)
705 705
706 706 def tersewalk(self, terseargs):
707 707 """
708 708 Yield (status, path) obtained by processing the status of this
709 709 dirnode.
710 710
711 711 terseargs is the string of arguments passed by the user with the `--terse`
712 712 flag.
713 713
714 714 Following are the cases which can happen:
715 715
716 716 1) All the files in the directory (including all the files in its
717 717 subdirectories) share the same status and the user has asked us to terse
718 718 that status. -> yield (status, dirpath). dirpath will end in '/'.
719 719
720 720 2) Otherwise, we do the following:
721 721
722 722 a) Yield (status, filepath) for all the files which are in this
723 723 directory (only the ones in this directory, not the subdirs)
724 724
725 725 b) Recurse the function on all the subdirectories of this
726 726 directory
727 727 """
728 728
729 729 if len(self.statuses) == 1:
730 730 onlyst = self.statuses.pop()
731 731
732 732 # Making sure we terse only when the status abbreviation is
733 733 # passed as terse argument
734 734 if onlyst in terseargs:
735 735 yield onlyst, self.path + b'/'
736 736 return
737 737
738 738 # add the files to status list
739 739 for st, fpath in self.iterfilepaths():
740 740 yield st, fpath
741 741
742 742 # recurse on the subdirs
743 743 for dirobj in self.subdirs.values():
744 744 for st, fpath in dirobj.tersewalk(terseargs):
745 745 yield st, fpath
746 746
747 747
748 748 def tersedir(statuslist, terseargs):
749 749 """
750 750 Terse the status if all the files in a directory share the same status.
751 751
752 752 statuslist is a scmutil.status() object which contains a list of files for
753 753 each status.
754 754 terseargs is the string which is passed by the user as the argument to the
755 755 `--terse` flag.
756 756
757 757 The function makes a tree of objects of dirnode class, and at each node it
758 758 stores the information required to know whether we can terse a certain
759 759 directory or not.
760 760 """
761 761 # the order matters here as that is used to produce final list
762 762 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
763 763
764 764 # checking the argument validity
765 765 for s in pycompat.bytestr(terseargs):
766 766 if s not in allst:
767 767 raise error.Abort(_(b"'%s' not recognized") % s)
768 768
769 769 # creating a dirnode object for the root of the repo
770 770 rootobj = dirnode(b'')
771 771 pstatus = (
772 772 b'modified',
773 773 b'added',
774 774 b'deleted',
775 775 b'clean',
776 776 b'unknown',
777 777 b'ignored',
778 778 b'removed',
779 779 )
780 780
781 781 tersedict = {}
782 782 for attrname in pstatus:
783 783 statuschar = attrname[0:1]
784 784 for f in getattr(statuslist, attrname):
785 785 rootobj.addfile(f, statuschar)
786 786 tersedict[statuschar] = []
787 787
788 788 # we won't be tersing the root dir, so add files in it
789 789 for st, fpath in rootobj.iterfilepaths():
790 790 tersedict[st].append(fpath)
791 791
792 792 # process each sub-directory and build tersedict
793 793 for subdir in rootobj.subdirs.values():
794 794 for st, f in subdir.tersewalk(terseargs):
795 795 tersedict[st].append(f)
796 796
797 797 tersedlist = []
798 798 for st in allst:
799 799 tersedict[st].sort()
800 800 tersedlist.append(tersedict[st])
801 801
802 802 return scmutil.status(*tersedlist)
803 803
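To make the tersing behaviour concrete: with --terse u, a directory whose files are all unknown collapses to a single 'directory/' entry, while a directory with mixed statuses is listed file by file. Below is a minimal standalone sketch of that collapsing idea; unlike the recursive dirnode tree above it only looks at the top-level directory, and the (path, status letter) tuples are an assumption for illustration:

from collections import defaultdict

def terse(files, terseargs):
    # files: list of (path, status letter); collapse a directory to 'dir/'
    # when every file under it shares one status and that letter was
    # requested via terseargs (same idea as dirnode.tersewalk() above)
    bydir = defaultdict(list)
    for path, st in files:
        top = path.split('/', 1)[0] if '/' in path else ''
        bydir[top].append((path, st))
    out = []
    for top, entries in sorted(bydir.items()):
        statuses = {st for _, st in entries}
        if top and len(statuses) == 1 and statuses.pop() in terseargs:
            out.append((entries[0][1], top + '/'))
        else:
            out.extend((st, path) for path, st in entries)
    return out

files = [('build/a.o', 'u'), ('build/b.o', 'u'), ('src/main.c', 'm'), ('src/new.c', 'a')]
print(terse(files, 'u'))
# [('u', 'build/'), ('m', 'src/main.c'), ('a', 'src/new.c')]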
804 804
805 805 def _commentlines(raw):
806 806 '''Surround lines with a comment char and a new line'''
807 807 lines = raw.splitlines()
808 808 commentedlines = [b'# %s' % line for line in lines]
809 809 return b'\n'.join(commentedlines) + b'\n'
810 810
811 811
812 812 @attr.s(frozen=True)
813 813 class morestatus(object):
814 814 reporoot = attr.ib()
815 815 unfinishedop = attr.ib()
816 816 unfinishedmsg = attr.ib()
817 817 activemerge = attr.ib()
818 818 unresolvedpaths = attr.ib()
819 819 _formattedpaths = attr.ib(init=False, default=set())
820 820 _label = b'status.morestatus'
821 821
822 822 def formatfile(self, path, fm):
823 823 self._formattedpaths.add(path)
824 824 if self.activemerge and path in self.unresolvedpaths:
825 825 fm.data(unresolved=True)
826 826
827 827 def formatfooter(self, fm):
828 828 if self.unfinishedop or self.unfinishedmsg:
829 829 fm.startitem()
830 830 fm.data(itemtype=b'morestatus')
831 831
832 832 if self.unfinishedop:
833 833 fm.data(unfinished=self.unfinishedop)
834 834 statemsg = (
835 835 _(b'The repository is in an unfinished *%s* state.')
836 836 % self.unfinishedop
837 837 )
838 838 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
839 839 if self.unfinishedmsg:
840 840 fm.data(unfinishedmsg=self.unfinishedmsg)
841 841
842 842 # May also start new data items.
843 843 self._formatconflicts(fm)
844 844
845 845 if self.unfinishedmsg:
846 846 fm.plain(
847 847 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
848 848 )
849 849
850 850 def _formatconflicts(self, fm):
851 851 if not self.activemerge:
852 852 return
853 853
854 854 if self.unresolvedpaths:
855 855 mergeliststr = b'\n'.join(
856 856 [
857 857 b' %s'
858 858 % util.pathto(self.reporoot, encoding.getcwd(), path)
859 859 for path in self.unresolvedpaths
860 860 ]
861 861 )
862 862 msg = (
863 863 _(
864 864 '''Unresolved merge conflicts:
865 865
866 866 %s
867 867
868 868 To mark files as resolved: hg resolve --mark FILE'''
869 869 )
870 870 % mergeliststr
871 871 )
872 872
873 873 # If any paths with unresolved conflicts were not previously
874 874 # formatted, output them now.
875 875 for f in self.unresolvedpaths:
876 876 if f in self._formattedpaths:
877 877 # Already output.
878 878 continue
879 879 fm.startitem()
880 880 # We can't claim to know the status of the file - it may just
881 881 # have been in one of the states that were not requested for
882 882 # display, so it could be anything.
883 883 fm.data(itemtype=b'file', path=f, unresolved=True)
884 884
885 885 else:
886 886 msg = _(b'No unresolved merge conflicts.')
887 887
888 888 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
889 889
890 890
891 891 def readmorestatus(repo):
892 892 """Returns a morestatus object if the repo has unfinished state."""
893 893 statetuple = statemod.getrepostate(repo)
894 894 mergestate = mergestatemod.mergestate.read(repo)
895 895 activemerge = mergestate.active()
896 896 if not statetuple and not activemerge:
897 897 return None
898 898
899 899 unfinishedop = unfinishedmsg = unresolved = None
900 900 if statetuple:
901 901 unfinishedop, unfinishedmsg = statetuple
902 902 if activemerge:
903 903 unresolved = sorted(mergestate.unresolved())
904 904 return morestatus(
905 905 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
906 906 )
907 907
908 908
909 909 def findpossible(cmd, table, strict=False):
910 910 """
911 911 Return cmd -> (aliases, command table entry)
912 912 for each matching command.
913 913 Return debug commands (or their aliases) only if no normal command matches.
914 914 """
915 915 choice = {}
916 916 debugchoice = {}
917 917
918 918 if cmd in table:
919 919 # short-circuit exact matches, "log" alias beats "log|history"
920 920 keys = [cmd]
921 921 else:
922 922 keys = table.keys()
923 923
924 924 allcmds = []
925 925 for e in keys:
926 926 aliases = parsealiases(e)
927 927 allcmds.extend(aliases)
928 928 found = None
929 929 if cmd in aliases:
930 930 found = cmd
931 931 elif not strict:
932 932 for a in aliases:
933 933 if a.startswith(cmd):
934 934 found = a
935 935 break
936 936 if found is not None:
937 937 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
938 938 debugchoice[found] = (aliases, table[e])
939 939 else:
940 940 choice[found] = (aliases, table[e])
941 941
942 942 if not choice and debugchoice:
943 943 choice = debugchoice
944 944
945 945 return choice, allcmds
946 946
947 947
948 948 def findcmd(cmd, table, strict=True):
949 949 """Return (aliases, command table entry) for command string."""
950 950 choice, allcmds = findpossible(cmd, table, strict)
951 951
952 952 if cmd in choice:
953 953 return choice[cmd]
954 954
955 955 if len(choice) > 1:
956 956 clist = sorted(choice)
957 957 raise error.AmbiguousCommand(cmd, clist)
958 958
959 959 if choice:
960 960 return list(choice.values())[0]
961 961
962 962 raise error.UnknownCommand(cmd, allcmds)
963 963
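For illustration, the resolution rules above (exact alias first, then unique prefix, ambiguity is an error) can be exercised with a small standalone sketch; the toy command table and LookupError are assumptions for the example, and the debug-command demotion done by findpossible() is left out:

def find(cmd, table, strict=False):
    # exact alias match wins; otherwise, in non-strict mode, a unique prefix
    # is accepted and an ambiguous prefix raises
    choice = {}
    for entry in table:
        aliases = entry.split('|')
        if cmd in aliases:
            return aliases[0]
        if not strict:
            for a in aliases:
                if a.startswith(cmd):
                    choice[aliases[0]] = True
                    break
    if len(choice) > 1:
        raise LookupError('ambiguous command: %s' % cmd)
    if choice:
        return next(iter(choice))
    raise LookupError('unknown command: %s' % cmd)

table = ['log|history', 'commit|ci', 'config|showconfig|debugconfig']
print(find('history', table))       # log    (exact alias match)
print(find('com', table))           # commit (unique prefix)
try:
    find('co', table)               # matches both commit and config
except LookupError as e:
    print(e)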
964 964
965 965 def changebranch(ui, repo, revs, label, opts):
966 966 """ Change the branch name of given revs to label """
967 967
968 968 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
969 969 # abort in case of uncommitted merge or dirty wdir
970 970 bailifchanged(repo)
971 971 revs = scmutil.revrange(repo, revs)
972 972 if not revs:
973 973 raise error.Abort(b"empty revision set")
974 974 roots = repo.revs(b'roots(%ld)', revs)
975 975 if len(roots) > 1:
976 976 raise error.Abort(
977 977 _(b"cannot change branch of non-linear revisions")
978 978 )
979 979 rewriteutil.precheck(repo, revs, b'change branch of')
980 980
981 981 root = repo[roots.first()]
982 982 rpb = {parent.branch() for parent in root.parents()}
983 983 if (
984 984 not opts.get(b'force')
985 985 and label not in rpb
986 986 and label in repo.branchmap()
987 987 ):
988 988 raise error.Abort(_(b"a branch of the same name already exists"))
989 989
990 990 if repo.revs(b'obsolete() and %ld', revs):
991 991 raise error.Abort(
992 992 _(b"cannot change branch of an obsolete changeset")
993 993 )
994 994
995 995 # make sure only topological heads
996 996 if repo.revs(b'heads(%ld) - head()', revs):
997 997 raise error.Abort(_(b"cannot change branch in middle of a stack"))
998 998
999 999 replacements = {}
1000 1000 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
1001 1001 # mercurial.subrepo -> mercurial.cmdutil
1002 1002 from . import context
1003 1003
1004 1004 for rev in revs:
1005 1005 ctx = repo[rev]
1006 1006 oldbranch = ctx.branch()
1007 1007 # check if ctx has same branch
1008 1008 if oldbranch == label:
1009 1009 continue
1010 1010
1011 1011 def filectxfn(repo, newctx, path):
1012 1012 try:
1013 1013 return ctx[path]
1014 1014 except error.ManifestLookupError:
1015 1015 return None
1016 1016
1017 1017 ui.debug(
1018 1018 b"changing branch of '%s' from '%s' to '%s'\n"
1019 1019 % (hex(ctx.node()), oldbranch, label)
1020 1020 )
1021 1021 extra = ctx.extra()
1022 1022 extra[b'branch_change'] = hex(ctx.node())
1023 1023 # While changing the branch of a set of linear commits, make sure that
1024 1024 # we base our commits on the new parent rather than the old parent which
1025 1025 # was obsoleted while changing the branch
1026 1026 p1 = ctx.p1().node()
1027 1027 p2 = ctx.p2().node()
1028 1028 if p1 in replacements:
1029 1029 p1 = replacements[p1][0]
1030 1030 if p2 in replacements:
1031 1031 p2 = replacements[p2][0]
1032 1032
1033 1033 mc = context.memctx(
1034 1034 repo,
1035 1035 (p1, p2),
1036 1036 ctx.description(),
1037 1037 ctx.files(),
1038 1038 filectxfn,
1039 1039 user=ctx.user(),
1040 1040 date=ctx.date(),
1041 1041 extra=extra,
1042 1042 branch=label,
1043 1043 )
1044 1044
1045 1045 newnode = repo.commitctx(mc)
1046 1046 replacements[ctx.node()] = (newnode,)
1047 1047 ui.debug(b'new node id is %s\n' % hex(newnode))
1048 1048
1049 1049 # create obsmarkers and move bookmarks
1050 1050 scmutil.cleanupnodes(
1051 1051 repo, replacements, b'branch-change', fixphase=True
1052 1052 )
1053 1053
1054 1054 # move the working copy too
1055 1055 wctx = repo[None]
1056 1056 # in-progress merge is a bit too complex for now.
1057 1057 if len(wctx.parents()) == 1:
1058 1058 newid = replacements.get(wctx.p1().node())
1059 1059 if newid is not None:
1060 1060 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1061 1061 # mercurial.cmdutil
1062 1062 from . import hg
1063 1063
1064 1064 hg.update(repo, newid[0], quietempty=True)
1065 1065
1066 1066 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1067 1067
1068 1068
1069 1069 def findrepo(p):
1070 1070 while not os.path.isdir(os.path.join(p, b".hg")):
1071 1071 oldp, p = p, os.path.dirname(p)
1072 1072 if p == oldp:
1073 1073 return None
1074 1074
1075 1075 return p
1076 1076
1077 1077
1078 1078 def bailifchanged(repo, merge=True, hint=None):
1079 1079 """ enforce the precondition that the working directory must be clean.
1080 1080
1081 1081 'merge' can be set to false if a pending uncommitted merge should be
1082 1082 ignored (such as when 'update --check' runs).
1083 1083
1084 1084 'hint' is the usual hint given to Abort exception.
1085 1085 """
1086 1086
1087 1087 if merge and repo.dirstate.p2() != nullid:
1088 1088 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1089 1089 st = repo.status()
1090 1090 if st.modified or st.added or st.removed or st.deleted:
1091 1091 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1092 1092 ctx = repo[None]
1093 1093 for s in sorted(ctx.substate):
1094 1094 ctx.sub(s).bailifchanged(hint=hint)
1095 1095
1096 1096
1097 1097 def logmessage(ui, opts):
1098 1098 """ get the log message according to the -m and -l options """
1099 1099
1100 1100 check_at_most_one_arg(opts, b'message', b'logfile')
1101 1101
1102 1102 message = opts.get(b'message')
1103 1103 logfile = opts.get(b'logfile')
1104 1104
1105 1105 if not message and logfile:
1106 1106 try:
1107 1107 if isstdiofilename(logfile):
1108 1108 message = ui.fin.read()
1109 1109 else:
1110 1110 message = b'\n'.join(util.readfile(logfile).splitlines())
1111 1111 except IOError as inst:
1112 1112 raise error.Abort(
1113 1113 _(b"can't read commit message '%s': %s")
1114 1114 % (logfile, encoding.strtolocal(inst.strerror))
1115 1115 )
1116 1116 return message
1117 1117
1118 1118
1119 1119 def mergeeditform(ctxorbool, baseformname):
1120 1120 """return appropriate editform name (referencing a committemplate)
1121 1121
1122 1122 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1123 1123 merging is committed.
1124 1124
1125 1125 This returns baseformname with '.merge' appended if it is a merge,
1126 1126 otherwise '.normal' is appended.
1127 1127 """
1128 1128 if isinstance(ctxorbool, bool):
1129 1129 if ctxorbool:
1130 1130 return baseformname + b".merge"
1131 1131 elif len(ctxorbool.parents()) > 1:
1132 1132 return baseformname + b".merge"
1133 1133
1134 1134 return baseformname + b".normal"
1135 1135
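A quick sketch of the resulting editform names, assuming the mercurial package is importable (with a ctx instead of a bool, '.merge' is chosen when the ctx has two parents):

from mercurial import cmdutil

print(cmdutil.mergeeditform(True, b'commit'))    # b'commit.merge'
print(cmdutil.mergeeditform(False, b'commit'))   # b'commit.normal'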
1136 1136
1137 1137 def getcommiteditor(
1138 1138 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1139 1139 ):
1140 1140 """get appropriate commit message editor according to '--edit' option
1141 1141
1142 1142 'finishdesc' is a function to be called with the edited commit message
1143 1143 (= 'description' of the new changeset) just after editing, but
1144 1144 before checking emptiness. It should return the actual text to be
1145 1145 stored into history. This allows changing the description before
1146 1146 storing it.
1147 1147
1148 1148 'extramsg' is an extra message to be shown in the editor instead of the
1149 1149 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1150 1150 are automatically added.
1151 1151
1152 1152 'editform' is a dot-separated list of names, to distinguish
1153 1153 the purpose of commit text editing.
1154 1154
1155 1155 'getcommiteditor' returns 'commitforceeditor' regardless of
1156 1156 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1157 1157 they are specific to usage in MQ.
1158 1158 """
1159 1159 if edit or finishdesc or extramsg:
1160 1160 return lambda r, c, s: commitforceeditor(
1161 1161 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1162 1162 )
1163 1163 elif editform:
1164 1164 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1165 1165 else:
1166 1166 return commiteditor
1167 1167
1168 1168
1169 1169 def _escapecommandtemplate(tmpl):
1170 1170 parts = []
1171 1171 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1172 1172 if typ == b'string':
1173 1173 parts.append(stringutil.escapestr(tmpl[start:end]))
1174 1174 else:
1175 1175 parts.append(tmpl[start:end])
1176 1176 return b''.join(parts)
1177 1177
1178 1178
1179 1179 def rendercommandtemplate(ui, tmpl, props):
1180 1180 r"""Expand a literal template 'tmpl' in a way suitable for command line
1181 1181
1182 1182 '\' in outermost string is not taken as an escape character because it
1183 1183 is a directory separator on Windows.
1184 1184
1185 1185 >>> from . import ui as uimod
1186 1186 >>> ui = uimod.ui()
1187 1187 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1188 1188 'c:\\foo'
1189 1189 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1190 1190 'c:{path}'
1191 1191 """
1192 1192 if not tmpl:
1193 1193 return tmpl
1194 1194 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1195 1195 return t.renderdefault(props)
1196 1196
1197 1197
1198 1198 def rendertemplate(ctx, tmpl, props=None):
1199 1199 """Expand a literal template 'tmpl' byte-string against one changeset
1200 1200
1201 1201 Each props item must be a stringify-able value or a callable returning
1202 1202 such a value, i.e. no bare list or dict should be passed.
1203 1203 """
1204 1204 repo = ctx.repo()
1205 1205 tres = formatter.templateresources(repo.ui, repo)
1206 1206 t = formatter.maketemplater(
1207 1207 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1208 1208 )
1209 1209 mapping = {b'ctx': ctx}
1210 1210 if props:
1211 1211 mapping.update(props)
1212 1212 return t.renderdefault(mapping)
1213 1213
1214 1214
1215 1215 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1216 1216 r"""Convert old-style filename format string to template string
1217 1217
1218 1218 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1219 1219 'foo-{reporoot|basename}-{seqno}.patch'
1220 1220 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1221 1221 '{rev}{tags % "{tag}"}{node}'
1222 1222
1223 1223 '\' in outermost strings has to be escaped because it is a directory
1224 1224 separator on Windows:
1225 1225
1226 1226 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1227 1227 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1228 1228 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1229 1229 '\\\\\\\\foo\\\\bar.patch'
1230 1230 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1231 1231 '\\\\{tags % "{tag}"}'
1232 1232
1233 1233 but inner strings follow the template rules (i.e. '\' is taken as an
1234 1234 escape character):
1235 1235
1236 1236 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1237 1237 '{"c:\\tmp"}'
1238 1238 """
1239 1239 expander = {
1240 1240 b'H': b'{node}',
1241 1241 b'R': b'{rev}',
1242 1242 b'h': b'{node|short}',
1243 1243 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1244 1244 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1245 1245 b'%': b'%',
1246 1246 b'b': b'{reporoot|basename}',
1247 1247 }
1248 1248 if total is not None:
1249 1249 expander[b'N'] = b'{total}'
1250 1250 if seqno is not None:
1251 1251 expander[b'n'] = b'{seqno}'
1252 1252 if total is not None and seqno is not None:
1253 1253 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1254 1254 if pathname is not None:
1255 1255 expander[b's'] = b'{pathname|basename}'
1256 1256 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1257 1257 expander[b'p'] = b'{pathname}'
1258 1258
1259 1259 newname = []
1260 1260 for typ, start, end in templater.scantemplate(pat, raw=True):
1261 1261 if typ != b'string':
1262 1262 newname.append(pat[start:end])
1263 1263 continue
1264 1264 i = start
1265 1265 while i < end:
1266 1266 n = pat.find(b'%', i, end)
1267 1267 if n < 0:
1268 1268 newname.append(stringutil.escapestr(pat[i:end]))
1269 1269 break
1270 1270 newname.append(stringutil.escapestr(pat[i:n]))
1271 1271 if n + 2 > end:
1272 1272 raise error.Abort(
1273 1273 _(b"incomplete format spec in output filename")
1274 1274 )
1275 1275 c = pat[n + 1 : n + 2]
1276 1276 i = n + 2
1277 1277 try:
1278 1278 newname.append(expander[c])
1279 1279 except KeyError:
1280 1280 raise error.Abort(
1281 1281 _(b"invalid format spec '%%%s' in output filename") % c
1282 1282 )
1283 1283 return b''.join(newname)
1284 1284
1285 1285
1286 1286 def makefilename(ctx, pat, **props):
1287 1287 if not pat:
1288 1288 return pat
1289 1289 tmpl = _buildfntemplate(pat, **props)
1290 1290 # BUG: alias expansion shouldn't be made against template fragments
1291 1291 # rewritten from %-format strings, but we have no easy way to partially
1292 1292 # disable the expansion.
1293 1293 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1294 1294
1295 1295
1296 1296 def isstdiofilename(pat):
1297 1297 """True if the given pat looks like a filename denoting stdin/stdout"""
1298 1298 return not pat or pat == b'-'
1299 1299
1300 1300
1301 1301 class _unclosablefile(object):
1302 1302 def __init__(self, fp):
1303 1303 self._fp = fp
1304 1304
1305 1305 def close(self):
1306 1306 pass
1307 1307
1308 1308 def __iter__(self):
1309 1309 return iter(self._fp)
1310 1310
1311 1311 def __getattr__(self, attr):
1312 1312 return getattr(self._fp, attr)
1313 1313
1314 1314 def __enter__(self):
1315 1315 return self
1316 1316
1317 1317 def __exit__(self, exc_type, exc_value, exc_tb):
1318 1318 pass
1319 1319
1320 1320
1321 1321 def makefileobj(ctx, pat, mode=b'wb', **props):
1322 1322 writable = mode not in (b'r', b'rb')
1323 1323
1324 1324 if isstdiofilename(pat):
1325 1325 repo = ctx.repo()
1326 1326 if writable:
1327 1327 fp = repo.ui.fout
1328 1328 else:
1329 1329 fp = repo.ui.fin
1330 1330 return _unclosablefile(fp)
1331 1331 fn = makefilename(ctx, pat, **props)
1332 1332 return open(fn, mode)
1333 1333
1334 1334
1335 1335 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1336 1336 """opens the changelog, manifest, a filelog or a given revlog"""
1337 1337 cl = opts[b'changelog']
1338 1338 mf = opts[b'manifest']
1339 1339 dir = opts[b'dir']
1340 1340 msg = None
1341 1341 if cl and mf:
1342 1342 msg = _(b'cannot specify --changelog and --manifest at the same time')
1343 1343 elif cl and dir:
1344 1344 msg = _(b'cannot specify --changelog and --dir at the same time')
1345 1345 elif cl or mf or dir:
1346 1346 if file_:
1347 1347 msg = _(b'cannot specify filename with --changelog or --manifest')
1348 1348 elif not repo:
1349 1349 msg = _(
1350 1350 b'cannot specify --changelog or --manifest or --dir '
1351 1351 b'without a repository'
1352 1352 )
1353 1353 if msg:
1354 1354 raise error.Abort(msg)
1355 1355
1356 1356 r = None
1357 1357 if repo:
1358 1358 if cl:
1359 1359 r = repo.unfiltered().changelog
1360 1360 elif dir:
1361 1361 if b'treemanifest' not in repo.requirements:
1362 1362 raise error.Abort(
1363 1363 _(
1364 1364 b"--dir can only be used on repos with "
1365 1365 b"treemanifest enabled"
1366 1366 )
1367 1367 )
1368 1368 if not dir.endswith(b'/'):
1369 1369 dir = dir + b'/'
1370 1370 dirlog = repo.manifestlog.getstorage(dir)
1371 1371 if len(dirlog):
1372 1372 r = dirlog
1373 1373 elif mf:
1374 1374 r = repo.manifestlog.getstorage(b'')
1375 1375 elif file_:
1376 1376 filelog = repo.file(file_)
1377 1377 if len(filelog):
1378 1378 r = filelog
1379 1379
1380 1380 # Not all storage may be revlogs. If requested, try to return an actual
1381 1381 # revlog instance.
1382 1382 if returnrevlog:
1383 1383 if isinstance(r, revlog.revlog):
1384 1384 pass
1385 1385 elif util.safehasattr(r, b'_revlog'):
1386 1386 r = r._revlog # pytype: disable=attribute-error
1387 1387 elif r is not None:
1388 1388 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1389 1389
1390 1390 if not r:
1391 1391 if not returnrevlog:
1392 1392 raise error.Abort(_(b'cannot give path to non-revlog'))
1393 1393
1394 1394 if not file_:
1395 1395 raise error.CommandError(cmd, _(b'invalid arguments'))
1396 1396 if not os.path.isfile(file_):
1397 1397 raise error.Abort(_(b"revlog '%s' not found") % file_)
1398 1398 r = revlog.revlog(
1399 1399 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1400 1400 )
1401 1401 return r
1402 1402
1403 1403
1404 1404 def openrevlog(repo, cmd, file_, opts):
1405 1405 """Obtain a revlog backing storage of an item.
1406 1406
1407 1407 This is similar to ``openstorage()`` except it always returns a revlog.
1408 1408
1409 1409 In most cases, a caller cares about the main storage object - not the
1410 1410 revlog backing it. Therefore, this function should only be used by code
1411 1411 that needs to examine low-level revlog implementation details. e.g. debug
1412 1412 commands.
1413 1413 """
1414 1414 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1415 1415
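A hypothetical usage sketch in the style of a debug command: the opts keys mirror debugrevlogopts defined near the top of this module, and the example assumes it is run from inside a Mercurial repository that tracks a file named README (both the repository and the file name are placeholders):

from mercurial import cmdutil, hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')

opts = {b'changelog': True, b'manifest': False, b'dir': b''}
cl = cmdutil.openrevlog(repo, b'example', None, opts)       # the changelog revlog
print(len(cl))                                              # number of revisions

opts = {b'changelog': False, b'manifest': False, b'dir': b''}
fl = cmdutil.openrevlog(repo, b'example', b'README', opts)  # a file's revlog
print(len(fl))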
1416 1416
1417 1417 def copy(ui, repo, pats, opts, rename=False):
1418 1418 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1419 1419
1420 1420 # called with the repo lock held
1421 1421 #
1422 1422 # hgsep => pathname that uses "/" to separate directories
1423 1423 # ossep => pathname that uses os.sep to separate directories
1424 1424 cwd = repo.getcwd()
1425 1425 targets = {}
1426 1426 forget = opts.get(b"forget")
1427 1427 after = opts.get(b"after")
1428 1428 dryrun = opts.get(b"dry_run")
1429 1429 rev = opts.get(b'at_rev')
1430 1430 if rev:
1431 1431 if not forget and not after:
1432 1432 # TODO: Remove this restriction and make it also create the copy
1433 1433 # targets (and remove the rename source if rename==True).
1434 1434 raise error.Abort(_(b'--at-rev requires --after'))
1435 1435 ctx = scmutil.revsingle(repo, rev)
1436 1436 if len(ctx.parents()) > 1:
1437 1437 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1438 1438 else:
1439 1439 ctx = repo[None]
1440 1440
1441 1441 pctx = ctx.p1()
1442 1442
1443 1443 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1444 1444
1445 1445 if forget:
1446 1446 if ctx.rev() is None:
1447 1447 new_ctx = ctx
1448 1448 else:
1449 1449 if len(ctx.parents()) > 1:
1450 1450 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1451 1451 # avoid cycle context -> subrepo -> cmdutil
1452 1452 from . import context
1453 1453
1454 1454 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1455 1455 new_ctx = context.overlayworkingctx(repo)
1456 1456 new_ctx.setbase(ctx.p1())
1457 1457 mergemod.graft(repo, ctx, wctx=new_ctx)
1458 1458
1459 1459 match = scmutil.match(ctx, pats, opts)
1460 1460
1461 1461 current_copies = ctx.p1copies()
1462 1462 current_copies.update(ctx.p2copies())
1463 1463
1464 1464 uipathfn = scmutil.getuipathfn(repo)
1465 1465 for f in ctx.walk(match):
1466 1466 if f in current_copies:
1467 1467 new_ctx[f].markcopied(None)
1468 1468 elif match.exact(f):
1469 1469 ui.warn(
1470 1470 _(
1471 1471 b'%s: not unmarking as copy - file is not marked as copied\n'
1472 1472 )
1473 1473 % uipathfn(f)
1474 1474 )
1475 1475
1476 1476 if ctx.rev() is not None:
1477 1477 with repo.lock():
1478 1478 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1479 1479 new_node = mem_ctx.commit()
1480 1480
1481 1481 if repo.dirstate.p1() == ctx.node():
1482 1482 with repo.dirstate.parentchange():
1483 1483 scmutil.movedirstate(repo, repo[new_node])
1484 1484 replacements = {ctx.node(): [new_node]}
1485 1485 scmutil.cleanupnodes(
1486 1486 repo, replacements, b'uncopy', fixphase=True
1487 1487 )
1488 1488
1489 1489 return
1490 1490
1491 1491 pats = scmutil.expandpats(pats)
1492 1492 if not pats:
1493 1493 raise error.Abort(_(b'no source or destination specified'))
1494 1494 if len(pats) == 1:
1495 1495 raise error.Abort(_(b'no destination specified'))
1496 1496 dest = pats.pop()
1497 1497
1498 1498 def walkpat(pat):
1499 1499 srcs = []
1500 1500 # TODO: Inline and simplify the non-working-copy version of this code
1501 1501 # since it shares very little with the working-copy version of it.
1502 1502 ctx_to_walk = ctx if ctx.rev() is None else pctx
1503 1503 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1504 1504 for abs in ctx_to_walk.walk(m):
1505 1505 rel = uipathfn(abs)
1506 1506 exact = m.exact(abs)
1507 1507 if abs not in ctx:
1508 1508 if abs in pctx:
1509 1509 if not after:
1510 1510 if exact:
1511 1511 ui.warn(
1512 1512 _(
1513 1513 b'%s: not copying - file has been marked '
1514 1514 b'for remove\n'
1515 1515 )
1516 1516 % rel
1517 1517 )
1518 1518 continue
1519 1519 else:
1520 1520 if exact:
1521 1521 ui.warn(
1522 1522 _(b'%s: not copying - file is not managed\n') % rel
1523 1523 )
1524 1524 continue
1525 1525
1526 1526 # abs: hgsep
1527 1527 # rel: ossep
1528 1528 srcs.append((abs, rel, exact))
1529 1529 return srcs
1530 1530
1531 1531 if ctx.rev() is not None:
1532 1532 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1533 1533 absdest = pathutil.canonpath(repo.root, cwd, dest)
1534 1534 if ctx.hasdir(absdest):
1535 1535 raise error.Abort(
1536 1536 _(b'%s: --at-rev does not support a directory as destination')
1537 1537 % uipathfn(absdest)
1538 1538 )
1539 1539 if absdest not in ctx:
1540 1540 raise error.Abort(
1541 1541 _(b'%s: copy destination does not exist in %s')
1542 1542 % (uipathfn(absdest), ctx)
1543 1543 )
1544 1544
1545 1545 # avoid cycle context -> subrepo -> cmdutil
1546 1546 from . import context
1547 1547
1548 1548 copylist = []
1549 1549 for pat in pats:
1550 1550 srcs = walkpat(pat)
1551 1551 if not srcs:
1552 1552 continue
1553 1553 for abs, rel, exact in srcs:
1554 1554 copylist.append(abs)
1555 1555
1556 1556 if not copylist:
1557 1557 raise error.Abort(_(b'no files to copy'))
1558 1558 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1559 1559 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1560 1560 # existing functions below.
1561 1561 if len(copylist) != 1:
1562 1562 raise error.Abort(_(b'--at-rev requires a single source'))
1563 1563
1564 1564 new_ctx = context.overlayworkingctx(repo)
1565 1565 new_ctx.setbase(ctx.p1())
1566 1566 mergemod.graft(repo, ctx, wctx=new_ctx)
1567 1567
1568 1568 new_ctx.markcopied(absdest, copylist[0])
1569 1569
1570 1570 with repo.lock():
1571 1571 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1572 1572 new_node = mem_ctx.commit()
1573 1573
1574 1574 if repo.dirstate.p1() == ctx.node():
1575 1575 with repo.dirstate.parentchange():
1576 1576 scmutil.movedirstate(repo, repo[new_node])
1577 1577 replacements = {ctx.node(): [new_node]}
1578 1578 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1579 1579
1580 1580 return
1581 1581
1582 1582 # abssrc: hgsep
1583 1583 # relsrc: ossep
1584 1584 # otarget: ossep
1585 1585 def copyfile(abssrc, relsrc, otarget, exact):
1586 1586 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1587 1587 if b'/' in abstarget:
1588 1588 # We cannot normalize abstarget itself, this would prevent
1589 1589 # case only renames, like a => A.
1590 1590 abspath, absname = abstarget.rsplit(b'/', 1)
1591 1591 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1592 1592 reltarget = repo.pathto(abstarget, cwd)
1593 1593 target = repo.wjoin(abstarget)
1594 1594 src = repo.wjoin(abssrc)
1595 1595 state = repo.dirstate[abstarget]
1596 1596
1597 1597 scmutil.checkportable(ui, abstarget)
1598 1598
1599 1599 # check for collisions
1600 1600 prevsrc = targets.get(abstarget)
1601 1601 if prevsrc is not None:
1602 1602 ui.warn(
1603 1603 _(b'%s: not overwriting - %s collides with %s\n')
1604 1604 % (
1605 1605 reltarget,
1606 1606 repo.pathto(abssrc, cwd),
1607 1607 repo.pathto(prevsrc, cwd),
1608 1608 )
1609 1609 )
1610 1610 return True # report a failure
1611 1611
1612 1612 # check for overwrites
1613 1613 exists = os.path.lexists(target)
1614 1614 samefile = False
1615 1615 if exists and abssrc != abstarget:
1616 1616 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1617 1617 abstarget
1618 1618 ):
1619 1619 if not rename:
1620 1620 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1621 1621 return True # report a failure
1622 1622 exists = False
1623 1623 samefile = True
1624 1624
1625 1625 if not after and exists or after and state in b'mn':
1626 1626 if not opts[b'force']:
1627 1627 if state in b'mn':
1628 1628 msg = _(b'%s: not overwriting - file already committed\n')
1629 1629 if after:
1630 1630 flags = b'--after --force'
1631 1631 else:
1632 1632 flags = b'--force'
1633 1633 if rename:
1634 1634 hint = (
1635 1635 _(
1636 1636 b"('hg rename %s' to replace the file by "
1637 1637 b'recording a rename)\n'
1638 1638 )
1639 1639 % flags
1640 1640 )
1641 1641 else:
1642 1642 hint = (
1643 1643 _(
1644 1644 b"('hg copy %s' to replace the file by "
1645 1645 b'recording a copy)\n'
1646 1646 )
1647 1647 % flags
1648 1648 )
1649 1649 else:
1650 1650 msg = _(b'%s: not overwriting - file exists\n')
1651 1651 if rename:
1652 1652 hint = _(
1653 1653 b"('hg rename --after' to record the rename)\n"
1654 1654 )
1655 1655 else:
1656 1656 hint = _(b"('hg copy --after' to record the copy)\n")
1657 1657 ui.warn(msg % reltarget)
1658 1658 ui.warn(hint)
1659 1659 return True # report a failure
1660 1660
1661 1661 if after:
1662 1662 if not exists:
1663 1663 if rename:
1664 1664 ui.warn(
1665 1665 _(b'%s: not recording move - %s does not exist\n')
1666 1666 % (relsrc, reltarget)
1667 1667 )
1668 1668 else:
1669 1669 ui.warn(
1670 1670 _(b'%s: not recording copy - %s does not exist\n')
1671 1671 % (relsrc, reltarget)
1672 1672 )
1673 1673 return True # report a failure
1674 1674 elif not dryrun:
1675 1675 try:
1676 1676 if exists:
1677 1677 os.unlink(target)
1678 1678 targetdir = os.path.dirname(target) or b'.'
1679 1679 if not os.path.isdir(targetdir):
1680 1680 os.makedirs(targetdir)
1681 1681 if samefile:
1682 1682 tmp = target + b"~hgrename"
1683 1683 os.rename(src, tmp)
1684 1684 os.rename(tmp, target)
1685 1685 else:
1686 1686 # Preserve stat info on renames, not on copies; this matches
1687 1687 # Linux CLI behavior.
1688 1688 util.copyfile(src, target, copystat=rename)
1689 1689 srcexists = True
1690 1690 except IOError as inst:
1691 1691 if inst.errno == errno.ENOENT:
1692 1692 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1693 1693 srcexists = False
1694 1694 else:
1695 1695 ui.warn(
1696 1696 _(b'%s: cannot copy - %s\n')
1697 1697 % (relsrc, encoding.strtolocal(inst.strerror))
1698 1698 )
1699 1699 return True # report a failure
1700 1700
1701 1701 if ui.verbose or not exact:
1702 1702 if rename:
1703 1703 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1704 1704 else:
1705 1705 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1706 1706
1707 1707 targets[abstarget] = abssrc
1708 1708
1709 1709 # fix up dirstate
1710 1710 scmutil.dirstatecopy(
1711 1711 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1712 1712 )
1713 1713 if rename and not dryrun:
1714 1714 if not after and srcexists and not samefile:
1715 1715 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1716 1716 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1717 1717 ctx.forget([abssrc])
1718 1718
1719 1719 # pat: ossep
1720 1720 # dest ossep
1721 1721 # srcs: list of (hgsep, hgsep, ossep, bool)
1722 1722 # return: function that takes hgsep and returns ossep
1723 1723 def targetpathfn(pat, dest, srcs):
1724 1724 if os.path.isdir(pat):
1725 1725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1726 1726 abspfx = util.localpath(abspfx)
1727 1727 if destdirexists:
1728 1728 striplen = len(os.path.split(abspfx)[0])
1729 1729 else:
1730 1730 striplen = len(abspfx)
1731 1731 if striplen:
1732 1732 striplen += len(pycompat.ossep)
1733 1733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1734 1734 elif destdirexists:
1735 1735 res = lambda p: os.path.join(
1736 1736 dest, os.path.basename(util.localpath(p))
1737 1737 )
1738 1738 else:
1739 1739 res = lambda p: dest
1740 1740 return res
1741 1741
1742 1742 # pat: ossep
1743 1743 # dest: ossep
1744 1744 # srcs: list of (hgsep, hgsep, ossep, bool)
1745 1745 # return: function that takes hgsep and returns ossep
1746 1746 def targetpathafterfn(pat, dest, srcs):
1747 1747 if matchmod.patkind(pat):
1748 1748 # a mercurial pattern
1749 1749 res = lambda p: os.path.join(
1750 1750 dest, os.path.basename(util.localpath(p))
1751 1751 )
1752 1752 else:
1753 1753 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1754 1754 if len(abspfx) < len(srcs[0][0]):
1755 1755 # A directory. Either the target path contains the last
1756 1756 # component of the source path or it does not.
1757 1757 def evalpath(striplen):
1758 1758 score = 0
1759 1759 for s in srcs:
1760 1760 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1761 1761 if os.path.lexists(t):
1762 1762 score += 1
1763 1763 return score
1764 1764
1765 1765 abspfx = util.localpath(abspfx)
1766 1766 striplen = len(abspfx)
1767 1767 if striplen:
1768 1768 striplen += len(pycompat.ossep)
1769 1769 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1770 1770 score = evalpath(striplen)
1771 1771 striplen1 = len(os.path.split(abspfx)[0])
1772 1772 if striplen1:
1773 1773 striplen1 += len(pycompat.ossep)
1774 1774 if evalpath(striplen1) > score:
1775 1775 striplen = striplen1
1776 1776 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1777 1777 else:
1778 1778 # a file
1779 1779 if destdirexists:
1780 1780 res = lambda p: os.path.join(
1781 1781 dest, os.path.basename(util.localpath(p))
1782 1782 )
1783 1783 else:
1784 1784 res = lambda p: dest
1785 1785 return res
1786 1786
1787 1787 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1788 1788 if not destdirexists:
1789 1789 if len(pats) > 1 or matchmod.patkind(pats[0]):
1790 1790 raise error.Abort(
1791 1791 _(
1792 1792 b'with multiple sources, destination must be an '
1793 1793 b'existing directory'
1794 1794 )
1795 1795 )
1796 1796 if util.endswithsep(dest):
1797 1797 raise error.Abort(_(b'destination %s is not a directory') % dest)
1798 1798
1799 1799 tfn = targetpathfn
1800 1800 if after:
1801 1801 tfn = targetpathafterfn
1802 1802 copylist = []
1803 1803 for pat in pats:
1804 1804 srcs = walkpat(pat)
1805 1805 if not srcs:
1806 1806 continue
1807 1807 copylist.append((tfn(pat, dest, srcs), srcs))
1808 1808 if not copylist:
1809 1809 raise error.Abort(_(b'no files to copy'))
1810 1810
1811 1811 errors = 0
1812 1812 for targetpath, srcs in copylist:
1813 1813 for abssrc, relsrc, exact in srcs:
1814 1814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1815 1815 errors += 1
1816 1816
1817 1817 return errors != 0
1818 1818
1819 1819
1820 1820 ## facility to let extensions process additional data into an import patch
1821 1821 # list of identifiers to be executed in order
1822 1822 extrapreimport = [] # run before commit
1823 1823 extrapostimport = [] # run after commit
1824 1824 # mapping from identifier to actual import function
1825 1825 #
1826 1826 # 'preimport' functions are run before the commit is made and are provided the following
1827 1827 # arguments:
1828 1828 # - repo: the localrepository instance,
1829 1829 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1830 1830 # - extra: the future extra dictionary of the changeset, please mutate it,
1831 1831 # - opts: the import options.
1832 1832 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1833 1833 # mutation of in memory commit and more. Feel free to rework the code to get
1834 1834 # there.
1835 1835 extrapreimportmap = {}
1836 1836 # 'postimport' functions are run after the commit is made and are provided the following
1837 1837 # argument:
1838 1838 # - ctx: the changectx created by import.
1839 1839 extrapostimportmap = {}
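# Illustrative sketch (not part of the original source): an extension could
# register import hooks roughly like this, using a hypothetical identifier
# b'myext'; the callables receive the arguments documented above:
#
#   def _preimport(repo, patchdata, extra, opts):
#       extra[b'myext_origin'] = patchdata.get(b'filename', b'')
#
#   def _postimport(ctx):
#       ctx.repo().ui.note(b'myext: imported %s\n' % ctx.hex())
#
#   extrapreimport.append(b'myext')
#   extrapreimportmap[b'myext'] = _preimport
#   extrapostimport.append(b'myext')
#   extrapostimportmap[b'myext'] = _postimport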
1840 1840
1841 1841
1842 1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1843 1843 """Utility function used by commands.import to import a single patch
1844 1844
1845 1845 This function is explicitly defined here to help the evolve extension
1846 1846 wrap this part of the import logic.
1847 1847
1848 1848 The API is currently a bit ugly because it is a simple code translation from
1849 1849 the import command. Feel free to make it better.
1850 1850
1851 1851 :patchdata: a dictionary containing parsed patch data (such as from
1852 1852 ``patch.extract()``)
1853 1853 :parents: nodes that will be parent of the created commit
1854 1854 :opts: the full dict of options passed to the import command
1855 1855 :msgs: list to save commit message to.
1856 1856 (used in case we need to save it when failing)
1857 1857 :updatefunc: a function that updates a repo to a given node
1858 1858 updatefunc(<repo>, <node>)
1859 1859 """
1860 1860 # avoid cycle context -> subrepo -> cmdutil
1861 1861 from . import context
1862 1862
1863 1863 tmpname = patchdata.get(b'filename')
1864 1864 message = patchdata.get(b'message')
1865 1865 user = opts.get(b'user') or patchdata.get(b'user')
1866 1866 date = opts.get(b'date') or patchdata.get(b'date')
1867 1867 branch = patchdata.get(b'branch')
1868 1868 nodeid = patchdata.get(b'nodeid')
1869 1869 p1 = patchdata.get(b'p1')
1870 1870 p2 = patchdata.get(b'p2')
1871 1871
1872 1872 nocommit = opts.get(b'no_commit')
1873 1873 importbranch = opts.get(b'import_branch')
1874 1874 update = not opts.get(b'bypass')
1875 1875 strip = opts[b"strip"]
1876 1876 prefix = opts[b"prefix"]
1877 1877 sim = float(opts.get(b'similarity') or 0)
1878 1878
1879 1879 if not tmpname:
1880 1880 return None, None, False
1881 1881
1882 1882 rejects = False
1883 1883
1884 1884 cmdline_message = logmessage(ui, opts)
1885 1885 if cmdline_message:
1886 1886 # pick up the cmdline msg
1887 1887 message = cmdline_message
1888 1888 elif message:
1889 1889 # pick up the patch msg
1890 1890 message = message.strip()
1891 1891 else:
1892 1892 # launch the editor
1893 1893 message = None
1894 1894 ui.debug(b'message:\n%s\n' % (message or b''))
1895 1895
1896 1896 if len(parents) == 1:
1897 1897 parents.append(repo[nullid])
1898 1898 if opts.get(b'exact'):
1899 1899 if not nodeid or not p1:
1900 1900 raise error.Abort(_(b'not a Mercurial patch'))
1901 1901 p1 = repo[p1]
1902 1902 p2 = repo[p2 or nullid]
1903 1903 elif p2:
1904 1904 try:
1905 1905 p1 = repo[p1]
1906 1906 p2 = repo[p2]
1907 1907 # Without any options, consider p2 only if the
1908 1908 # patch is being applied on top of the recorded
1909 1909 # first parent.
1910 1910 if p1 != parents[0]:
1911 1911 p1 = parents[0]
1912 1912 p2 = repo[nullid]
1913 1913 except error.RepoError:
1914 1914 p1, p2 = parents
1915 1915 if p2.node() == nullid:
1916 1916 ui.warn(
1917 1917 _(
1918 1918 b"warning: import the patch as a normal revision\n"
1919 1919 b"(use --exact to import the patch as a merge)\n"
1920 1920 )
1921 1921 )
1922 1922 else:
1923 1923 p1, p2 = parents
1924 1924
1925 1925 n = None
1926 1926 if update:
1927 1927 if p1 != parents[0]:
1928 1928 updatefunc(repo, p1.node())
1929 1929 if p2 != parents[1]:
1930 1930 repo.setparents(p1.node(), p2.node())
1931 1931
1932 1932 if opts.get(b'exact') or importbranch:
1933 1933 repo.dirstate.setbranch(branch or b'default')
1934 1934
1935 1935 partial = opts.get(b'partial', False)
1936 1936 files = set()
1937 1937 try:
1938 1938 patch.patch(
1939 1939 ui,
1940 1940 repo,
1941 1941 tmpname,
1942 1942 strip=strip,
1943 1943 prefix=prefix,
1944 1944 files=files,
1945 1945 eolmode=None,
1946 1946 similarity=sim / 100.0,
1947 1947 )
1948 1948 except error.PatchError as e:
1949 1949 if not partial:
1950 1950 raise error.Abort(pycompat.bytestr(e))
1951 1951 if partial:
1952 1952 rejects = True
1953 1953
1954 1954 files = list(files)
1955 1955 if nocommit:
1956 1956 if message:
1957 1957 msgs.append(message)
1958 1958 else:
1959 1959 if opts.get(b'exact') or p2:
1960 1960 # If you got here, you either use --force and know what
1961 1961 # you are doing or used --exact or a merge patch while
1962 1962 # being updated to its first parent.
1963 1963 m = None
1964 1964 else:
1965 1965 m = scmutil.matchfiles(repo, files or [])
1966 1966 editform = mergeeditform(repo[None], b'import.normal')
1967 1967 if opts.get(b'exact'):
1968 1968 editor = None
1969 1969 else:
1970 1970 editor = getcommiteditor(
1971 1971 editform=editform, **pycompat.strkwargs(opts)
1972 1972 )
1973 1973 extra = {}
1974 1974 for idfunc in extrapreimport:
1975 1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1976 1976 overrides = {}
1977 1977 if partial:
1978 1978 overrides[(b'ui', b'allowemptycommit')] = True
1979 1979 if opts.get(b'secret'):
1980 1980 overrides[(b'phases', b'new-commit')] = b'secret'
1981 1981 with repo.ui.configoverride(overrides, b'import'):
1982 1982 n = repo.commit(
1983 1983 message, user, date, match=m, editor=editor, extra=extra
1984 1984 )
1985 1985 for idfunc in extrapostimport:
1986 1986 extrapostimportmap[idfunc](repo[n])
1987 1987 else:
1988 1988 if opts.get(b'exact') or importbranch:
1989 1989 branch = branch or b'default'
1990 1990 else:
1991 1991 branch = p1.branch()
1992 1992 store = patch.filestore()
1993 1993 try:
1994 1994 files = set()
1995 1995 try:
1996 1996 patch.patchrepo(
1997 1997 ui,
1998 1998 repo,
1999 1999 p1,
2000 2000 store,
2001 2001 tmpname,
2002 2002 strip,
2003 2003 prefix,
2004 2004 files,
2005 2005 eolmode=None,
2006 2006 )
2007 2007 except error.PatchError as e:
2008 2008 raise error.Abort(stringutil.forcebytestr(e))
2009 2009 if opts.get(b'exact'):
2010 2010 editor = None
2011 2011 else:
2012 2012 editor = getcommiteditor(editform=b'import.bypass')
2013 2013 memctx = context.memctx(
2014 2014 repo,
2015 2015 (p1.node(), p2.node()),
2016 2016 message,
2017 2017 files=files,
2018 2018 filectxfn=store,
2019 2019 user=user,
2020 2020 date=date,
2021 2021 branch=branch,
2022 2022 editor=editor,
2023 2023 )
2024 2024
2025 2025 overrides = {}
2026 2026 if opts.get(b'secret'):
2027 2027 overrides[(b'phases', b'new-commit')] = b'secret'
2028 2028 with repo.ui.configoverride(overrides, b'import'):
2029 2029 n = memctx.commit()
2030 2030 finally:
2031 2031 store.close()
2032 2032 if opts.get(b'exact') and nocommit:
2033 2033 # --exact with --no-commit is still useful in that it does merge
2034 2034 # and branch bits
2035 2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2036 2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2037 2037 raise error.Abort(_(b'patch is damaged or loses information'))
2038 2038 msg = _(b'applied to working directory')
2039 2039 if n:
2040 2040 # i18n: refers to a short changeset id
2041 2041 msg = _(b'created %s') % short(n)
2042 2042 return msg, n, rejects
2043 2043
2044 2044
2045 2045 # facility to let extensions include additional data in an exported patch
2046 2046 # list of identifiers to be executed in order
2047 2047 extraexport = []
2048 2048 # mapping from identifier to actual export function
2049 2049 # function has to return a string to be added to the header or None
2050 2050 # it is given two arguments (sequencenumber, changectx)
2051 2051 extraexportmap = {}
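# Illustrative sketch (not part of the original source): an extension could
# add a custom header line to exported patches using a hypothetical
# identifier b'myext'; the callable gets (seqno, ctx) and returns bytes or
# None:
#
#   def _exportheader(seqno, ctx):
#       return b'MyExt-Seqno %d' % seqno
#
#   extraexport.append(b'myext')
#   extraexportmap[b'myext'] = _exportheader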
2052 2052
2053 2053
2054 2054 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2055 2055 node = scmutil.binnode(ctx)
2056 2056 parents = [p.node() for p in ctx.parents() if p]
2057 2057 branch = ctx.branch()
2058 2058 if switch_parent:
2059 2059 parents.reverse()
2060 2060
2061 2061 if parents:
2062 2062 prev = parents[0]
2063 2063 else:
2064 2064 prev = nullid
2065 2065
2066 2066 fm.context(ctx=ctx)
2067 2067 fm.plain(b'# HG changeset patch\n')
2068 2068 fm.write(b'user', b'# User %s\n', ctx.user())
2069 2069 fm.plain(b'# Date %d %d\n' % ctx.date())
2070 2070 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2071 2071 fm.condwrite(
2072 2072 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2073 2073 )
2074 2074 fm.write(b'node', b'# Node ID %s\n', hex(node))
2075 2075 fm.plain(b'# Parent %s\n' % hex(prev))
2076 2076 if len(parents) > 1:
2077 2077 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2078 2078 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2079 2079
2080 2080 # TODO: redesign extraexportmap function to support formatter
2081 2081 for headerid in extraexport:
2082 2082 header = extraexportmap[headerid](seqno, ctx)
2083 2083 if header is not None:
2084 2084 fm.plain(b'# %s\n' % header)
2085 2085
2086 2086 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2087 2087 fm.plain(b'\n')
2088 2088
2089 2089 if fm.isplain():
2090 2090 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2091 2091 for chunk, label in chunkiter:
2092 2092 fm.plain(chunk, label=label)
2093 2093 else:
2094 2094 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2095 2095 # TODO: make it structured?
2096 2096 fm.data(diff=b''.join(chunkiter))
2097 2097
2098 2098
2099 2099 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2100 2100 """Export changesets to stdout or a single file"""
2101 2101 for seqno, rev in enumerate(revs, 1):
2102 2102 ctx = repo[rev]
2103 2103 if not dest.startswith(b'<'):
2104 2104 repo.ui.note(b"%s\n" % dest)
2105 2105 fm.startitem()
2106 2106 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2107 2107
2108 2108
2109 2109 def _exportfntemplate(
2110 2110 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2111 2111 ):
2112 2112 """Export changesets to possibly multiple files"""
2113 2113 total = len(revs)
2114 2114 revwidth = max(len(str(rev)) for rev in revs)
2115 2115 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2116 2116
2117 2117 for seqno, rev in enumerate(revs, 1):
2118 2118 ctx = repo[rev]
2119 2119 dest = makefilename(
2120 2120 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2121 2121 )
2122 2122 filemap.setdefault(dest, []).append((seqno, rev))
2123 2123
2124 2124 for dest in filemap:
2125 2125 with formatter.maybereopen(basefm, dest) as fm:
2126 2126 repo.ui.note(b"%s\n" % dest)
2127 2127 for seqno, rev in filemap[dest]:
2128 2128 fm.startitem()
2129 2129 ctx = repo[rev]
2130 2130 _exportsingle(
2131 2131 repo, ctx, fm, match, switch_parent, seqno, diffopts
2132 2132 )
2133 2133
2134 2134
2135 2135 def _prefetchchangedfiles(repo, revs, match):
2136 2136 allfiles = set()
2137 2137 for rev in revs:
2138 2138 for file in repo[rev].files():
2139 2139 if not match or match(file):
2140 2140 allfiles.add(file)
2141 2141 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
2142 2142
2143 2143
2144 2144 def export(
2145 2145 repo,
2146 2146 revs,
2147 2147 basefm,
2148 2148 fntemplate=b'hg-%h.patch',
2149 2149 switch_parent=False,
2150 2150 opts=None,
2151 2151 match=None,
2152 2152 ):
2153 2153 '''export changesets as hg patches
2154 2154
2155 2155 Args:
2156 2156 repo: The repository from which we're exporting revisions.
2157 2157 revs: A list of revisions to export as revision numbers.
2158 2158 basefm: A formatter to which patches should be written.
2159 2159 fntemplate: An optional string to use for generating patch file names.
2160 2160 switch_parent: If True, show diffs against second parent when not nullid.
2161 2161 Default is false, which always shows diff against p1.
2162 2162 opts: diff options to use for generating the patch.
2163 2163 match: If specified, only export changes to files matching this matcher.
2164 2164
2165 2165 Returns:
2166 2166 Nothing.
2167 2167
2168 2168 Side Effect:
2169 2169 "HG Changeset Patch" data is emitted to one of the following
2170 2170 destinations:
2171 2171 fntemplate specified: Each rev is written to a unique file named using
2172 2172 the given template.
2173 2173 Otherwise: All revs will be written to basefm.
2174 2174 '''
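# Illustrative sketch (not part of the original source): a plausible
# invocation, assuming diff options obtained via patch.diffallopts(ui):
#
#   export(repo, [1, 2], basefm, fntemplate=b'hg-%h.patch',
#          opts=patch.diffallopts(ui))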
2175 2175 _prefetchchangedfiles(repo, revs, match)
2176 2176
2177 2177 if not fntemplate:
2178 2178 _exportfile(
2179 2179 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2180 2180 )
2181 2181 else:
2182 2182 _exportfntemplate(
2183 2183 repo, revs, basefm, fntemplate, switch_parent, opts, match
2184 2184 )
2185 2185
2186 2186
2187 2187 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2188 2188 """Export changesets to the given file stream"""
2189 2189 _prefetchchangedfiles(repo, revs, match)
2190 2190
2191 2191 dest = getattr(fp, 'name', b'<unnamed>')
2192 2192 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2193 2193 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2194 2194
2195 2195
2196 2196 def showmarker(fm, marker, index=None):
2197 2197 """utility function to display obsolescence marker in a readable way
2198 2198
2199 2199 To be used by debug functions."""
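# Illustrative note (not part of the original source): a debug command such
# as 'hg debugobsolete' is expected to call this roughly as
#
#   showmarker(fm, marker)            # or showmarker(fm, marker, index=i)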
2200 2200 if index is not None:
2201 2201 fm.write(b'index', b'%i ', index)
2202 2202 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2203 2203 succs = marker.succnodes()
2204 2204 fm.condwrite(
2205 2205 succs,
2206 2206 b'succnodes',
2207 2207 b'%s ',
2208 2208 fm.formatlist(map(hex, succs), name=b'node'),
2209 2209 )
2210 2210 fm.write(b'flag', b'%X ', marker.flags())
2211 2211 parents = marker.parentnodes()
2212 2212 if parents is not None:
2213 2213 fm.write(
2214 2214 b'parentnodes',
2215 2215 b'{%s} ',
2216 2216 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2217 2217 )
2218 2218 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2219 2219 meta = marker.metadata().copy()
2220 2220 meta.pop(b'date', None)
2221 2221 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2222 2222 fm.write(
2223 2223 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2224 2224 )
2225 2225 fm.plain(b'\n')
2226 2226
2227 2227
2228 2228 def finddate(ui, repo, date):
2229 2229 """Find the tipmost changeset that matches the given date spec"""
2230 2230
2231 2231 df = dateutil.matchdate(date)
2232 2232 m = scmutil.matchall(repo)
2233 2233 results = {}
2234 2234
2235 2235 def prep(ctx, fns):
2236 2236 d = ctx.date()
2237 2237 if df(d[0]):
2238 2238 results[ctx.rev()] = d
2239 2239
2240 2240 for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
2241 2241 rev = ctx.rev()
2242 2242 if rev in results:
2243 2243 ui.status(
2244 2244 _(b"found revision %d from %s\n")
2245 2245 % (rev, dateutil.datestr(results[rev]))
2246 2246 )
2247 2247 return b'%d' % rev
2248 2248
2249 2249 raise error.Abort(_(b"revision matching date not found"))
2250 2250
2251 2251
2252 2252 def increasingwindows(windowsize=8, sizelimit=512):
2253 2253 while True:
2254 2254 yield windowsize
2255 2255 if windowsize < sizelimit:
2256 2256 windowsize *= 2
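# Note (added for clarity): with the defaults, increasingwindows() yields
# 8, 16, 32, 64, 128, 256, 512 and then 512 forever; callers simply pull as
# many window sizes as they need.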
2257 2257
2258 2258
2259 2259 def _walkrevs(repo, opts):
2260 2260 # Default --rev value depends on --follow but --follow behavior
2261 2261 # depends on revisions resolved from --rev...
2262 2262 follow = opts.get(b'follow') or opts.get(b'follow_first')
2263 2263 if opts.get(b'rev'):
2264 2264 revs = scmutil.revrange(repo, opts[b'rev'])
2265 2265 elif follow and repo.dirstate.p1() == nullid:
2266 2266 revs = smartset.baseset()
2267 2267 elif follow:
2268 2268 revs = repo.revs(b'reverse(:.)')
2269 2269 else:
2270 2270 revs = smartset.spanset(repo)
2271 2271 revs.reverse()
2272 2272 return revs
2273 2273
2274 2274
2275 2275 class FileWalkError(Exception):
2276 2276 pass
2277 2277
2278 2278
2279 2279 def walkfilerevs(repo, match, follow, revs, fncache):
2280 2280 '''Walks the file history for the matched files.
2281 2281
2282 2282 Returns the changeset revs that are involved in the file history.
2283 2283
2284 2284 Throws FileWalkError if the file history can't be walked using
2285 2285 filelogs alone.
2286 2286 '''
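# Illustrative note (not part of the original source): walkchangerevs() below
# is the typical caller; it does roughly
#
#   try:
#       wanted = walkfilerevs(repo, match, follow, revs, fncache)
#   except FileWalkError:
#       slowpath = True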
2287 2287 wanted = set()
2288 2288 copies = []
2289 2289 minrev, maxrev = min(revs), max(revs)
2290 2290
2291 2291 def filerevs(filelog, last):
2292 2292 """
2293 2293 Only files, no patterns. Check the history of each file.
2294 2294
2295 2295 Examines filelog entries within minrev, maxrev linkrev range
2296 2296 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2297 2297 tuples in backwards order
2298 2298 """
2299 2299 cl_count = len(repo)
2300 2300 revs = []
2301 2301 for j in pycompat.xrange(0, last + 1):
2302 2302 linkrev = filelog.linkrev(j)
2303 2303 if linkrev < minrev:
2304 2304 continue
2305 2305 # only yield rev for which we have the changelog, it can
2306 2306 # happen while doing "hg log" during a pull or commit
2307 2307 if linkrev >= cl_count:
2308 2308 break
2309 2309
2310 2310 parentlinkrevs = []
2311 2311 for p in filelog.parentrevs(j):
2312 2312 if p != nullrev:
2313 2313 parentlinkrevs.append(filelog.linkrev(p))
2314 2314 n = filelog.node(j)
2315 2315 revs.append(
2316 2316 (linkrev, parentlinkrevs, follow and filelog.renamed(n))
2317 2317 )
2318 2318
2319 2319 return reversed(revs)
2320 2320
2321 2321 def iterfiles():
2322 2322 pctx = repo[b'.']
2323 2323 for filename in match.files():
2324 2324 if follow:
2325 2325 if filename not in pctx:
2326 2326 raise error.Abort(
2327 2327 _(
2328 2328 b'cannot follow file not in parent '
2329 2329 b'revision: "%s"'
2330 2330 )
2331 2331 % filename
2332 2332 )
2333 2333 yield filename, pctx[filename].filenode()
2334 2334 else:
2335 2335 yield filename, None
2336 2336 for filename_node in copies:
2337 2337 yield filename_node
2338 2338
2339 2339 for file_, node in iterfiles():
2340 2340 filelog = repo.file(file_)
2341 2341 if not len(filelog):
2342 2342 if node is None:
2343 2343 # A zero count may be a directory or deleted file, so
2344 2344 # try to find matching entries on the slow path.
2345 2345 if follow:
2346 2346 raise error.Abort(
2347 2347 _(b'cannot follow nonexistent file: "%s"') % file_
2348 2348 )
2349 2349 raise FileWalkError(b"Cannot walk via filelog")
2350 2350 else:
2351 2351 continue
2352 2352
2353 2353 if node is None:
2354 2354 last = len(filelog) - 1
2355 2355 else:
2356 2356 last = filelog.rev(node)
2357 2357
2358 2358 # keep track of all ancestors of the file
2359 2359 ancestors = {filelog.linkrev(last)}
2360 2360
2361 2361 # iterate from latest to oldest revision
2362 2362 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
2363 2363 if not follow:
2364 2364 if rev > maxrev:
2365 2365 continue
2366 2366 else:
2367 2367 # Note that last might not be the first interesting
2368 2368 # rev to us:
2369 2369 # if the file has been changed after maxrev, we'll
2370 2370 # have linkrev(last) > maxrev, and we still need
2371 2371 # to explore the file graph
2372 2372 if rev not in ancestors:
2373 2373 continue
2374 2374 # XXX insert 1327 fix here
2375 2375 if flparentlinkrevs:
2376 2376 ancestors.update(flparentlinkrevs)
2377 2377
2378 2378 fncache.setdefault(rev, []).append(file_)
2379 2379 wanted.add(rev)
2380 2380 if copied:
2381 2381 copies.append(copied)
2382 2382
2383 2383 return wanted
2384 2384
2385 2385
2386 2386 class _followfilter(object):
2387 2387 def __init__(self, repo, onlyfirst=False):
2388 2388 self.repo = repo
2389 2389 self.startrev = nullrev
2390 2390 self.roots = set()
2391 2391 self.onlyfirst = onlyfirst
2392 2392
2393 2393 def match(self, rev):
2394 2394 def realparents(rev):
2395 2395 if self.onlyfirst:
2396 2396 return self.repo.changelog.parentrevs(rev)[0:1]
2397 2397 else:
2398 2398 return filter(
2399 2399 lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
2400 2400 )
2401 2401
2402 2402 if self.startrev == nullrev:
2403 2403 self.startrev = rev
2404 2404 return True
2405 2405
2406 2406 if rev > self.startrev:
2407 2407 # forward: all descendants
2408 2408 if not self.roots:
2409 2409 self.roots.add(self.startrev)
2410 2410 for parent in realparents(rev):
2411 2411 if parent in self.roots:
2412 2412 self.roots.add(rev)
2413 2413 return True
2414 2414 else:
2415 2415 # backwards: all parents
2416 2416 if not self.roots:
2417 2417 self.roots.update(realparents(self.startrev))
2418 2418 if rev in self.roots:
2419 2419 self.roots.remove(rev)
2420 2420 self.roots.update(realparents(rev))
2421 2421 return True
2422 2422
2423 2423 return False
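# Illustrative note (not part of the original source): walkchangerevs() below
# uses _followfilter both to honour --prune and, when following with no file
# patterns, to restrict which revisions are yielded.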
2424 2424
2425 2425
2426 2426 def walkchangerevs(repo, match, opts, prepare):
2427 2427 '''Iterate over files and the revs in which they changed.
2428 2428
2429 2429 Callers most commonly need to iterate backwards over the history
2430 2430 in which they are interested. Doing so has awful (quadratic-looking)
2431 2431 performance, so we use iterators in a "windowed" way.
2432 2432
2433 2433 We walk a window of revisions in the desired order. Within the
2434 2434 window, we first walk forwards to gather data, then in the desired
2435 2435 order (usually backwards) to display it.
2436 2436
2437 2437 This function returns an iterator yielding contexts. Before
2438 2438 yielding each context, the iterator will first call the prepare
2439 2439 function on each context in the window in forward order.'''
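# Illustrative sketch (not part of the original source): finddate() above is
# a typical caller; the general shape is
#
#   def prepare(ctx, fns):
#       ...  # examine ctx and the filenames that matched
#   for ctx in walkchangerevs(repo, match, {b'rev': None}, prepare):
#       ...  # contexts arrive here in the requested (usually newest-first)
#            # order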
2440 2440
2441 2441 allfiles = opts.get(b'all_files')
2442 2442 follow = opts.get(b'follow') or opts.get(b'follow_first')
2443 2443 revs = _walkrevs(repo, opts)
2444 2444 if not revs:
2445 2445 return []
2446 2446 wanted = set()
2447 2447 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
2448 2448 fncache = {}
2449 2449 change = repo.__getitem__
2450 2450
2451 2451 # First step is to fill wanted, the set of revisions that we want to yield.
2452 2452 # When it does not induce extra cost, we also fill fncache for revisions in
2453 2453 # wanted: a cache of filenames that were changed (ctx.files()) and that
2454 2454 # match the file filtering conditions.
2455 2455
2456 2456 if match.always() or allfiles:
2457 2457 # No files, no patterns. Display all revs.
2458 2458 wanted = revs
2459 2459 elif not slowpath:
2460 2460 # We only have to read through the filelog to find wanted revisions
2461 2461
2462 2462 try:
2463 2463 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2464 2464 except FileWalkError:
2465 2465 slowpath = True
2466 2466
2467 2467 # We decided to fall back to the slowpath because at least one
2468 2468 # of the paths was not a file. Check to see if at least one of them
2469 2469 # existed in history, otherwise simply return
2470 2470 for path in match.files():
2471 2471 if path == b'.' or path in repo.store:
2472 2472 break
2473 2473 else:
2474 2474 return []
2475 2475
2476 2476 if slowpath:
2477 2477 # We have to read the changelog to match filenames against
2478 2478 # changed files
2479 2479
2480 2480 if follow:
2481 2481 raise error.Abort(
2482 2482 _(b'can only follow copies/renames for explicit filenames')
2483 2483 )
2484 2484
2485 2485 # The slow path checks files modified in every changeset.
2486 2486 # This is really slow on large repos, so compute the set lazily.
2487 2487 class lazywantedset(object):
2488 2488 def __init__(self):
2489 2489 self.set = set()
2490 2490 self.revs = set(revs)
2491 2491
2492 2492 # No need to worry about locality here because it will be accessed
2493 2493 # in the same order as the increasing window below.
2494 2494 def __contains__(self, value):
2495 2495 if value in self.set:
2496 2496 return True
2497 2497 elif value not in self.revs:
2498 2498 return False
2499 2499 else:
2500 2500 self.revs.discard(value)
2501 2501 ctx = change(value)
2502 2502 if allfiles:
2503 2503 matches = list(ctx.manifest().walk(match))
2504 2504 else:
2505 2505 matches = [f for f in ctx.files() if match(f)]
2506 2506 if matches:
2507 2507 fncache[value] = matches
2508 2508 self.set.add(value)
2509 2509 return True
2510 2510 return False
2511 2511
2512 2512 def discard(self, value):
2513 2513 self.revs.discard(value)
2514 2514 self.set.discard(value)
2515 2515
2516 2516 wanted = lazywantedset()
2517 2517
2518 2518 # it might be worthwhile to do this in the iterator if the rev range
2519 2519 # is descending and the prune args are all within that range
2520 2520 for rev in opts.get(b'prune', ()):
2521 2521 rev = repo[rev].rev()
2522 2522 ff = _followfilter(repo)
2523 2523 stop = min(revs[0], revs[-1])
2524 2524 for x in pycompat.xrange(rev, stop - 1, -1):
2525 2525 if ff.match(x):
2526 2526 wanted = wanted - [x]
2527 2527
2528 2528 # Now that wanted is correctly initialized, we can iterate over the
2529 2529 # revision range, yielding only revisions in wanted.
2530 2530 def iterate():
2531 2531 if follow and match.always():
2532 2532 ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
2533 2533
2534 2534 def want(rev):
2535 2535 return ff.match(rev) and rev in wanted
2536 2536
2537 2537 else:
2538 2538
2539 2539 def want(rev):
2540 2540 return rev in wanted
2541 2541
2542 2542 it = iter(revs)
2543 2543 stopiteration = False
2544 2544 for windowsize in increasingwindows():
2545 2545 nrevs = []
2546 2546 for i in pycompat.xrange(windowsize):
2547 2547 rev = next(it, None)
2548 2548 if rev is None:
2549 2549 stopiteration = True
2550 2550 break
2551 2551 elif want(rev):
2552 2552 nrevs.append(rev)
2553 2553 for rev in sorted(nrevs):
2554 2554 fns = fncache.get(rev)
2555 2555 ctx = change(rev)
2556 2556 if not fns:
2557 2557
2558 2558 def fns_generator():
2559 2559 if allfiles:
2560 2560
2561 2561 def bad(f, msg):
2562 2562 pass
2563 2563
2564 2564 for f in ctx.matches(matchmod.badmatch(match, bad)):
2565 2565 yield f
2566 2566 else:
2567 2567 for f in ctx.files():
2568 2568 if match(f):
2569 2569 yield f
2570 2570
2571 2571 fns = fns_generator()
2572 2572 prepare(ctx, fns)
2573 2573 for rev in nrevs:
2574 2574 yield change(rev)
2575 2575
2576 2576 if stopiteration:
2577 2577 break
2578 2578
2579 2579 return iterate()
2580 2580
2581 2581
2582 2582 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2583 2583 bad = []
2584 2584
2585 2585 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2586 2586 names = []
2587 2587 wctx = repo[None]
2588 2588 cca = None
2589 2589 abort, warn = scmutil.checkportabilityalert(ui)
2590 2590 if abort or warn:
2591 2591 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2592 2592
2593 2593 match = repo.narrowmatch(match, includeexact=True)
2594 2594 badmatch = matchmod.badmatch(match, badfn)
2595 2595 dirstate = repo.dirstate
2596 2596 # We don't want to just call wctx.walk here, since it would return a lot of
2597 2597 # clean files, which we aren't interested in, and that takes time.
2598 2598 for f in sorted(
2599 2599 dirstate.walk(
2600 2600 badmatch,
2601 2601 subrepos=sorted(wctx.substate),
2602 2602 unknown=True,
2603 2603 ignored=False,
2604 2604 full=False,
2605 2605 )
2606 2606 ):
2607 2607 exact = match.exact(f)
2608 2608 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2609 2609 if cca:
2610 2610 cca(f)
2611 2611 names.append(f)
2612 2612 if ui.verbose or not exact:
2613 2613 ui.status(
2614 2614 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2615 2615 )
2616 2616
2617 2617 for subpath in sorted(wctx.substate):
2618 2618 sub = wctx.sub(subpath)
2619 2619 try:
2620 2620 submatch = matchmod.subdirmatcher(subpath, match)
2621 2621 subprefix = repo.wvfs.reljoin(prefix, subpath)
2622 2622 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2623 2623 if opts.get('subrepos'):
2624 2624 bad.extend(
2625 2625 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2626 2626 )
2627 2627 else:
2628 2628 bad.extend(
2629 2629 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2630 2630 )
2631 2631 except error.LookupError:
2632 2632 ui.status(
2633 2633 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2634 2634 )
2635 2635
2636 2636 if not opts.get('dry_run'):
2637 2637 rejected = wctx.add(names, prefix)
2638 2638 bad.extend(f for f in rejected if f in match.files())
2639 2639 return bad
2640 2640
2641 2641
2642 2642 def addwebdirpath(repo, serverpath, webconf):
2643 2643 webconf[serverpath] = repo.root
2644 2644 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2645 2645
2646 2646 for r in repo.revs(b'filelog("path:.hgsub")'):
2647 2647 ctx = repo[r]
2648 2648 for subpath in ctx.substate:
2649 2649 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2650 2650
2651 2651
2652 2652 def forget(
2653 2653 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2654 2654 ):
2655 2655 if dryrun and interactive:
2656 2656 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2657 2657 bad = []
2658 2658 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2659 2659 wctx = repo[None]
2660 2660 forgot = []
2661 2661
2662 2662 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2663 2663 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2664 2664 if explicitonly:
2665 2665 forget = [f for f in forget if match.exact(f)]
2666 2666
2667 2667 for subpath in sorted(wctx.substate):
2668 2668 sub = wctx.sub(subpath)
2669 2669 submatch = matchmod.subdirmatcher(subpath, match)
2670 2670 subprefix = repo.wvfs.reljoin(prefix, subpath)
2671 2671 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2672 2672 try:
2673 2673 subbad, subforgot = sub.forget(
2674 2674 submatch,
2675 2675 subprefix,
2676 2676 subuipathfn,
2677 2677 dryrun=dryrun,
2678 2678 interactive=interactive,
2679 2679 )
2680 2680 bad.extend([subpath + b'/' + f for f in subbad])
2681 2681 forgot.extend([subpath + b'/' + f for f in subforgot])
2682 2682 except error.LookupError:
2683 2683 ui.status(
2684 2684 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2685 2685 )
2686 2686
2687 2687 if not explicitonly:
2688 2688 for f in match.files():
2689 2689 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2690 2690 if f not in forgot:
2691 2691 if repo.wvfs.exists(f):
2692 2692 # Don't complain if the exact case match wasn't given.
2693 2693 # But don't do this until after checking 'forgot', so
2694 2694 # that subrepo files aren't normalized, and this op is
2695 2695 # purely from data cached by the status walk above.
2696 2696 if repo.dirstate.normalize(f) in repo.dirstate:
2697 2697 continue
2698 2698 ui.warn(
2699 2699 _(
2700 2700 b'not removing %s: '
2701 2701 b'file is already untracked\n'
2702 2702 )
2703 2703 % uipathfn(f)
2704 2704 )
2705 2705 bad.append(f)
2706 2706
2707 2707 if interactive:
2708 2708 responses = _(
2709 2709 b'[Ynsa?]'
2710 2710 b'$$ &Yes, forget this file'
2711 2711 b'$$ &No, skip this file'
2712 2712 b'$$ &Skip remaining files'
2713 2713 b'$$ Include &all remaining files'
2714 2714 b'$$ &? (display help)'
2715 2715 )
2716 2716 for filename in forget[:]:
2717 2717 r = ui.promptchoice(
2718 2718 _(b'forget %s %s') % (uipathfn(filename), responses)
2719 2719 )
2720 2720 if r == 4: # ?
2721 2721 while r == 4:
2722 2722 for c, t in ui.extractchoices(responses)[1]:
2723 2723 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2724 2724 r = ui.promptchoice(
2725 2725 _(b'forget %s %s') % (uipathfn(filename), responses)
2726 2726 )
2727 2727 if r == 0: # yes
2728 2728 continue
2729 2729 elif r == 1: # no
2730 2730 forget.remove(filename)
2731 2731 elif r == 2: # Skip
2732 2732 fnindex = forget.index(filename)
2733 2733 del forget[fnindex:]
2734 2734 break
2735 2735 elif r == 3: # All
2736 2736 break
2737 2737
2738 2738 for f in forget:
2739 2739 if ui.verbose or not match.exact(f) or interactive:
2740 2740 ui.status(
2741 2741 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2742 2742 )
2743 2743
2744 2744 if not dryrun:
2745 2745 rejected = wctx.forget(forget, prefix)
2746 2746 bad.extend(f for f in rejected if f in match.files())
2747 2747 forgot.extend(f for f in forget if f not in rejected)
2748 2748 return bad, forgot
2749 2749
2750 2750
2751 2751 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2752 2752 ret = 1
2753 2753
2754 2754 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2755 for f in ctx.matches(m):
2756 fm.startitem()
2757 fm.context(ctx=ctx)
2758 if needsfctx:
2759 fc = ctx[f]
2760 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2761 fm.data(path=f)
2762 fm.plain(fmt % uipathfn(f))
2763 ret = 0
2755 if fm.isplain() and not needsfctx:
2756 # Fast path. The speed-up comes from skipping the formatter, and batching
2757 # calls to ui.write.
2758 buf = []
2759 for f in ctx.matches(m):
2760 buf.append(fmt % uipathfn(f))
2761 if len(buf) > 100:
2762 ui.write(b''.join(buf))
2763 del buf[:]
2764 ret = 0
2765 if buf:
2766 ui.write(b''.join(buf))
2767 else:
2768 for f in ctx.matches(m):
2769 fm.startitem()
2770 fm.context(ctx=ctx)
2771 if needsfctx:
2772 fc = ctx[f]
2773 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2774 fm.data(path=f)
2775 fm.plain(fmt % uipathfn(f))
2776 ret = 0
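# Illustrative note (not part of the original source): the fast path above is
# taken only when the formatter is plain (no -T/--template) and no per-file
# metadata is needed (not --verbose, no size/flags requested); otherwise the
# formatter-based loop above runs as before.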
2764 2777
2765 2778 for subpath in sorted(ctx.substate):
2766 2779 submatch = matchmod.subdirmatcher(subpath, m)
2767 2780 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2768 2781 if subrepos or m.exact(subpath) or any(submatch.files()):
2769 2782 sub = ctx.sub(subpath)
2770 2783 try:
2771 2784 recurse = m.exact(subpath) or subrepos
2772 2785 if (
2773 2786 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2774 2787 == 0
2775 2788 ):
2776 2789 ret = 0
2777 2790 except error.LookupError:
2778 2791 ui.status(
2779 2792 _(b"skipping missing subrepository: %s\n")
2780 2793 % uipathfn(subpath)
2781 2794 )
2782 2795
2783 2796 return ret
2784 2797
2785 2798
2786 2799 def remove(
2787 2800 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2788 2801 ):
2789 2802 ret = 0
2790 2803 s = repo.status(match=m, clean=True)
2791 2804 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2792 2805
2793 2806 wctx = repo[None]
2794 2807
2795 2808 if warnings is None:
2796 2809 warnings = []
2797 2810 warn = True
2798 2811 else:
2799 2812 warn = False
2800 2813
2801 2814 subs = sorted(wctx.substate)
2802 2815 progress = ui.makeprogress(
2803 2816 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2804 2817 )
2805 2818 for subpath in subs:
2806 2819 submatch = matchmod.subdirmatcher(subpath, m)
2807 2820 subprefix = repo.wvfs.reljoin(prefix, subpath)
2808 2821 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2809 2822 if subrepos or m.exact(subpath) or any(submatch.files()):
2810 2823 progress.increment()
2811 2824 sub = wctx.sub(subpath)
2812 2825 try:
2813 2826 if sub.removefiles(
2814 2827 submatch,
2815 2828 subprefix,
2816 2829 subuipathfn,
2817 2830 after,
2818 2831 force,
2819 2832 subrepos,
2820 2833 dryrun,
2821 2834 warnings,
2822 2835 ):
2823 2836 ret = 1
2824 2837 except error.LookupError:
2825 2838 warnings.append(
2826 2839 _(b"skipping missing subrepository: %s\n")
2827 2840 % uipathfn(subpath)
2828 2841 )
2829 2842 progress.complete()
2830 2843
2831 2844 # warn about failure to delete explicit files/dirs
2832 2845 deleteddirs = pathutil.dirs(deleted)
2833 2846 files = m.files()
2834 2847 progress = ui.makeprogress(
2835 2848 _(b'deleting'), total=len(files), unit=_(b'files')
2836 2849 )
2837 2850 for f in files:
2838 2851
2839 2852 def insubrepo():
2840 2853 for subpath in wctx.substate:
2841 2854 if f.startswith(subpath + b'/'):
2842 2855 return True
2843 2856 return False
2844 2857
2845 2858 progress.increment()
2846 2859 isdir = f in deleteddirs or wctx.hasdir(f)
2847 2860 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2848 2861 continue
2849 2862
2850 2863 if repo.wvfs.exists(f):
2851 2864 if repo.wvfs.isdir(f):
2852 2865 warnings.append(
2853 2866 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2854 2867 )
2855 2868 else:
2856 2869 warnings.append(
2857 2870 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2858 2871 )
2859 2872 # missing files will generate a warning elsewhere
2860 2873 ret = 1
2861 2874 progress.complete()
2862 2875
2863 2876 if force:
2864 2877 list = modified + deleted + clean + added
2865 2878 elif after:
2866 2879 list = deleted
2867 2880 remaining = modified + added + clean
2868 2881 progress = ui.makeprogress(
2869 2882 _(b'skipping'), total=len(remaining), unit=_(b'files')
2870 2883 )
2871 2884 for f in remaining:
2872 2885 progress.increment()
2873 2886 if ui.verbose or (f in files):
2874 2887 warnings.append(
2875 2888 _(b'not removing %s: file still exists\n') % uipathfn(f)
2876 2889 )
2877 2890 ret = 1
2878 2891 progress.complete()
2879 2892 else:
2880 2893 list = deleted + clean
2881 2894 progress = ui.makeprogress(
2882 2895 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2883 2896 )
2884 2897 for f in modified:
2885 2898 progress.increment()
2886 2899 warnings.append(
2887 2900 _(
2888 2901 b'not removing %s: file is modified (use -f'
2889 2902 b' to force removal)\n'
2890 2903 )
2891 2904 % uipathfn(f)
2892 2905 )
2893 2906 ret = 1
2894 2907 for f in added:
2895 2908 progress.increment()
2896 2909 warnings.append(
2897 2910 _(
2898 2911 b"not removing %s: file has been marked for add"
2899 2912 b" (use 'hg forget' to undo add)\n"
2900 2913 )
2901 2914 % uipathfn(f)
2902 2915 )
2903 2916 ret = 1
2904 2917 progress.complete()
2905 2918
2906 2919 list = sorted(list)
2907 2920 progress = ui.makeprogress(
2908 2921 _(b'deleting'), total=len(list), unit=_(b'files')
2909 2922 )
2910 2923 for f in list:
2911 2924 if ui.verbose or not m.exact(f):
2912 2925 progress.increment()
2913 2926 ui.status(
2914 2927 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2915 2928 )
2916 2929 progress.complete()
2917 2930
2918 2931 if not dryrun:
2919 2932 with repo.wlock():
2920 2933 if not after:
2921 2934 for f in list:
2922 2935 if f in added:
2923 2936 continue # we never unlink added files on remove
2924 2937 rmdir = repo.ui.configbool(
2925 2938 b'experimental', b'removeemptydirs'
2926 2939 )
2927 2940 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2928 2941 repo[None].forget(list)
2929 2942
2930 2943 if warn:
2931 2944 for warning in warnings:
2932 2945 ui.warn(warning)
2933 2946
2934 2947 return ret
2935 2948
2936 2949
2937 2950 def _catfmtneedsdata(fm):
2938 2951 return not fm.datahint() or b'data' in fm.datahint()
2939 2952
2940 2953
2941 2954 def _updatecatformatter(fm, ctx, matcher, path, decode):
2942 2955 """Hook for adding data to the formatter used by ``hg cat``.
2943 2956
2944 2957 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2945 2958 this method first."""
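# Illustrative sketch (not part of the original source): an extension could
# wrap this hook via extensions.wrapfunction(), calling the original first as
# required above (the wrapper name and extra keyword are hypothetical):
#
#   def _mycatformatter(orig, fm, ctx, matcher, path, decode):
#       orig(fm, ctx, matcher, path, decode)
#       fm.data(myextra=b'...')
#
#   extensions.wrapfunction(cmdutil, '_updatecatformatter', _mycatformatter)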
2946 2959
2947 2960 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2948 2961 # wasn't requested.
2949 2962 data = b''
2950 2963 if _catfmtneedsdata(fm):
2951 2964 data = ctx[path].data()
2952 2965 if decode:
2953 2966 data = ctx.repo().wwritedata(path, data)
2954 2967 fm.startitem()
2955 2968 fm.context(ctx=ctx)
2956 2969 fm.write(b'data', b'%s', data)
2957 2970 fm.data(path=path)
2958 2971
2959 2972
2960 2973 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2961 2974 err = 1
2962 2975 opts = pycompat.byteskwargs(opts)
2963 2976
2964 2977 def write(path):
2965 2978 filename = None
2966 2979 if fntemplate:
2967 2980 filename = makefilename(
2968 2981 ctx, fntemplate, pathname=os.path.join(prefix, path)
2969 2982 )
2970 2983 # attempt to create the directory if it does not already exist
2971 2984 try:
2972 2985 os.makedirs(os.path.dirname(filename))
2973 2986 except OSError:
2974 2987 pass
2975 2988 with formatter.maybereopen(basefm, filename) as fm:
2976 2989 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2977 2990
2978 2991 # Automation often uses hg cat on single files, so special case it
2979 2992 # for performance to avoid the cost of parsing the manifest.
2980 2993 if len(matcher.files()) == 1 and not matcher.anypats():
2981 2994 file = matcher.files()[0]
2982 2995 mfl = repo.manifestlog
2983 2996 mfnode = ctx.manifestnode()
2984 2997 try:
2985 2998 if mfnode and mfl[mfnode].find(file)[0]:
2986 2999 if _catfmtneedsdata(basefm):
2987 3000 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2988 3001 write(file)
2989 3002 return 0
2990 3003 except KeyError:
2991 3004 pass
2992 3005
2993 3006 if _catfmtneedsdata(basefm):
2994 3007 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2995 3008
2996 3009 for abs in ctx.walk(matcher):
2997 3010 write(abs)
2998 3011 err = 0
2999 3012
3000 3013 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3001 3014 for subpath in sorted(ctx.substate):
3002 3015 sub = ctx.sub(subpath)
3003 3016 try:
3004 3017 submatch = matchmod.subdirmatcher(subpath, matcher)
3005 3018 subprefix = os.path.join(prefix, subpath)
3006 3019 if not sub.cat(
3007 3020 submatch,
3008 3021 basefm,
3009 3022 fntemplate,
3010 3023 subprefix,
3011 3024 **pycompat.strkwargs(opts)
3012 3025 ):
3013 3026 err = 0
3014 3027 except error.RepoLookupError:
3015 3028 ui.status(
3016 3029 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3017 3030 )
3018 3031
3019 3032 return err
3020 3033
3021 3034
3022 3035 def commit(ui, repo, commitfunc, pats, opts):
3023 3036 '''commit the specified files or all outstanding changes'''
3024 3037 date = opts.get(b'date')
3025 3038 if date:
3026 3039 opts[b'date'] = dateutil.parsedate(date)
3027 3040 message = logmessage(ui, opts)
3028 3041 matcher = scmutil.match(repo[None], pats, opts)
3029 3042
3030 3043 dsguard = None
3031 3044 # extract addremove carefully -- this function can be called from a command
3032 3045 # that doesn't support addremove
3033 3046 if opts.get(b'addremove'):
3034 3047 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3035 3048 with dsguard or util.nullcontextmanager():
3036 3049 if dsguard:
3037 3050 relative = scmutil.anypats(pats, opts)
3038 3051 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3039 3052 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3040 3053 raise error.Abort(
3041 3054 _(b"failed to mark all new/missing files as added/removed")
3042 3055 )
3043 3056
3044 3057 return commitfunc(ui, repo, message, matcher, opts)
3045 3058
3046 3059
3047 3060 def samefile(f, ctx1, ctx2):
3048 3061 if f in ctx1.manifest():
3049 3062 a = ctx1.filectx(f)
3050 3063 if f in ctx2.manifest():
3051 3064 b = ctx2.filectx(f)
3052 3065 return not a.cmp(b) and a.flags() == b.flags()
3053 3066 else:
3054 3067 return False
3055 3068 else:
3056 3069 return f not in ctx2.manifest()
3057 3070
3058 3071
3059 3072 def amend(ui, repo, old, extra, pats, opts):
3060 3073 # avoid cycle context -> subrepo -> cmdutil
3061 3074 from . import context
3062 3075
3063 3076 # amend will reuse the existing user if not specified, but the obsolete
3064 3077 # marker creation requires that the current user's name is specified.
3065 3078 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3066 3079 ui.username() # raise exception if username not set
3067 3080
3068 3081 ui.note(_(b'amending changeset %s\n') % old)
3069 3082 base = old.p1()
3070 3083
3071 3084 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3072 3085 # Participating changesets:
3073 3086 #
3074 3087 # wctx o - workingctx that contains changes from working copy
3075 3088 # | to go into amending commit
3076 3089 # |
3077 3090 # old o - changeset to amend
3078 3091 # |
3079 3092 # base o - first parent of the changeset to amend
3080 3093 wctx = repo[None]
3081 3094
3082 3095 # Copy to avoid mutating input
3083 3096 extra = extra.copy()
3084 3097 # Update extra dict from amended commit (e.g. to preserve graft
3085 3098 # source)
3086 3099 extra.update(old.extra())
3087 3100
3088 3101 # Also update it from the wctx
3089 3102 extra.update(wctx.extra())
3090 3103
3091 3104 # date-only change should be ignored?
3092 3105 datemaydiffer = resolvecommitoptions(ui, opts)
3093 3106
3094 3107 date = old.date()
3095 3108 if opts.get(b'date'):
3096 3109 date = dateutil.parsedate(opts.get(b'date'))
3097 3110 user = opts.get(b'user') or old.user()
3098 3111
3099 3112 if len(old.parents()) > 1:
3100 3113 # ctx.files() isn't reliable for merges, so fall back to the
3101 3114 # slower repo.status() method
3102 3115 st = base.status(old)
3103 3116 files = set(st.modified) | set(st.added) | set(st.removed)
3104 3117 else:
3105 3118 files = set(old.files())
3106 3119
3107 3120 # add/remove the files to the working copy if the "addremove" option
3108 3121 # was specified.
3109 3122 matcher = scmutil.match(wctx, pats, opts)
3110 3123 relative = scmutil.anypats(pats, opts)
3111 3124 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3112 3125 if opts.get(b'addremove') and scmutil.addremove(
3113 3126 repo, matcher, b"", uipathfn, opts
3114 3127 ):
3115 3128 raise error.Abort(
3116 3129 _(b"failed to mark all new/missing files as added/removed")
3117 3130 )
3118 3131
3119 3132 # Check subrepos. This depends on in-place wctx._status update in
3120 3133 # subrepo.precommit(). To minimize the risk of this hack, we do
3121 3134 # nothing if .hgsub does not exist.
3122 3135 if b'.hgsub' in wctx or b'.hgsub' in old:
3123 3136 subs, commitsubs, newsubstate = subrepoutil.precommit(
3124 3137 ui, wctx, wctx._status, matcher
3125 3138 )
3126 3139 # amend should abort if commitsubrepos is enabled
3127 3140 assert not commitsubs
3128 3141 if subs:
3129 3142 subrepoutil.writestate(repo, newsubstate)
3130 3143
3131 3144 ms = mergestatemod.mergestate.read(repo)
3132 3145 mergeutil.checkunresolved(ms)
3133 3146
3134 3147 filestoamend = {f for f in wctx.files() if matcher(f)}
3135 3148
3136 3149 changes = len(filestoamend) > 0
3137 3150 if changes:
3138 3151 # Recompute copies (avoid recording a -> b -> a)
3139 3152 copied = copies.pathcopies(base, wctx, matcher)
3140 3153 if old.p2:
3141 3154 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3142 3155
3143 3156 # Prune files which were reverted by the updates: if old
3144 3157 # introduced file X and the file was renamed in the working
3145 3158 # copy, then those two files are the same and
3146 3159 # we can discard X from our list of files. Likewise if X
3147 3160 # was removed, it's no longer relevant. If X is missing (aka
3148 3161 # deleted), old X must be preserved.
3149 3162 files.update(filestoamend)
3150 3163 files = [
3151 3164 f
3152 3165 for f in files
3153 3166 if (f not in filestoamend or not samefile(f, wctx, base))
3154 3167 ]
3155 3168
3156 3169 def filectxfn(repo, ctx_, path):
3157 3170 try:
3158 3171 # If the file being considered is not amongst the files
3159 3172 # to be amended, we should return the file context from the
3160 3173 # old changeset. This avoids issues when only some files in
3161 3174 # the working copy are being amended but there are also
3162 3175 # changes to other files from the old changeset.
3163 3176 if path not in filestoamend:
3164 3177 return old.filectx(path)
3165 3178
3166 3179 # Return None for removed files.
3167 3180 if path in wctx.removed():
3168 3181 return None
3169 3182
3170 3183 fctx = wctx[path]
3171 3184 flags = fctx.flags()
3172 3185 mctx = context.memfilectx(
3173 3186 repo,
3174 3187 ctx_,
3175 3188 fctx.path(),
3176 3189 fctx.data(),
3177 3190 islink=b'l' in flags,
3178 3191 isexec=b'x' in flags,
3179 3192 copysource=copied.get(path),
3180 3193 )
3181 3194 return mctx
3182 3195 except KeyError:
3183 3196 return None
3184 3197
3185 3198 else:
3186 3199 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3187 3200
3188 3201 # Use version of files as in the old cset
3189 3202 def filectxfn(repo, ctx_, path):
3190 3203 try:
3191 3204 return old.filectx(path)
3192 3205 except KeyError:
3193 3206 return None
3194 3207
3195 3208 # See if we got a message from -m or -l, if not, open the editor with
3196 3209 # the message of the changeset to amend.
3197 3210 message = logmessage(ui, opts)
3198 3211
3199 3212 editform = mergeeditform(old, b'commit.amend')
3200 3213
3201 3214 if not message:
3202 3215 message = old.description()
3203 3216 # Default if message isn't provided and --edit is not passed is to
3204 3217 # invoke editor, but allow --no-edit. If somehow we don't have any
3205 3218 # description, let's always start the editor.
3206 3219 doedit = not message or opts.get(b'edit') in [True, None]
3207 3220 else:
3208 3221 # Default if message is provided is to not invoke editor, but allow
3209 3222 # --edit.
3210 3223 doedit = opts.get(b'edit') is True
3211 3224 editor = getcommiteditor(edit=doedit, editform=editform)
3212 3225
3213 3226 pureextra = extra.copy()
3214 3227 extra[b'amend_source'] = old.hex()
3215 3228
3216 3229 new = context.memctx(
3217 3230 repo,
3218 3231 parents=[base.node(), old.p2().node()],
3219 3232 text=message,
3220 3233 files=files,
3221 3234 filectxfn=filectxfn,
3222 3235 user=user,
3223 3236 date=date,
3224 3237 extra=extra,
3225 3238 editor=editor,
3226 3239 )
3227 3240
3228 3241 newdesc = changelog.stripdesc(new.description())
3229 3242 if (
3230 3243 (not changes)
3231 3244 and newdesc == old.description()
3232 3245 and user == old.user()
3233 3246 and (date == old.date() or datemaydiffer)
3234 3247 and pureextra == old.extra()
3235 3248 ):
3236 3249 # nothing changed. continuing here would create a new node
3237 3250 # anyway because of the amend_source noise.
3238 3251 #
3239 3252 # This is not what we expect from amend.
3240 3253 return old.node()
3241 3254
3242 3255 commitphase = None
3243 3256 if opts.get(b'secret'):
3244 3257 commitphase = phases.secret
3245 3258 newid = repo.commitctx(new)
3246 3259
3247 3260 # Reroute the working copy parent to the new changeset
3248 3261 repo.setparents(newid, nullid)
3249 3262 mapping = {old.node(): (newid,)}
3250 3263 obsmetadata = None
3251 3264 if opts.get(b'note'):
3252 3265 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3253 3266 backup = ui.configbool(b'rewrite', b'backup-bundle')
3254 3267 scmutil.cleanupnodes(
3255 3268 repo,
3256 3269 mapping,
3257 3270 b'amend',
3258 3271 metadata=obsmetadata,
3259 3272 fixphase=True,
3260 3273 targetphase=commitphase,
3261 3274 backup=backup,
3262 3275 )
3263 3276
3264 3277 # Fixing the dirstate because localrepo.commitctx does not update
3265 3278 # it. This is rather convenient because we did not need to update
3266 3279 # the dirstate for all the files in the new commit which commitctx
3267 3280 # could have done if it updated the dirstate. Now, we can
3268 3281 # selectively update the dirstate only for the amended files.
3269 3282 dirstate = repo.dirstate
3270 3283
3271 3284 # Update the state of the files which were added and modified in the
3272 3285 # amend to "normal" in the dirstate. We need to use "normallookup" since
3273 3286 # the files may have changed since the command started; using "normal"
3274 3287 # would mark them as clean but with uncommitted contents.
3275 3288 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3276 3289 for f in normalfiles:
3277 3290 dirstate.normallookup(f)
3278 3291
3279 3292 # Update the state of files which were removed in the amend
3280 3293 # to "removed" in the dirstate.
3281 3294 removedfiles = set(wctx.removed()) & filestoamend
3282 3295 for f in removedfiles:
3283 3296 dirstate.drop(f)
3284 3297
3285 3298 return newid
3286 3299
3287 3300
3288 3301 def commiteditor(repo, ctx, subs, editform=b''):
3289 3302 if ctx.description():
3290 3303 return ctx.description()
3291 3304 return commitforceeditor(
3292 3305 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3293 3306 )
3294 3307
3295 3308
3296 3309 def commitforceeditor(
3297 3310 repo,
3298 3311 ctx,
3299 3312 subs,
3300 3313 finishdesc=None,
3301 3314 extramsg=None,
3302 3315 editform=b'',
3303 3316 unchangedmessagedetection=False,
3304 3317 ):
3305 3318 if not extramsg:
3306 3319 extramsg = _(b"Leave message empty to abort commit.")
3307 3320
3308 3321 forms = [e for e in editform.split(b'.') if e]
3309 3322 forms.insert(0, b'changeset')
3310 3323 templatetext = None
3311 3324 while forms:
3312 3325 ref = b'.'.join(forms)
3313 3326 if repo.ui.config(b'committemplate', ref):
3314 3327 templatetext = committext = buildcommittemplate(
3315 3328 repo, ctx, subs, extramsg, ref
3316 3329 )
3317 3330 break
3318 3331 forms.pop()
3319 3332 else:
3320 3333 committext = buildcommittext(repo, ctx, subs, extramsg)
3321 3334
3322 3335 # run editor in the repository root
3323 3336 olddir = encoding.getcwd()
3324 3337 os.chdir(repo.root)
3325 3338
3326 3339 # make in-memory changes visible to external process
3327 3340 tr = repo.currenttransaction()
3328 3341 repo.dirstate.write(tr)
3329 3342 pending = tr and tr.writepending() and repo.root
3330 3343
3331 3344 editortext = repo.ui.edit(
3332 3345 committext,
3333 3346 ctx.user(),
3334 3347 ctx.extra(),
3335 3348 editform=editform,
3336 3349 pending=pending,
3337 3350 repopath=repo.path,
3338 3351 action=b'commit',
3339 3352 )
3340 3353 text = editortext
3341 3354
3342 3355 # strip away anything below this special string (used for editors that want
3343 3356 # to display the diff)
3344 3357 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3345 3358 if stripbelow:
3346 3359 text = text[: stripbelow.start()]
3347 3360
3348 3361 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3349 3362 os.chdir(olddir)
3350 3363
3351 3364 if finishdesc:
3352 3365 text = finishdesc(text)
3353 3366 if not text.strip():
3354 3367 raise error.Abort(_(b"empty commit message"))
3355 3368 if unchangedmessagedetection and editortext == templatetext:
3356 3369 raise error.Abort(_(b"commit message unchanged"))
3357 3370
3358 3371 return text
3359 3372
3360 3373
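# Editor's sketch (not part of cmdutil): the loop at the top of
# commitforceeditor() above picks a "committemplate.<ref>" config entry by
# progressively dropping the last dotted component of the edit form, always
# keeping the implicit "changeset" prefix. The same lookup order, standalone:
def _sketch_committemplate_refs(editform):
    """Yield candidate committemplate keys, most specific first.

    list(_sketch_committemplate_refs(b'commit.amend')) ==
        [b'changeset.commit.amend', b'changeset.commit', b'changeset']
    """
    forms = [e for e in editform.split(b'.') if e]
    forms.insert(0, b'changeset')
    while forms:
        yield b'.'.join(forms)
        forms.pop()
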
3361 3374 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3362 3375 ui = repo.ui
3363 3376 spec = formatter.templatespec(ref, None, None)
3364 3377 t = logcmdutil.changesettemplater(ui, repo, spec)
3365 3378 t.t.cache.update(
3366 3379 (k, templater.unquotestring(v))
3367 3380 for k, v in repo.ui.configitems(b'committemplate')
3368 3381 )
3369 3382
3370 3383 if not extramsg:
3371 3384 extramsg = b'' # ensure that extramsg is string
3372 3385
3373 3386 ui.pushbuffer()
3374 3387 t.show(ctx, extramsg=extramsg)
3375 3388 return ui.popbuffer()
3376 3389
3377 3390
3378 3391 def hgprefix(msg):
3379 3392 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3380 3393
3381 3394
3382 3395 def buildcommittext(repo, ctx, subs, extramsg):
3383 3396 edittext = []
3384 3397 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3385 3398 if ctx.description():
3386 3399 edittext.append(ctx.description())
3387 3400 edittext.append(b"")
3388 3401 edittext.append(b"") # Empty line between message and comments.
3389 3402 edittext.append(
3390 3403 hgprefix(
3391 3404 _(
3392 3405 b"Enter commit message."
3393 3406 b" Lines beginning with 'HG:' are removed."
3394 3407 )
3395 3408 )
3396 3409 )
3397 3410 edittext.append(hgprefix(extramsg))
3398 3411 edittext.append(b"HG: --")
3399 3412 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3400 3413 if ctx.p2():
3401 3414 edittext.append(hgprefix(_(b"branch merge")))
3402 3415 if ctx.branch():
3403 3416 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3404 3417 if bookmarks.isactivewdirparent(repo):
3405 3418 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3406 3419 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3407 3420 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3408 3421 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3409 3422 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3410 3423 if not added and not modified and not removed:
3411 3424 edittext.append(hgprefix(_(b"no files changed")))
3412 3425 edittext.append(b"")
3413 3426
3414 3427 return b"\n".join(edittext)
3415 3428
3416 3429
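# Editor's sketch: buildcommittext() above prefixes every helper line with
# "HG:" via hgprefix(), and commitforceeditor() later strips those lines with
# the regex shown there. A standalone round trip of that stripping step
# (illustrative only, not part of cmdutil):
import re

def _sketch_strip_hg_comments(editortext):
    # drop every line starting with "HG:", mirroring the substitution used in
    # commitforceeditor()
    return re.sub(b"(?m)^HG:.*(\n|$)", b"", editortext)

# _sketch_strip_hg_comments(b"my message\nHG: user: alice\n") == b"my message\n"
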
3417 3430 def commitstatus(repo, node, branch, bheads=None, opts=None):
3418 3431 if opts is None:
3419 3432 opts = {}
3420 3433 ctx = repo[node]
3421 3434 parents = ctx.parents()
3422 3435
3423 3436 if (
3424 3437 not opts.get(b'amend')
3425 3438 and bheads
3426 3439 and node not in bheads
3427 3440 and not [
3428 3441 x for x in parents if x.node() in bheads and x.branch() == branch
3429 3442 ]
3430 3443 ):
3431 3444 repo.ui.status(_(b'created new head\n'))
3432 3445 # The message is not printed for initial roots. For the other
3433 3446 # changesets, it is printed in the following situations:
3434 3447 #
3435 3448 # Par column: for the 2 parents with ...
3436 3449 # N: null or no parent
3437 3450 # B: parent is on another named branch
3438 3451 # C: parent is a regular non head changeset
3439 3452 # H: parent was a branch head of the current branch
3440 3453 # Msg column: whether we print "created new head" message
3441 3454 # In the following, it is assumed that there already exists some
3442 3455 # initial branch heads of the current branch, otherwise nothing is
3443 3456 # printed anyway.
3444 3457 #
3445 3458 # Par Msg Comment
3446 3459 # N N y additional topo root
3447 3460 #
3448 3461 # B N y additional branch root
3449 3462 # C N y additional topo head
3450 3463 # H N n usual case
3451 3464 #
3452 3465 # B B y weird additional branch root
3453 3466 # C B y branch merge
3454 3467 # H B n merge with named branch
3455 3468 #
3456 3469 # C C y additional head from merge
3457 3470 # C H n merge with a head
3458 3471 #
3459 3472 # H H n head merge: head count decreases
3460 3473
3461 3474 if not opts.get(b'close_branch'):
3462 3475 for r in parents:
3463 3476 if r.closesbranch() and r.branch() == branch:
3464 3477 repo.ui.status(
3465 3478 _(b'reopening closed branch head %d\n') % r.rev()
3466 3479 )
3467 3480
3468 3481 if repo.ui.debugflag:
3469 3482 repo.ui.write(
3470 3483 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3471 3484 )
3472 3485 elif repo.ui.verbose:
3473 3486 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3474 3487
3475 3488
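# Editor's sketch (standalone, not upstream code): the condition guarding the
# "created new head" message in commitstatus() above, restated as a predicate.
# `bheads` is the set of current branch-head nodes and `parents` is a list of
# (node, branch) pairs for the new changeset's parents -- hypothetical inputs.
def _sketch_creates_new_head(node, branch, bheads, parents, amending=False):
    if amending or not bheads or node in bheads:
        return False
    # a new head is announced only when no parent was already a head of the
    # same named branch
    return not any(
        p_node in bheads and p_branch == branch for p_node, p_branch in parents
    )
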
3476 3489 def postcommitstatus(repo, pats, opts):
3477 3490 return repo.status(match=scmutil.match(repo[None], pats, opts))
3478 3491
3479 3492
3480 3493 def revert(ui, repo, ctx, parents, *pats, **opts):
3481 3494 opts = pycompat.byteskwargs(opts)
3482 3495 parent, p2 = parents
3483 3496 node = ctx.node()
3484 3497
3485 3498 mf = ctx.manifest()
3486 3499 if node == p2:
3487 3500 parent = p2
3488 3501
3489 3502 # need all matching names in dirstate and manifest of target rev,
3490 3503 # so have to walk both. do not print errors if files exist in one
3491 3504 # but not the other. in both cases, filesets should be evaluated against
3492 3505 # workingctx to get consistent result (issue4497). this means 'set:**'
3493 3506 # cannot be used to select missing files from target rev.
3494 3507
3495 3508 # `names` is a mapping for all elements in working copy and target revision
3496 3509 # The mapping is in the form:
3497 3510 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3498 3511 names = {}
3499 3512 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3500 3513
3501 3514 with repo.wlock():
3502 3515 ## filling of the `names` mapping
3503 3516 # walk dirstate to fill `names`
3504 3517
3505 3518 interactive = opts.get(b'interactive', False)
3506 3519 wctx = repo[None]
3507 3520 m = scmutil.match(wctx, pats, opts)
3508 3521
3509 3522 # we'll need this later
3510 3523 targetsubs = sorted(s for s in wctx.substate if m(s))
3511 3524
3512 3525 if not m.always():
3513 3526 matcher = matchmod.badmatch(m, lambda x, y: False)
3514 3527 for abs in wctx.walk(matcher):
3515 3528 names[abs] = m.exact(abs)
3516 3529
3517 3530 # walk target manifest to fill `names`
3518 3531
3519 3532 def badfn(path, msg):
3520 3533 if path in names:
3521 3534 return
3522 3535 if path in ctx.substate:
3523 3536 return
3524 3537 path_ = path + b'/'
3525 3538 for f in names:
3526 3539 if f.startswith(path_):
3527 3540 return
3528 3541 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3529 3542
3530 3543 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3531 3544 if abs not in names:
3532 3545 names[abs] = m.exact(abs)
3533 3546
3534 3547 # Find the status of all files in `names`.
3535 3548 m = scmutil.matchfiles(repo, names)
3536 3549
3537 3550 changes = repo.status(
3538 3551 node1=node, match=m, unknown=True, ignored=True, clean=True
3539 3552 )
3540 3553 else:
3541 3554 changes = repo.status(node1=node, match=m)
3542 3555 for kind in changes:
3543 3556 for abs in kind:
3544 3557 names[abs] = m.exact(abs)
3545 3558
3546 3559 m = scmutil.matchfiles(repo, names)
3547 3560
3548 3561 modified = set(changes.modified)
3549 3562 added = set(changes.added)
3550 3563 removed = set(changes.removed)
3551 3564 _deleted = set(changes.deleted)
3552 3565 unknown = set(changes.unknown)
3553 3566 unknown.update(changes.ignored)
3554 3567 clean = set(changes.clean)
3555 3568 modadded = set()
3556 3569
3557 3570 # We need to account for the state of the file in the dirstate,
3558 3571 # even when we revert against something other than the parent. This
3559 3572 # slightly alters the behavior of revert (doing a backup or not,
3560 3573 # deleting or just forgetting, etc).
3561 3574 if parent == node:
3562 3575 dsmodified = modified
3563 3576 dsadded = added
3564 3577 dsremoved = removed
3565 3578 # store all local modifications, useful later for rename detection
3566 3579 localchanges = dsmodified | dsadded
3567 3580 modified, added, removed = set(), set(), set()
3568 3581 else:
3569 3582 changes = repo.status(node1=parent, match=m)
3570 3583 dsmodified = set(changes.modified)
3571 3584 dsadded = set(changes.added)
3572 3585 dsremoved = set(changes.removed)
3573 3586 # store all local modifications, useful later for rename detection
3574 3587 localchanges = dsmodified | dsadded
3575 3588
3576 3589 # only take into account removes between wc and target
3577 3590 clean |= dsremoved - removed
3578 3591 dsremoved &= removed
3579 3592 # distinguish between dirstate removes and the others
3580 3593 removed -= dsremoved
3581 3594
3582 3595 modadded = added & dsmodified
3583 3596 added -= modadded
3584 3597
3585 3598 # tell newly modified files apart.
3586 3599 dsmodified &= modified
3587 3600 dsmodified |= modified & dsadded # dirstate added may need backup
3588 3601 modified -= dsmodified
3589 3602
3590 3603 # We need to wait for some post-processing to update this set
3591 3604 # before making the distinction. The dirstate will be used for
3592 3605 # that purpose.
3593 3606 dsadded = added
3594 3607
3595 3608 # in case of merge, files that are actually added can be reported as
3596 3609 # modified; we need to post-process the result
3597 3610 if p2 != nullid:
3598 3611 mergeadd = set(dsmodified)
3599 3612 for path in dsmodified:
3600 3613 if path in mf:
3601 3614 mergeadd.remove(path)
3602 3615 dsadded |= mergeadd
3603 3616 dsmodified -= mergeadd
3604 3617
3605 3618 # if f is a rename, update `names` to also revert the source
3606 3619 for f in localchanges:
3607 3620 src = repo.dirstate.copied(f)
3608 3621 # XXX should we check for rename down to target node?
3609 3622 if src and src not in names and repo.dirstate[src] == b'r':
3610 3623 dsremoved.add(src)
3611 3624 names[src] = True
3612 3625
3613 3626 # determine the exact nature of the deleted files
3614 3627 deladded = set(_deleted)
3615 3628 for path in _deleted:
3616 3629 if path in mf:
3617 3630 deladded.remove(path)
3618 3631 deleted = _deleted - deladded
3619 3632
3620 3633 # distinguish between files to forget and the others
3621 3634 added = set()
3622 3635 for abs in dsadded:
3623 3636 if repo.dirstate[abs] != b'a':
3624 3637 added.add(abs)
3625 3638 dsadded -= added
3626 3639
3627 3640 for abs in deladded:
3628 3641 if repo.dirstate[abs] == b'a':
3629 3642 dsadded.add(abs)
3630 3643 deladded -= dsadded
3631 3644
3632 3645 # For files marked as removed, we check if an unknown file is present at
3633 3646 # the same path. If such a file exists, it may need to be backed up.
3634 3647 # Making the distinction at this stage keeps the backup logic
3635 3648 # simpler.
3636 3649 removunk = set()
3637 3650 for abs in removed:
3638 3651 target = repo.wjoin(abs)
3639 3652 if os.path.lexists(target):
3640 3653 removunk.add(abs)
3641 3654 removed -= removunk
3642 3655
3643 3656 dsremovunk = set()
3644 3657 for abs in dsremoved:
3645 3658 target = repo.wjoin(abs)
3646 3659 if os.path.lexists(target):
3647 3660 dsremovunk.add(abs)
3648 3661 dsremoved -= dsremovunk
3649 3662
3650 3663 # actions to be actually performed by revert
3651 3664 # (<list of files>, <message>) tuple
3652 3665 actions = {
3653 3666 b'revert': ([], _(b'reverting %s\n')),
3654 3667 b'add': ([], _(b'adding %s\n')),
3655 3668 b'remove': ([], _(b'removing %s\n')),
3656 3669 b'drop': ([], _(b'removing %s\n')),
3657 3670 b'forget': ([], _(b'forgetting %s\n')),
3658 3671 b'undelete': ([], _(b'undeleting %s\n')),
3659 3672 b'noop': (None, _(b'no changes needed to %s\n')),
3660 3673 b'unknown': (None, _(b'file not managed: %s\n')),
3661 3674 }
3662 3675
3663 3676 # "constant" that convey the backup strategy.
3664 3677 # All set to `discard` if `no-backup` is set do avoid checking
3665 3678 # no_backup lower in the code.
3666 3679 # These values are ordered for comparison purposes
3667 3680 backupinteractive = 3 # do backup if interactively modified
3668 3681 backup = 2 # unconditionally do backup
3669 3682 check = 1 # check if the existing file differs from target
3670 3683 discard = 0 # never do backup
3671 3684 if opts.get(b'no_backup'):
3672 3685 backupinteractive = backup = check = discard
3673 3686 if interactive:
3674 3687 dsmodifiedbackup = backupinteractive
3675 3688 else:
3676 3689 dsmodifiedbackup = backup
3677 3690 tobackup = set()
3678 3691
3679 3692 backupanddel = actions[b'remove']
3680 3693 if not opts.get(b'no_backup'):
3681 3694 backupanddel = actions[b'drop']
3682 3695
3683 3696 disptable = (
3684 3697 # dispatch table:
3685 3698 # file state
3686 3699 # action
3687 3700 # make backup
3688 3701 ## Sets that result in changes to files on disk
3689 3702 # Modified compared to target, no local change
3690 3703 (modified, actions[b'revert'], discard),
3691 3704 # Modified compared to target, but local file is deleted
3692 3705 (deleted, actions[b'revert'], discard),
3693 3706 # Modified compared to target, local change
3694 3707 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3695 3708 # Added since target
3696 3709 (added, actions[b'remove'], discard),
3697 3710 # Added in working directory
3698 3711 (dsadded, actions[b'forget'], discard),
3699 3712 # Added since target, have local modification
3700 3713 (modadded, backupanddel, backup),
3701 3714 # Added since target but file is missing in working directory
3702 3715 (deladded, actions[b'drop'], discard),
3703 3716 # Removed since target, before working copy parent
3704 3717 (removed, actions[b'add'], discard),
3705 3718 # Same as `removed` but an unknown file exists at the same path
3706 3719 (removunk, actions[b'add'], check),
3707 3720 # Removed since target, marked as such in working copy parent
3708 3721 (dsremoved, actions[b'undelete'], discard),
3709 3722 # Same as `dsremoved` but an unknown file exists at the same path
3710 3723 (dsremovunk, actions[b'undelete'], check),
3711 3724 ## the following sets do not result in any file changes
3712 3725 # File with no modification
3713 3726 (clean, actions[b'noop'], discard),
3714 3727 # Existing file, not tracked anywhere
3715 3728 (unknown, actions[b'unknown'], discard),
3716 3729 )
3717 3730
3718 3731 for abs, exact in sorted(names.items()):
3719 3732 # target file to be touched on disk (relative to cwd)
3720 3733 target = repo.wjoin(abs)
3721 3734 # search the entry in the dispatch table.
3722 3735 # if the file is in any of these sets, it was touched in the working
3723 3736 # directory parent and we are sure it needs to be reverted.
3724 3737 for table, (xlist, msg), dobackup in disptable:
3725 3738 if abs not in table:
3726 3739 continue
3727 3740 if xlist is not None:
3728 3741 xlist.append(abs)
3729 3742 if dobackup:
3730 3743 # If in interactive mode, don't automatically create
3731 3744 # .orig files (issue4793)
3732 3745 if dobackup == backupinteractive:
3733 3746 tobackup.add(abs)
3734 3747 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3735 3748 absbakname = scmutil.backuppath(ui, repo, abs)
3736 3749 bakname = os.path.relpath(
3737 3750 absbakname, start=repo.root
3738 3751 )
3739 3752 ui.note(
3740 3753 _(b'saving current version of %s as %s\n')
3741 3754 % (uipathfn(abs), uipathfn(bakname))
3742 3755 )
3743 3756 if not opts.get(b'dry_run'):
3744 3757 if interactive:
3745 3758 util.copyfile(target, absbakname)
3746 3759 else:
3747 3760 util.rename(target, absbakname)
3748 3761 if opts.get(b'dry_run'):
3749 3762 if ui.verbose or not exact:
3750 3763 ui.status(msg % uipathfn(abs))
3751 3764 elif exact:
3752 3765 ui.warn(msg % uipathfn(abs))
3753 3766 break
3754 3767
3755 3768 if not opts.get(b'dry_run'):
3756 3769 needdata = (b'revert', b'add', b'undelete')
3757 3770 oplist = [actions[name][0] for name in needdata]
3758 3771 prefetch = scmutil.prefetchfiles
3759 3772 matchfiles = scmutil.matchfiles
3760 3773 prefetch(
3761 3774 repo,
3762 3775 [ctx.rev()],
3763 3776 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3764 3777 )
3765 3778 match = scmutil.match(repo[None], pats)
3766 3779 _performrevert(
3767 3780 repo,
3768 3781 parents,
3769 3782 ctx,
3770 3783 names,
3771 3784 uipathfn,
3772 3785 actions,
3773 3786 match,
3774 3787 interactive,
3775 3788 tobackup,
3776 3789 )
3777 3790
3778 3791 if targetsubs:
3779 3792 # Revert the subrepos on the revert list
3780 3793 for sub in targetsubs:
3781 3794 try:
3782 3795 wctx.sub(sub).revert(
3783 3796 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3784 3797 )
3785 3798 except KeyError:
3786 3799 raise error.Abort(
3787 3800 b"subrepository '%s' does not exist in %s!"
3788 3801 % (sub, short(ctx.node()))
3789 3802 )
3790 3803
3791 3804
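# Editor's sketch (illustrative only, not part of cmdutil): the heart of
# revert() above is a dispatch table mapping "which status set contains the
# file" to an action and a backup level; the first matching row wins, and the
# backup levels are plain integers ordered so they can be compared (3 =
# interactive backup, 2 = always back up, 1 = back up if contents differ,
# 0 = never). A generic, standalone version of that first-match lookup:
def _sketch_dispatch(path, disptable):
    """disptable: sequence of (fileset, action, dobackup) rows.

    Return (action, dobackup) from the first row whose fileset contains
    `path`, or None when no row matches (the file is left untouched).
    """
    for fileset, action, dobackup in disptable:
        if path in fileset:
            return action, dobackup
    return None
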
3792 3805 def _performrevert(
3793 3806 repo,
3794 3807 parents,
3795 3808 ctx,
3796 3809 names,
3797 3810 uipathfn,
3798 3811 actions,
3799 3812 match,
3800 3813 interactive=False,
3801 3814 tobackup=None,
3802 3815 ):
3803 3816 """function that actually perform all the actions computed for revert
3804 3817
3805 3818 This is an independent function to let extensions plug in and react to
3806 3819 the imminent revert.
3807 3820
3808 3821 Make sure you have the working directory locked when calling this function.
3809 3822 """
3810 3823 parent, p2 = parents
3811 3824 node = ctx.node()
3812 3825 excluded_files = []
3813 3826
3814 3827 def checkout(f):
3815 3828 fc = ctx[f]
3816 3829 repo.wwrite(f, fc.data(), fc.flags())
3817 3830
3818 3831 def doremove(f):
3819 3832 try:
3820 3833 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3821 3834 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3822 3835 except OSError:
3823 3836 pass
3824 3837 repo.dirstate.remove(f)
3825 3838
3826 3839 def prntstatusmsg(action, f):
3827 3840 exact = names[f]
3828 3841 if repo.ui.verbose or not exact:
3829 3842 repo.ui.status(actions[action][1] % uipathfn(f))
3830 3843
3831 3844 audit_path = pathutil.pathauditor(repo.root, cached=True)
3832 3845 for f in actions[b'forget'][0]:
3833 3846 if interactive:
3834 3847 choice = repo.ui.promptchoice(
3835 3848 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3836 3849 )
3837 3850 if choice == 0:
3838 3851 prntstatusmsg(b'forget', f)
3839 3852 repo.dirstate.drop(f)
3840 3853 else:
3841 3854 excluded_files.append(f)
3842 3855 else:
3843 3856 prntstatusmsg(b'forget', f)
3844 3857 repo.dirstate.drop(f)
3845 3858 for f in actions[b'remove'][0]:
3846 3859 audit_path(f)
3847 3860 if interactive:
3848 3861 choice = repo.ui.promptchoice(
3849 3862 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3850 3863 )
3851 3864 if choice == 0:
3852 3865 prntstatusmsg(b'remove', f)
3853 3866 doremove(f)
3854 3867 else:
3855 3868 excluded_files.append(f)
3856 3869 else:
3857 3870 prntstatusmsg(b'remove', f)
3858 3871 doremove(f)
3859 3872 for f in actions[b'drop'][0]:
3860 3873 audit_path(f)
3861 3874 prntstatusmsg(b'drop', f)
3862 3875 repo.dirstate.remove(f)
3863 3876
3864 3877 normal = None
3865 3878 if node == parent:
3866 3879 # We're reverting to our parent. If possible, we'd like status
3867 3880 # to report the file as clean. We have to use normallookup for
3868 3881 # merges to avoid losing information about merged/dirty files.
3869 3882 if p2 != nullid:
3870 3883 normal = repo.dirstate.normallookup
3871 3884 else:
3872 3885 normal = repo.dirstate.normal
3873 3886
3874 3887 newlyaddedandmodifiedfiles = set()
3875 3888 if interactive:
3876 3889 # Prompt the user for changes to revert
3877 3890 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3878 3891 m = scmutil.matchfiles(repo, torevert)
3879 3892 diffopts = patch.difffeatureopts(
3880 3893 repo.ui,
3881 3894 whitespace=True,
3882 3895 section=b'commands',
3883 3896 configprefix=b'revert.interactive.',
3884 3897 )
3885 3898 diffopts.nodates = True
3886 3899 diffopts.git = True
3887 3900 operation = b'apply'
3888 3901 if node == parent:
3889 3902 if repo.ui.configbool(
3890 3903 b'experimental', b'revert.interactive.select-to-keep'
3891 3904 ):
3892 3905 operation = b'keep'
3893 3906 else:
3894 3907 operation = b'discard'
3895 3908
3896 3909 if operation == b'apply':
3897 3910 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3898 3911 else:
3899 3912 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3900 3913 originalchunks = patch.parsepatch(diff)
3901 3914
3902 3915 try:
3903 3916
3904 3917 chunks, opts = recordfilter(
3905 3918 repo.ui, originalchunks, match, operation=operation
3906 3919 )
3907 3920 if operation == b'discard':
3908 3921 chunks = patch.reversehunks(chunks)
3909 3922
3910 3923 except error.PatchError as err:
3911 3924 raise error.Abort(_(b'error parsing patch: %s') % err)
3912 3925
3913 3926 # FIXME: when doing an interactive revert of a copy, there's no way of
3914 3927 # performing a partial revert of the added file; the only option is
3915 3928 # "remove added file <name> (Yn)?", so we don't need to worry about the
3916 3929 # alsorestore value. Ideally we'd be able to partially revert
3917 3930 # copied/renamed files.
3918 3931 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3919 3932 chunks, originalchunks
3920 3933 )
3921 3934 if tobackup is None:
3922 3935 tobackup = set()
3923 3936 # Apply changes
3924 3937 fp = stringio()
3925 3938 # chunks are serialized per file, but files aren't sorted
3926 3939 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3927 3940 prntstatusmsg(b'revert', f)
3928 3941 files = set()
3929 3942 for c in chunks:
3930 3943 if ishunk(c):
3931 3944 abs = c.header.filename()
3932 3945 # Create a backup file only if this hunk should be backed up
3933 3946 if c.header.filename() in tobackup:
3934 3947 target = repo.wjoin(abs)
3935 3948 bakname = scmutil.backuppath(repo.ui, repo, abs)
3936 3949 util.copyfile(target, bakname)
3937 3950 tobackup.remove(abs)
3938 3951 if abs not in files:
3939 3952 files.add(abs)
3940 3953 if operation == b'keep':
3941 3954 checkout(abs)
3942 3955 c.write(fp)
3943 3956 dopatch = fp.tell()
3944 3957 fp.seek(0)
3945 3958 if dopatch:
3946 3959 try:
3947 3960 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3948 3961 except error.PatchError as err:
3949 3962 raise error.Abort(pycompat.bytestr(err))
3950 3963 del fp
3951 3964 else:
3952 3965 for f in actions[b'revert'][0]:
3953 3966 prntstatusmsg(b'revert', f)
3954 3967 checkout(f)
3955 3968 if normal:
3956 3969 normal(f)
3957 3970
3958 3971 for f in actions[b'add'][0]:
3959 3972 # Don't check out modified files; they are already created by the diff
3960 3973 if f not in newlyaddedandmodifiedfiles:
3961 3974 prntstatusmsg(b'add', f)
3962 3975 checkout(f)
3963 3976 repo.dirstate.add(f)
3964 3977
3965 3978 normal = repo.dirstate.normallookup
3966 3979 if node == parent and p2 == nullid:
3967 3980 normal = repo.dirstate.normal
3968 3981 for f in actions[b'undelete'][0]:
3969 3982 if interactive:
3970 3983 choice = repo.ui.promptchoice(
3971 3984 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3972 3985 )
3973 3986 if choice == 0:
3974 3987 prntstatusmsg(b'undelete', f)
3975 3988 checkout(f)
3976 3989 normal(f)
3977 3990 else:
3978 3991 excluded_files.append(f)
3979 3992 else:
3980 3993 prntstatusmsg(b'undelete', f)
3981 3994 checkout(f)
3982 3995 normal(f)
3983 3996
3984 3997 copied = copies.pathcopies(repo[parent], ctx)
3985 3998
3986 3999 for f in (
3987 4000 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
3988 4001 ):
3989 4002 if f in copied:
3990 4003 repo.dirstate.copy(copied[f], f)
3991 4004
3992 4005
3993 4006 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3994 4007 # commands.outgoing. "missing" is "missing" of the result of
3995 4008 # "findcommonoutgoing()"
3996 4009 outgoinghooks = util.hooks()
3997 4010
3998 4011 # a list of (ui, repo) functions called by commands.summary
3999 4012 summaryhooks = util.hooks()
4000 4013
4001 4014 # a list of (ui, repo, opts, changes) functions called by commands.summary.
4002 4015 #
4003 4016 # functions should return tuple of booleans below, if 'changes' is None:
4004 4017 # (whether-incomings-are-needed, whether-outgoings-are-needed)
4005 4018 #
4006 4019 # otherwise, 'changes' is a tuple of tuples below:
4007 4020 # - (sourceurl, sourcebranch, sourcepeer, incoming)
4008 4021 # - (desturl, destbranch, destpeer, outgoing)
4009 4022 summaryremotehooks = util.hooks()
4010 4023
4011 4024
4012 4025 def checkunfinished(repo, commit=False, skipmerge=False):
4013 4026 '''Look for an unfinished multistep operation, like graft, and abort
4014 4027 if found. It's probably good to check this right before
4015 4028 bailifchanged().
4016 4029 '''
4017 4030 # Check for non-clearable states first, so things like rebase will take
4018 4031 # precedence over update.
4019 4032 for state in statemod._unfinishedstates:
4020 4033 if (
4021 4034 state._clearable
4022 4035 or (commit and state._allowcommit)
4023 4036 or state._reportonly
4024 4037 ):
4025 4038 continue
4026 4039 if state.isunfinished(repo):
4027 4040 raise error.Abort(state.msg(), hint=state.hint())
4028 4041
4029 4042 for s in statemod._unfinishedstates:
4030 4043 if (
4031 4044 not s._clearable
4032 4045 or (commit and s._allowcommit)
4033 4046 or (s._opname == b'merge' and skipmerge)
4034 4047 or s._reportonly
4035 4048 ):
4036 4049 continue
4037 4050 if s.isunfinished(repo):
4038 4051 raise error.Abort(s.msg(), hint=s.hint())
4039 4052
4040 4053
4041 4054 def clearunfinished(repo):
4042 4055 '''Check for unfinished operations (as above), and clear the ones
4043 4056 that are clearable.
4044 4057 '''
4045 4058 for state in statemod._unfinishedstates:
4046 4059 if state._reportonly:
4047 4060 continue
4048 4061 if not state._clearable and state.isunfinished(repo):
4049 4062 raise error.Abort(state.msg(), hint=state.hint())
4050 4063
4051 4064 for s in statemod._unfinishedstates:
4052 4065 if s._opname == b'merge' or s._reportonly:
4053 4066 continue
4054 4067 if s._clearable and s.isunfinished(repo):
4055 4068 util.unlink(repo.vfs.join(s._fname))
4056 4069
4057 4070
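# Editor's sketch (standalone; the attribute names only mirror the
# _unfinishedstates entries used above): checkunfinished() makes two passes
# over the registered states so that non-clearable operations (e.g. rebase)
# take precedence over clearable ones (e.g. update). The real second pass also
# honours skipmerge for the merge state; this keeps just the ordering:
import collections

_SketchState = collections.namedtuple(
    '_SketchState', 'name clearable allowcommit reportonly unfinished'
)


def _sketch_checkunfinished(states, commit=False):
    # pass 1: non-clearable states abort first, so they take precedence
    for s in states:
        if s.clearable or (commit and s.allowcommit) or s.reportonly:
            continue
        if s.unfinished:
            raise RuntimeError('unfinished %s in progress' % s.name)
    # pass 2: the remaining, clearable states
    for s in states:
        if not s.clearable or (commit and s.allowcommit) or s.reportonly:
            continue
        if s.unfinished:
            raise RuntimeError('unfinished %s in progress' % s.name)
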
4058 4071 def getunfinishedstate(repo):
4059 4072 '''Check for an unfinished operation and return the statecheck object
4060 4073 for it'''
4061 4074 for state in statemod._unfinishedstates:
4062 4075 if state.isunfinished(repo):
4063 4076 return state
4064 4077 return None
4065 4078
4066 4079
4067 4080 def howtocontinue(repo):
4068 4081 '''Check for an unfinished operation and return the command to finish
4069 4082 it.
4070 4083
4071 4084 statemod._unfinishedstates list is checked for an unfinished operation
4072 4085 and the corresponding message to finish it is generated if a method to
4073 4086 continue is supported by the operation.
4074 4087
4075 4088 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4076 4089 a boolean.
4077 4090 '''
4078 4091 contmsg = _(b"continue: %s")
4079 4092 for state in statemod._unfinishedstates:
4080 4093 if not state._continueflag:
4081 4094 continue
4082 4095 if state.isunfinished(repo):
4083 4096 return contmsg % state.continuemsg(), True
4084 4097 if repo[None].dirty(missing=True, merge=False, branch=False):
4085 4098 return contmsg % _(b"hg commit"), False
4086 4099 return None, None
4087 4100
4088 4101
4089 4102 def checkafterresolved(repo):
4090 4103 '''Inform the user about the next action after completing hg resolve
4091 4104
4092 4105 If there's an unfinished operation that supports the continue flag,
4093 4106 howtocontinue will yield repo.ui.warn as the reporter.
4094 4107
4095 4108 Otherwise, it will yield repo.ui.note.
4096 4109 '''
4097 4110 msg, warning = howtocontinue(repo)
4098 4111 if msg is not None:
4099 4112 if warning:
4100 4113 repo.ui.warn(b"%s\n" % msg)
4101 4114 else:
4102 4115 repo.ui.note(b"%s\n" % msg)
4103 4116
4104 4117
4105 4118 def wrongtooltocontinue(repo, task):
4106 4119 '''Raise an abort suggesting how to properly continue if there is an
4107 4120 active task.
4108 4121
4109 4122 Uses howtocontinue() to find the active task.
4110 4123
4111 4124 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4112 4125 a hint.
4113 4126 '''
4114 4127 after = howtocontinue(repo)
4115 4128 hint = None
4116 4129 if after[1]:
4117 4130 hint = after[0]
4118 4131 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4119 4132
4120 4133
4121 4134 def abortgraft(ui, repo, graftstate):
4122 4135 """abort the interrupted graft and rollbacks to the state before interrupted
4123 4136 graft"""
4124 4137 if not graftstate.exists():
4125 4138 raise error.Abort(_(b"no interrupted graft to abort"))
4126 4139 statedata = readgraftstate(repo, graftstate)
4127 4140 newnodes = statedata.get(b'newnodes')
4128 4141 if newnodes is None:
4129 4142 # an old graft state which does not have all the data required to abort
4130 4143 # the graft
4131 4144 raise error.Abort(_(b"cannot abort using an old graftstate"))
4132 4145
4133 4146 # changeset from which graft operation was started
4134 4147 if len(newnodes) > 0:
4135 4148 startctx = repo[newnodes[0]].p1()
4136 4149 else:
4137 4150 startctx = repo[b'.']
4138 4151 # whether to strip or not
4139 4152 cleanup = False
4140 4153 from . import hg
4141 4154
4142 4155 if newnodes:
4143 4156 newnodes = [repo[r].rev() for r in newnodes]
4144 4157 cleanup = True
4145 4158 # checking that none of the newnodes turned public or is public
4146 4159 immutable = [c for c in newnodes if not repo[c].mutable()]
4147 4160 if immutable:
4148 4161 repo.ui.warn(
4149 4162 _(b"cannot clean up public changesets %s\n")
4150 4163 % b', '.join(bytes(repo[r]) for r in immutable),
4151 4164 hint=_(b"see 'hg help phases' for details"),
4152 4165 )
4153 4166 cleanup = False
4154 4167
4155 4168 # checking that no new nodes are created on top of grafted revs
4156 4169 desc = set(repo.changelog.descendants(newnodes))
4157 4170 if desc - set(newnodes):
4158 4171 repo.ui.warn(
4159 4172 _(
4160 4173 b"new changesets detected on destination "
4161 4174 b"branch, can't strip\n"
4162 4175 )
4163 4176 )
4164 4177 cleanup = False
4165 4178
4166 4179 if cleanup:
4167 4180 with repo.wlock(), repo.lock():
4168 4181 hg.updaterepo(repo, startctx.node(), overwrite=True)
4169 4182 # stripping the new nodes created
4170 4183 strippoints = [
4171 4184 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4172 4185 ]
4173 4186 repair.strip(repo.ui, repo, strippoints, backup=False)
4174 4187
4175 4188 if not cleanup:
4176 4189 # we don't update to the startnode if we can't strip
4177 4190 startctx = repo[b'.']
4178 4191 hg.updaterepo(repo, startctx.node(), overwrite=True)
4179 4192
4180 4193 ui.status(_(b"graft aborted\n"))
4181 4194 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4182 4195 graftstate.delete()
4183 4196 return 0
4184 4197
4185 4198
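# Editor's sketch (standalone, illustrative names): abortgraft() above only
# strips the newly grafted nodes when it is safe to do so -- none of them may
# have become public, and none may have grown descendants outside the grafted
# set. The same safety check in isolation:
def _sketch_safe_to_strip(newnodes, is_public, descendants_of):
    """newnodes: iterable of grafted revs; is_public(rev) -> bool;
    descendants_of(revs) -> set of all descendants of those revs."""
    newnodes = set(newnodes)
    if any(is_public(r) for r in newnodes):
        return False  # cannot clean up public changesets
    if descendants_of(newnodes) - newnodes:
        return False  # new changesets were created on top of the grafted revs
    return True
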
4186 4199 def readgraftstate(repo, graftstate):
4187 4200 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4188 4201 """read the graft state file and return a dict of the data stored in it"""
4189 4202 try:
4190 4203 return graftstate.read()
4191 4204 except error.CorruptedState:
4192 4205 nodes = repo.vfs.read(b'graftstate').splitlines()
4193 4206 return {b'nodes': nodes}
4194 4207
4195 4208
4196 4209 def hgabortgraft(ui, repo):
4197 4210 """ abort logic for aborting graft using 'hg abort'"""
4198 4211 with repo.wlock():
4199 4212 graftstate = statemod.cmdstate(repo, b'graftstate')
4200 4213 return abortgraft(ui, repo, graftstate)