copy: rewrite walkpat() to depend less on dirstate...
Martin von Zweigbergk
r44839:2bd3b95f default
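The change below rewrites walkpat() in copy() so that, instead of reading dirstate state codes ('?' for unknown, 'r' for removed), it tests each walked path for membership in the working context (wctx) and its first parent (pctx). A minimal standalone sketch of the resulting selection rule, with an illustrative helper name (should_copy) and boolean parameters standing in for the membership tests in the real code:

def should_copy(in_wctx, in_pctx, after):
    """Decide whether one walked path is a valid copy source.

    in_wctx / in_pctx stand in for the `abs in wctx` / `abs in pctx`
    checks in the rewritten walkpat(); `after` is the --after flag.
    """
    if in_wctx:
        return True, None                  # tracked in the working copy: copy it
    if in_pctx:
        if after:
            return True, None              # removed file, but --after may still record the copy
        return False, 'marked for remove'  # removed and no --after: warn (if exact) and skip
    return False, 'not managed'            # unknown file: warn (if exact) and skip

# The old code expressed the same rule via badstates = '?' (with --after) or '?r'.
assert should_copy(True, True, False) == (True, None)
assert should_copy(False, True, True) == (True, None)
assert should_copy(False, True, False) == (False, 'marked for remove')
assert should_copy(False, False, True) == (False, 'not managed')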
@@ -1,4072 +1,4075 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy as copymod
11 11 import errno
12 12 import os
13 13 import re
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 hex,
18 18 nullid,
19 19 nullrev,
20 20 short,
21 21 )
22 22 from .pycompat import (
23 23 getattr,
24 24 open,
25 25 setattr,
26 26 )
27 27 from .thirdparty import attr
28 28
29 29 from . import (
30 30 bookmarks,
31 31 changelog,
32 32 copies,
33 33 crecord as crecordmod,
34 34 dirstateguard,
35 35 encoding,
36 36 error,
37 37 formatter,
38 38 logcmdutil,
39 39 match as matchmod,
40 40 merge as mergemod,
41 41 mergeutil,
42 42 obsolete,
43 43 patch,
44 44 pathutil,
45 45 phases,
46 46 pycompat,
47 47 repair,
48 48 revlog,
49 49 rewriteutil,
50 50 scmutil,
51 51 smartset,
52 52 state as statemod,
53 53 subrepoutil,
54 54 templatekw,
55 55 templater,
56 56 util,
57 57 vfs as vfsmod,
58 58 )
59 59
60 60 from .utils import (
61 61 dateutil,
62 62 stringutil,
63 63 )
64 64
65 65 if pycompat.TYPE_CHECKING:
66 66 from typing import (
67 67 Any,
68 68 Dict,
69 69 )
70 70
71 71 for t in (Any, Dict):
72 72 assert t
73 73
74 74 stringio = util.stringio
75 75
76 76 # templates of common command options
77 77
78 78 dryrunopts = [
79 79 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
80 80 ]
81 81
82 82 confirmopts = [
83 83 (b'', b'confirm', None, _(b'ask before applying actions')),
84 84 ]
85 85
86 86 remoteopts = [
87 87 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
88 88 (
89 89 b'',
90 90 b'remotecmd',
91 91 b'',
92 92 _(b'specify hg command to run on the remote side'),
93 93 _(b'CMD'),
94 94 ),
95 95 (
96 96 b'',
97 97 b'insecure',
98 98 None,
99 99 _(b'do not verify server certificate (ignoring web.cacerts config)'),
100 100 ),
101 101 ]
102 102
103 103 walkopts = [
104 104 (
105 105 b'I',
106 106 b'include',
107 107 [],
108 108 _(b'include names matching the given patterns'),
109 109 _(b'PATTERN'),
110 110 ),
111 111 (
112 112 b'X',
113 113 b'exclude',
114 114 [],
115 115 _(b'exclude names matching the given patterns'),
116 116 _(b'PATTERN'),
117 117 ),
118 118 ]
119 119
120 120 commitopts = [
121 121 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
122 122 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
123 123 ]
124 124
125 125 commitopts2 = [
126 126 (
127 127 b'd',
128 128 b'date',
129 129 b'',
130 130 _(b'record the specified date as commit date'),
131 131 _(b'DATE'),
132 132 ),
133 133 (
134 134 b'u',
135 135 b'user',
136 136 b'',
137 137 _(b'record the specified user as committer'),
138 138 _(b'USER'),
139 139 ),
140 140 ]
141 141
142 142 commitopts3 = [
143 143 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
144 144 (b'U', b'currentuser', None, _(b'record the current user as committer')),
145 145 ]
146 146
147 147 formatteropts = [
148 148 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
149 149 ]
150 150
151 151 templateopts = [
152 152 (
153 153 b'',
154 154 b'style',
155 155 b'',
156 156 _(b'display using template map file (DEPRECATED)'),
157 157 _(b'STYLE'),
158 158 ),
159 159 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
160 160 ]
161 161
162 162 logopts = [
163 163 (b'p', b'patch', None, _(b'show patch')),
164 164 (b'g', b'git', None, _(b'use git extended diff format')),
165 165 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
166 166 (b'M', b'no-merges', None, _(b'do not show merges')),
167 167 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
168 168 (b'G', b'graph', None, _(b"show the revision DAG")),
169 169 ] + templateopts
170 170
171 171 diffopts = [
172 172 (b'a', b'text', None, _(b'treat all files as text')),
173 173 (
174 174 b'g',
175 175 b'git',
176 176 None,
177 177 _(b'use git extended diff format (DEFAULT: diff.git)'),
178 178 ),
179 179 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
180 180 (b'', b'nodates', None, _(b'omit dates from diff headers')),
181 181 ]
182 182
183 183 diffwsopts = [
184 184 (
185 185 b'w',
186 186 b'ignore-all-space',
187 187 None,
188 188 _(b'ignore white space when comparing lines'),
189 189 ),
190 190 (
191 191 b'b',
192 192 b'ignore-space-change',
193 193 None,
194 194 _(b'ignore changes in the amount of white space'),
195 195 ),
196 196 (
197 197 b'B',
198 198 b'ignore-blank-lines',
199 199 None,
200 200 _(b'ignore changes whose lines are all blank'),
201 201 ),
202 202 (
203 203 b'Z',
204 204 b'ignore-space-at-eol',
205 205 None,
206 206 _(b'ignore changes in whitespace at EOL'),
207 207 ),
208 208 ]
209 209
210 210 diffopts2 = (
211 211 [
212 212 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
213 213 (
214 214 b'p',
215 215 b'show-function',
216 216 None,
217 217 _(
218 218 b'show which function each change is in (DEFAULT: diff.showfunc)'
219 219 ),
220 220 ),
221 221 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
222 222 ]
223 223 + diffwsopts
224 224 + [
225 225 (
226 226 b'U',
227 227 b'unified',
228 228 b'',
229 229 _(b'number of lines of context to show'),
230 230 _(b'NUM'),
231 231 ),
232 232 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
233 233 (
234 234 b'',
235 235 b'root',
236 236 b'',
237 237 _(b'produce diffs relative to subdirectory'),
238 238 _(b'DIR'),
239 239 ),
240 240 ]
241 241 )
242 242
243 243 mergetoolopts = [
244 244 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
245 245 ]
246 246
247 247 similarityopts = [
248 248 (
249 249 b's',
250 250 b'similarity',
251 251 b'',
252 252 _(b'guess renamed files by similarity (0<=s<=100)'),
253 253 _(b'SIMILARITY'),
254 254 )
255 255 ]
256 256
257 257 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
258 258
259 259 debugrevlogopts = [
260 260 (b'c', b'changelog', False, _(b'open changelog')),
261 261 (b'm', b'manifest', False, _(b'open manifest')),
262 262 (b'', b'dir', b'', _(b'open directory manifest')),
263 263 ]
264 264
265 265 # special string such that everything below this line will be ignored in the
266 266 # editor text
267 267 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
268 268
269 269
270 270 def check_at_most_one_arg(opts, *args):
271 271 """abort if more than one of the arguments are in opts
272 272
273 273 Returns the unique argument or None if none of them were specified.
274 274 """
275 275
276 276 def to_display(name):
277 277 return pycompat.sysbytes(name).replace(b'_', b'-')
278 278
279 279 previous = None
280 280 for x in args:
281 281 if opts.get(x):
282 282 if previous:
283 283 raise error.Abort(
284 284 _(b'cannot specify both --%s and --%s')
285 285 % (to_display(previous), to_display(x))
286 286 )
287 287 previous = x
288 288 return previous
289 289
290 290
291 291 def check_incompatible_arguments(opts, first, others):
292 292 """abort if the first argument is given along with any of the others
293 293
294 294 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
295 295 among themselves, and they're passed as a single collection.
296 296 """
297 297 for other in others:
298 298 check_at_most_one_arg(opts, first, other)
299 299
300 300
301 301 def resolvecommitoptions(ui, opts):
302 302 """modify commit options dict to handle related options
303 303
304 304 The return value indicates that ``rewrite.update-timestamp`` is the reason
305 305 the ``date`` option is set.
306 306 """
307 307 check_at_most_one_arg(opts, b'date', b'currentdate')
308 308 check_at_most_one_arg(opts, b'user', b'currentuser')
309 309
310 310 datemaydiffer = False # date-only change should be ignored?
311 311
312 312 if opts.get(b'currentdate'):
313 313 opts[b'date'] = b'%d %d' % dateutil.makedate()
314 314 elif (
315 315 not opts.get(b'date')
316 316 and ui.configbool(b'rewrite', b'update-timestamp')
317 317 and opts.get(b'currentdate') is None
318 318 ):
319 319 opts[b'date'] = b'%d %d' % dateutil.makedate()
320 320 datemaydiffer = True
321 321
322 322 if opts.get(b'currentuser'):
323 323 opts[b'user'] = ui.username()
324 324
325 325 return datemaydiffer
326 326
327 327
328 328 def checknotesize(ui, opts):
329 329 """ make sure note is of valid format """
330 330
331 331 note = opts.get(b'note')
332 332 if not note:
333 333 return
334 334
335 335 if len(note) > 255:
336 336 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
337 337 if b'\n' in note:
338 338 raise error.Abort(_(b"note cannot contain a newline"))
339 339
340 340
341 341 def ishunk(x):
342 342 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
343 343 return isinstance(x, hunkclasses)
344 344
345 345
346 346 def newandmodified(chunks, originalchunks):
347 347 newlyaddedandmodifiedfiles = set()
348 348 alsorestore = set()
349 349 for chunk in chunks:
350 350 if (
351 351 ishunk(chunk)
352 352 and chunk.header.isnewfile()
353 353 and chunk not in originalchunks
354 354 ):
355 355 newlyaddedandmodifiedfiles.add(chunk.header.filename())
356 356 alsorestore.update(
357 357 set(chunk.header.files()) - {chunk.header.filename()}
358 358 )
359 359 return newlyaddedandmodifiedfiles, alsorestore
360 360
361 361
362 362 def parsealiases(cmd):
363 363 return cmd.split(b"|")
364 364
365 365
366 366 def setupwrapcolorwrite(ui):
367 367 # wrap ui.write so diff output can be labeled/colorized
368 368 def wrapwrite(orig, *args, **kw):
369 369 label = kw.pop('label', b'')
370 370 for chunk, l in patch.difflabel(lambda: args):
371 371 orig(chunk, label=label + l)
372 372
373 373 oldwrite = ui.write
374 374
375 375 def wrap(*args, **kwargs):
376 376 return wrapwrite(oldwrite, *args, **kwargs)
377 377
378 378 setattr(ui, 'write', wrap)
379 379 return oldwrite
380 380
381 381
382 382 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
383 383 try:
384 384 if usecurses:
385 385 if testfile:
386 386 recordfn = crecordmod.testdecorator(
387 387 testfile, crecordmod.testchunkselector
388 388 )
389 389 else:
390 390 recordfn = crecordmod.chunkselector
391 391
392 392 return crecordmod.filterpatch(
393 393 ui, originalhunks, recordfn, operation
394 394 )
395 395 except crecordmod.fallbackerror as e:
396 396 ui.warn(b'%s\n' % e)
397 397 ui.warn(_(b'falling back to text mode\n'))
398 398
399 399 return patch.filterpatch(ui, originalhunks, match, operation)
400 400
401 401
402 402 def recordfilter(ui, originalhunks, match, operation=None):
403 403 """ Prompts the user to filter the originalhunks and return a list of
404 404 selected hunks.
405 405 *operation* is used to build ui messages that indicate to the user what
406 406 kind of filtering they are doing: reverting, committing, shelving, etc.
407 407 (see patch.filterpatch).
408 408 """
409 409 usecurses = crecordmod.checkcurses(ui)
410 410 testfile = ui.config(b'experimental', b'crecordtest')
411 411 oldwrite = setupwrapcolorwrite(ui)
412 412 try:
413 413 newchunks, newopts = filterchunks(
414 414 ui, originalhunks, usecurses, testfile, match, operation
415 415 )
416 416 finally:
417 417 ui.write = oldwrite
418 418 return newchunks, newopts
419 419
420 420
421 421 def dorecord(
422 422 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
423 423 ):
424 424 opts = pycompat.byteskwargs(opts)
425 425 if not ui.interactive():
426 426 if cmdsuggest:
427 427 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
428 428 else:
429 429 msg = _(b'running non-interactively')
430 430 raise error.Abort(msg)
431 431
432 432 # make sure username is set before going interactive
433 433 if not opts.get(b'user'):
434 434 ui.username() # raise exception, username not provided
435 435
436 436 def recordfunc(ui, repo, message, match, opts):
437 437 """This is generic record driver.
438 438
439 439 Its job is to interactively filter local changes, and
440 440 accordingly prepare the working directory into a state in which the
441 441 job can be delegated to a non-interactive commit command such as
442 442 'commit' or 'qrefresh'.
443 443
444 444 After the actual job is done by non-interactive command, the
445 445 working directory is restored to its original state.
446 446
447 447 In the end we'll record interesting changes, and everything else
448 448 will be left in place, so the user can continue working.
449 449 """
450 450 if not opts.get(b'interactive-unshelve'):
451 451 checkunfinished(repo, commit=True)
452 452 wctx = repo[None]
453 453 merge = len(wctx.parents()) > 1
454 454 if merge:
455 455 raise error.Abort(
456 456 _(
457 457 b'cannot partially commit a merge '
458 458 b'(use "hg commit" instead)'
459 459 )
460 460 )
461 461
462 462 def fail(f, msg):
463 463 raise error.Abort(b'%s: %s' % (f, msg))
464 464
465 465 force = opts.get(b'force')
466 466 if not force:
467 467 match = matchmod.badmatch(match, fail)
468 468
469 469 status = repo.status(match=match)
470 470
471 471 overrides = {(b'ui', b'commitsubrepos'): True}
472 472
473 473 with repo.ui.configoverride(overrides, b'record'):
474 474 # subrepoutil.precommit() modifies the status
475 475 tmpstatus = scmutil.status(
476 476 copymod.copy(status.modified),
477 477 copymod.copy(status.added),
478 478 copymod.copy(status.removed),
479 479 copymod.copy(status.deleted),
480 480 copymod.copy(status.unknown),
481 481 copymod.copy(status.ignored),
482 482 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
483 483 )
484 484
485 485 # Force allows -X subrepo to skip the subrepo.
486 486 subs, commitsubs, newstate = subrepoutil.precommit(
487 487 repo.ui, wctx, tmpstatus, match, force=True
488 488 )
489 489 for s in subs:
490 490 if s in commitsubs:
491 491 dirtyreason = wctx.sub(s).dirtyreason(True)
492 492 raise error.Abort(dirtyreason)
493 493
494 494 if not force:
495 495 repo.checkcommitpatterns(wctx, match, status, fail)
496 496 diffopts = patch.difffeatureopts(
497 497 ui,
498 498 opts=opts,
499 499 whitespace=True,
500 500 section=b'commands',
501 501 configprefix=b'commit.interactive.',
502 502 )
503 503 diffopts.nodates = True
504 504 diffopts.git = True
505 505 diffopts.showfunc = True
506 506 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
507 507 originalchunks = patch.parsepatch(originaldiff)
508 508 match = scmutil.match(repo[None], pats)
509 509
510 510 # 1. filter the patch, since we are intending to apply a subset of it
511 511 try:
512 512 chunks, newopts = filterfn(ui, originalchunks, match)
513 513 except error.PatchError as err:
514 514 raise error.Abort(_(b'error parsing patch: %s') % err)
515 515 opts.update(newopts)
516 516
517 517 # We need to keep a backup of files that have been newly added and
518 518 # modified during the recording process because there is a previous
519 519 # version without the edit in the workdir. We also will need to restore
520 520 # files that were the sources of renames so that the patch application
521 521 # works.
522 522 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
523 523 chunks, originalchunks
524 524 )
525 525 contenders = set()
526 526 for h in chunks:
527 527 try:
528 528 contenders.update(set(h.files()))
529 529 except AttributeError:
530 530 pass
531 531
532 532 changed = status.modified + status.added + status.removed
533 533 newfiles = [f for f in changed if f in contenders]
534 534 if not newfiles:
535 535 ui.status(_(b'no changes to record\n'))
536 536 return 0
537 537
538 538 modified = set(status.modified)
539 539
540 540 # 2. backup changed files, so we can restore them in the end
541 541
542 542 if backupall:
543 543 tobackup = changed
544 544 else:
545 545 tobackup = [
546 546 f
547 547 for f in newfiles
548 548 if f in modified or f in newlyaddedandmodifiedfiles
549 549 ]
550 550 backups = {}
551 551 if tobackup:
552 552 backupdir = repo.vfs.join(b'record-backups')
553 553 try:
554 554 os.mkdir(backupdir)
555 555 except OSError as err:
556 556 if err.errno != errno.EEXIST:
557 557 raise
558 558 try:
559 559 # backup continues
560 560 for f in tobackup:
561 561 fd, tmpname = pycompat.mkstemp(
562 562 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
563 563 )
564 564 os.close(fd)
565 565 ui.debug(b'backup %r as %r\n' % (f, tmpname))
566 566 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
567 567 backups[f] = tmpname
568 568
569 569 fp = stringio()
570 570 for c in chunks:
571 571 fname = c.filename()
572 572 if fname in backups:
573 573 c.write(fp)
574 574 dopatch = fp.tell()
575 575 fp.seek(0)
576 576
577 577 # 2.5 optionally review / modify patch in text editor
578 578 if opts.get(b'review', False):
579 579 patchtext = (
580 580 crecordmod.diffhelptext
581 581 + crecordmod.patchhelptext
582 582 + fp.read()
583 583 )
584 584 reviewedpatch = ui.edit(
585 585 patchtext, b"", action=b"diff", repopath=repo.path
586 586 )
587 587 fp.truncate(0)
588 588 fp.write(reviewedpatch)
589 589 fp.seek(0)
590 590
591 591 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
592 592 # 3a. apply filtered patch to clean repo (clean)
593 593 if backups:
594 594 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
595 595 mergemod.revert_to(repo[b'.'], matcher=m)
596 596
597 597 # 3b. (apply)
598 598 if dopatch:
599 599 try:
600 600 ui.debug(b'applying patch\n')
601 601 ui.debug(fp.getvalue())
602 602 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
603 603 except error.PatchError as err:
604 604 raise error.Abort(pycompat.bytestr(err))
605 605 del fp
606 606
607 607 # 4. We prepared working directory according to filtered
608 608 # patch. Now is the time to delegate the job to
609 609 # commit/qrefresh or the like!
610 610
611 611 # Make all of the pathnames absolute.
612 612 newfiles = [repo.wjoin(nf) for nf in newfiles]
613 613 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
614 614 finally:
615 615 # 5. finally restore backed-up files
616 616 try:
617 617 dirstate = repo.dirstate
618 618 for realname, tmpname in pycompat.iteritems(backups):
619 619 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
620 620
621 621 if dirstate[realname] == b'n':
622 622 # without normallookup, restoring timestamp
623 623 # may cause partially committed files
624 624 # to be treated as unmodified
625 625 dirstate.normallookup(realname)
626 626
627 627 # copystat=True here and above are a hack to trick any
628 628 # editors that have f open into thinking we haven't modified them.
629 629 #
630 630 # Also note that this is racy, as an editor could notice the
631 631 # file's mtime before we've finished writing it.
632 632 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
633 633 os.unlink(tmpname)
634 634 if tobackup:
635 635 os.rmdir(backupdir)
636 636 except OSError:
637 637 pass
638 638
639 639 def recordinwlock(ui, repo, message, match, opts):
640 640 with repo.wlock():
641 641 return recordfunc(ui, repo, message, match, opts)
642 642
643 643 return commit(ui, repo, recordinwlock, pats, opts)
644 644
645 645
646 646 class dirnode(object):
647 647 """
648 648 Represent a directory in the user's working copy with information required for
649 649 the purpose of tersing its status.
650 650
651 651 path is the path to the directory, without a trailing '/'
652 652
653 653 statuses is a set of statuses of all files in this directory (this includes
654 654 all the files in all the subdirectories too)
655 655
656 656 files is a list of files which are direct children of this directory
657 657
658 658 subdirs is a dictionary with the sub-directory name as the key and its own
659 659 dirnode object as the value
660 660 """
661 661
662 662 def __init__(self, dirpath):
663 663 self.path = dirpath
664 664 self.statuses = set()
665 665 self.files = []
666 666 self.subdirs = {}
667 667
668 668 def _addfileindir(self, filename, status):
669 669 """Add a file in this directory as a direct child."""
670 670 self.files.append((filename, status))
671 671
672 672 def addfile(self, filename, status):
673 673 """
674 674 Add a file to this directory or to its direct parent directory.
675 675
676 676 If the file is not a direct child of this directory, we traverse to the
677 677 directory of which this file is a direct child and add the file
678 678 there.
679 679 """
680 680
681 681 # if the filename contains a path separator, it means it's not the direct
682 682 # child of this directory
683 683 if b'/' in filename:
684 684 subdir, filep = filename.split(b'/', 1)
685 685
686 686 # does the dirnode object for subdir exist
687 687 if subdir not in self.subdirs:
688 688 subdirpath = pathutil.join(self.path, subdir)
689 689 self.subdirs[subdir] = dirnode(subdirpath)
690 690
691 691 # try adding the file in subdir
692 692 self.subdirs[subdir].addfile(filep, status)
693 693
694 694 else:
695 695 self._addfileindir(filename, status)
696 696
697 697 if status not in self.statuses:
698 698 self.statuses.add(status)
699 699
700 700 def iterfilepaths(self):
701 701 """Yield (status, path) for files directly under this directory."""
702 702 for f, st in self.files:
703 703 yield st, pathutil.join(self.path, f)
704 704
705 705 def tersewalk(self, terseargs):
706 706 """
707 707 Yield (status, path) obtained by processing the status of this
708 708 dirnode.
709 709
710 710 terseargs is the string of arguments passed by the user with `--terse`
711 711 flag.
712 712
713 713 Following are the cases which can happen:
714 714
715 715 1) All the files in the directory (including all the files in its
716 716 subdirectories) share the same status and the user has asked us to terse
717 717 that status. -> yield (status, dirpath). dirpath will end in '/'.
718 718
719 719 2) Otherwise, we do the following:
720 720
721 721 a) Yield (status, filepath) for all the files which are in this
722 722 directory (only the ones in this directory, not the subdirs)
723 723
724 724 b) Recurse the function on all the subdirectories of this
725 725 directory
726 726 """
727 727
728 728 if len(self.statuses) == 1:
729 729 onlyst = self.statuses.pop()
730 730
731 731 # Making sure we terse only when the status abbreviation is
732 732 # passed as terse argument
733 733 if onlyst in terseargs:
734 734 yield onlyst, self.path + b'/'
735 735 return
736 736
737 737 # add the files to status list
738 738 for st, fpath in self.iterfilepaths():
739 739 yield st, fpath
740 740
741 741 # recurse on the subdirs
742 742 for dirobj in self.subdirs.values():
743 743 for st, fpath in dirobj.tersewalk(terseargs):
744 744 yield st, fpath
745 745
746 746
747 747 def tersedir(statuslist, terseargs):
748 748 """
749 749 Terse the status if all the files in a directory share the same status.
750 750
751 751 statuslist is a scmutil.status() object which contains a list of files for
752 752 each status.
753 753 terseargs is the string which is passed by the user as the argument to `--terse`
754 754 flag.
755 755
756 756 The function makes a tree of objects of dirnode class, and at each node it
757 757 stores the information required to know whether we can terse a certain
758 758 directory or not.
759 759 """
760 760 # the order matters here as that is used to produce final list
761 761 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
762 762
763 763 # checking the argument validity
764 764 for s in pycompat.bytestr(terseargs):
765 765 if s not in allst:
766 766 raise error.Abort(_(b"'%s' not recognized") % s)
767 767
768 768 # creating a dirnode object for the root of the repo
769 769 rootobj = dirnode(b'')
770 770 pstatus = (
771 771 b'modified',
772 772 b'added',
773 773 b'deleted',
774 774 b'clean',
775 775 b'unknown',
776 776 b'ignored',
777 777 b'removed',
778 778 )
779 779
780 780 tersedict = {}
781 781 for attrname in pstatus:
782 782 statuschar = attrname[0:1]
783 783 for f in getattr(statuslist, attrname):
784 784 rootobj.addfile(f, statuschar)
785 785 tersedict[statuschar] = []
786 786
787 787 # we won't be tersing the root dir, so add files in it
788 788 for st, fpath in rootobj.iterfilepaths():
789 789 tersedict[st].append(fpath)
790 790
791 791 # process each sub-directory and build tersedict
792 792 for subdir in rootobj.subdirs.values():
793 793 for st, f in subdir.tersewalk(terseargs):
794 794 tersedict[st].append(f)
795 795
796 796 tersedlist = []
797 797 for st in allst:
798 798 tersedict[st].sort()
799 799 tersedlist.append(tersedict[st])
800 800
801 801 return scmutil.status(*tersedlist)
802 802
803 803
804 804 def _commentlines(raw):
805 805 '''Surround lines with a comment char and a new line'''
806 806 lines = raw.splitlines()
807 807 commentedlines = [b'# %s' % line for line in lines]
808 808 return b'\n'.join(commentedlines) + b'\n'
809 809
810 810
811 811 @attr.s(frozen=True)
812 812 class morestatus(object):
813 813 reporoot = attr.ib()
814 814 unfinishedop = attr.ib()
815 815 unfinishedmsg = attr.ib()
816 816 activemerge = attr.ib()
817 817 unresolvedpaths = attr.ib()
818 818 _formattedpaths = attr.ib(init=False, default=set())
819 819 _label = b'status.morestatus'
820 820
821 821 def formatfile(self, path, fm):
822 822 self._formattedpaths.add(path)
823 823 if self.activemerge and path in self.unresolvedpaths:
824 824 fm.data(unresolved=True)
825 825
826 826 def formatfooter(self, fm):
827 827 if self.unfinishedop or self.unfinishedmsg:
828 828 fm.startitem()
829 829 fm.data(itemtype=b'morestatus')
830 830
831 831 if self.unfinishedop:
832 832 fm.data(unfinished=self.unfinishedop)
833 833 statemsg = (
834 834 _(b'The repository is in an unfinished *%s* state.')
835 835 % self.unfinishedop
836 836 )
837 837 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
838 838 if self.unfinishedmsg:
839 839 fm.data(unfinishedmsg=self.unfinishedmsg)
840 840
841 841 # May also start new data items.
842 842 self._formatconflicts(fm)
843 843
844 844 if self.unfinishedmsg:
845 845 fm.plain(
846 846 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
847 847 )
848 848
849 849 def _formatconflicts(self, fm):
850 850 if not self.activemerge:
851 851 return
852 852
853 853 if self.unresolvedpaths:
854 854 mergeliststr = b'\n'.join(
855 855 [
856 856 b' %s'
857 857 % util.pathto(self.reporoot, encoding.getcwd(), path)
858 858 for path in self.unresolvedpaths
859 859 ]
860 860 )
861 861 msg = (
862 862 _(
863 863 '''Unresolved merge conflicts:
864 864
865 865 %s
866 866
867 867 To mark files as resolved: hg resolve --mark FILE'''
868 868 )
869 869 % mergeliststr
870 870 )
871 871
872 872 # If any paths with unresolved conflicts were not previously
873 873 # formatted, output them now.
874 874 for f in self.unresolvedpaths:
875 875 if f in self._formattedpaths:
876 876 # Already output.
877 877 continue
878 878 fm.startitem()
879 879 # We can't claim to know the status of the file - it may just
880 880 # have been in one of the states that were not requested for
881 881 # display, so it could be anything.
882 882 fm.data(itemtype=b'file', path=f, unresolved=True)
883 883
884 884 else:
885 885 msg = _(b'No unresolved merge conflicts.')
886 886
887 887 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
888 888
889 889
890 890 def readmorestatus(repo):
891 891 """Returns a morestatus object if the repo has unfinished state."""
892 892 statetuple = statemod.getrepostate(repo)
893 893 mergestate = mergemod.mergestate.read(repo)
894 894 activemerge = mergestate.active()
895 895 if not statetuple and not activemerge:
896 896 return None
897 897
898 898 unfinishedop = unfinishedmsg = unresolved = None
899 899 if statetuple:
900 900 unfinishedop, unfinishedmsg = statetuple
901 901 if activemerge:
902 902 unresolved = sorted(mergestate.unresolved())
903 903 return morestatus(
904 904 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
905 905 )
906 906
907 907
908 908 def findpossible(cmd, table, strict=False):
909 909 """
910 910 Return cmd -> (aliases, command table entry)
911 911 for each matching command.
912 912 Return debug commands (or their aliases) only if no normal command matches.
913 913 """
914 914 choice = {}
915 915 debugchoice = {}
916 916
917 917 if cmd in table:
918 918 # short-circuit exact matches, "log" alias beats "log|history"
919 919 keys = [cmd]
920 920 else:
921 921 keys = table.keys()
922 922
923 923 allcmds = []
924 924 for e in keys:
925 925 aliases = parsealiases(e)
926 926 allcmds.extend(aliases)
927 927 found = None
928 928 if cmd in aliases:
929 929 found = cmd
930 930 elif not strict:
931 931 for a in aliases:
932 932 if a.startswith(cmd):
933 933 found = a
934 934 break
935 935 if found is not None:
936 936 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
937 937 debugchoice[found] = (aliases, table[e])
938 938 else:
939 939 choice[found] = (aliases, table[e])
940 940
941 941 if not choice and debugchoice:
942 942 choice = debugchoice
943 943
944 944 return choice, allcmds
945 945
946 946
947 947 def findcmd(cmd, table, strict=True):
948 948 """Return (aliases, command table entry) for command string."""
949 949 choice, allcmds = findpossible(cmd, table, strict)
950 950
951 951 if cmd in choice:
952 952 return choice[cmd]
953 953
954 954 if len(choice) > 1:
955 955 clist = sorted(choice)
956 956 raise error.AmbiguousCommand(cmd, clist)
957 957
958 958 if choice:
959 959 return list(choice.values())[0]
960 960
961 961 raise error.UnknownCommand(cmd, allcmds)
962 962
963 963
964 964 def changebranch(ui, repo, revs, label):
965 965 """ Change the branch name of given revs to label """
966 966
967 967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
968 968 # abort in case of uncommitted merge or dirty wdir
969 969 bailifchanged(repo)
970 970 revs = scmutil.revrange(repo, revs)
971 971 if not revs:
972 972 raise error.Abort(b"empty revision set")
973 973 roots = repo.revs(b'roots(%ld)', revs)
974 974 if len(roots) > 1:
975 975 raise error.Abort(
976 976 _(b"cannot change branch of non-linear revisions")
977 977 )
978 978 rewriteutil.precheck(repo, revs, b'change branch of')
979 979
980 980 root = repo[roots.first()]
981 981 rpb = {parent.branch() for parent in root.parents()}
982 982 if label not in rpb and label in repo.branchmap():
983 983 raise error.Abort(_(b"a branch of the same name already exists"))
984 984
985 985 if repo.revs(b'obsolete() and %ld', revs):
986 986 raise error.Abort(
987 987 _(b"cannot change branch of a obsolete changeset")
988 988 )
989 989
990 990 # make sure only topological heads
991 991 if repo.revs(b'heads(%ld) - head()', revs):
992 992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
993 993
994 994 replacements = {}
995 995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
996 996 # mercurial.subrepo -> mercurial.cmdutil
997 997 from . import context
998 998
999 999 for rev in revs:
1000 1000 ctx = repo[rev]
1001 1001 oldbranch = ctx.branch()
1002 1002 # check if ctx has same branch
1003 1003 if oldbranch == label:
1004 1004 continue
1005 1005
1006 1006 def filectxfn(repo, newctx, path):
1007 1007 try:
1008 1008 return ctx[path]
1009 1009 except error.ManifestLookupError:
1010 1010 return None
1011 1011
1012 1012 ui.debug(
1013 1013 b"changing branch of '%s' from '%s' to '%s'\n"
1014 1014 % (hex(ctx.node()), oldbranch, label)
1015 1015 )
1016 1016 extra = ctx.extra()
1017 1017 extra[b'branch_change'] = hex(ctx.node())
1018 1018 # While changing the branch of a set of linear commits, make sure that
1019 1019 # we base our commits on the new parent rather than the old parent, which
1020 1020 # was obsoleted while changing the branch
1021 1021 p1 = ctx.p1().node()
1022 1022 p2 = ctx.p2().node()
1023 1023 if p1 in replacements:
1024 1024 p1 = replacements[p1][0]
1025 1025 if p2 in replacements:
1026 1026 p2 = replacements[p2][0]
1027 1027
1028 1028 mc = context.memctx(
1029 1029 repo,
1030 1030 (p1, p2),
1031 1031 ctx.description(),
1032 1032 ctx.files(),
1033 1033 filectxfn,
1034 1034 user=ctx.user(),
1035 1035 date=ctx.date(),
1036 1036 extra=extra,
1037 1037 branch=label,
1038 1038 )
1039 1039
1040 1040 newnode = repo.commitctx(mc)
1041 1041 replacements[ctx.node()] = (newnode,)
1042 1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1043 1043
1044 1044 # create obsmarkers and move bookmarks
1045 1045 scmutil.cleanupnodes(
1046 1046 repo, replacements, b'branch-change', fixphase=True
1047 1047 )
1048 1048
1049 1049 # move the working copy too
1050 1050 wctx = repo[None]
1051 1051 # in-progress merge is a bit too complex for now.
1052 1052 if len(wctx.parents()) == 1:
1053 1053 newid = replacements.get(wctx.p1().node())
1054 1054 if newid is not None:
1055 1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1056 1056 # mercurial.cmdutil
1057 1057 from . import hg
1058 1058
1059 1059 hg.update(repo, newid[0], quietempty=True)
1060 1060
1061 1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1062 1062
1063 1063
1064 1064 def findrepo(p):
1065 1065 while not os.path.isdir(os.path.join(p, b".hg")):
1066 1066 oldp, p = p, os.path.dirname(p)
1067 1067 if p == oldp:
1068 1068 return None
1069 1069
1070 1070 return p
1071 1071
1072 1072
1073 1073 def bailifchanged(repo, merge=True, hint=None):
1074 1074 """ enforce the precondition that working directory must be clean.
1075 1075
1076 1076 'merge' can be set to false if a pending uncommitted merge should be
1077 1077 ignored (such as when 'update --check' runs).
1078 1078
1079 1079 'hint' is the usual hint given to Abort exception.
1080 1080 """
1081 1081
1082 1082 if merge and repo.dirstate.p2() != nullid:
1083 1083 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1084 1084 st = repo.status()
1085 1085 if st.modified or st.added or st.removed or st.deleted:
1086 1086 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1087 1087 ctx = repo[None]
1088 1088 for s in sorted(ctx.substate):
1089 1089 ctx.sub(s).bailifchanged(hint=hint)
1090 1090
1091 1091
1092 1092 def logmessage(ui, opts):
1093 1093 """ get the log message according to -m and -l option """
1094 1094
1095 1095 check_at_most_one_arg(opts, b'message', b'logfile')
1096 1096
1097 1097 message = opts.get(b'message')
1098 1098 logfile = opts.get(b'logfile')
1099 1099
1100 1100 if not message and logfile:
1101 1101 try:
1102 1102 if isstdiofilename(logfile):
1103 1103 message = ui.fin.read()
1104 1104 else:
1105 1105 message = b'\n'.join(util.readfile(logfile).splitlines())
1106 1106 except IOError as inst:
1107 1107 raise error.Abort(
1108 1108 _(b"can't read commit message '%s': %s")
1109 1109 % (logfile, encoding.strtolocal(inst.strerror))
1110 1110 )
1111 1111 return message
1112 1112
1113 1113
1114 1114 def mergeeditform(ctxorbool, baseformname):
1115 1115 """return appropriate editform name (referencing a committemplate)
1116 1116
1117 1117 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1118 1118 merging is committed.
1119 1119
1120 1120 This returns baseformname with '.merge' appended if it is a merge,
1121 1121 otherwise '.normal' is appended.
1122 1122 """
1123 1123 if isinstance(ctxorbool, bool):
1124 1124 if ctxorbool:
1125 1125 return baseformname + b".merge"
1126 1126 elif len(ctxorbool.parents()) > 1:
1127 1127 return baseformname + b".merge"
1128 1128
1129 1129 return baseformname + b".normal"
1130 1130
1131 1131
1132 1132 def getcommiteditor(
1133 1133 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1134 1134 ):
1135 1135 """get appropriate commit message editor according to '--edit' option
1136 1136
1137 1137 'finishdesc' is a function to be called with the edited commit message
1138 1138 (= 'description' of the new changeset) just after editing, but
1139 1139 before checking emptiness. It should return the actual text to be
1140 1140 stored into history. This allows changing the description before
1141 1141 storing.
1142 1142
1143 1143 'extramsg' is an extra message to be shown in the editor instead of
1144 1144 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
1145 1145 is automatically added.
1146 1146
1147 1147 'editform' is a dot-separated list of names, to distinguish
1148 1148 the purpose of commit text editing.
1149 1149
1150 1150 'getcommiteditor' returns 'commitforceeditor' regardless of
1151 1151 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1152 1152 they are specific for usage in MQ.
1153 1153 """
1154 1154 if edit or finishdesc or extramsg:
1155 1155 return lambda r, c, s: commitforceeditor(
1156 1156 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1157 1157 )
1158 1158 elif editform:
1159 1159 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1160 1160 else:
1161 1161 return commiteditor
1162 1162
1163 1163
1164 1164 def _escapecommandtemplate(tmpl):
1165 1165 parts = []
1166 1166 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1167 1167 if typ == b'string':
1168 1168 parts.append(stringutil.escapestr(tmpl[start:end]))
1169 1169 else:
1170 1170 parts.append(tmpl[start:end])
1171 1171 return b''.join(parts)
1172 1172
1173 1173
1174 1174 def rendercommandtemplate(ui, tmpl, props):
1175 1175 r"""Expand a literal template 'tmpl' in a way suitable for command line
1176 1176
1177 1177 '\' in outermost string is not taken as an escape character because it
1178 1178 is a directory separator on Windows.
1179 1179
1180 1180 >>> from . import ui as uimod
1181 1181 >>> ui = uimod.ui()
1182 1182 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1183 1183 'c:\\foo'
1184 1184 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1185 1185 'c:{path}'
1186 1186 """
1187 1187 if not tmpl:
1188 1188 return tmpl
1189 1189 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1190 1190 return t.renderdefault(props)
1191 1191
1192 1192
1193 1193 def rendertemplate(ctx, tmpl, props=None):
1194 1194 """Expand a literal template 'tmpl' byte-string against one changeset
1195 1195
1196 1196 Each props item must be a stringify-able value or a callable returning
1197 1197 such value, i.e. no bare list nor dict should be passed.
1198 1198 """
1199 1199 repo = ctx.repo()
1200 1200 tres = formatter.templateresources(repo.ui, repo)
1201 1201 t = formatter.maketemplater(
1202 1202 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1203 1203 )
1204 1204 mapping = {b'ctx': ctx}
1205 1205 if props:
1206 1206 mapping.update(props)
1207 1207 return t.renderdefault(mapping)
1208 1208
1209 1209
1210 1210 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1211 1211 r"""Convert old-style filename format string to template string
1212 1212
1213 1213 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1214 1214 'foo-{reporoot|basename}-{seqno}.patch'
1215 1215 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1216 1216 '{rev}{tags % "{tag}"}{node}'
1217 1217
1218 1218 '\' in outermost strings has to be escaped because it is a directory
1219 1219 separator on Windows:
1220 1220
1221 1221 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1222 1222 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1223 1223 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1224 1224 '\\\\\\\\foo\\\\bar.patch'
1225 1225 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1226 1226 '\\\\{tags % "{tag}"}'
1227 1227
1228 1228 but inner strings follow the template rules (i.e. '\' is taken as an
1229 1229 escape character):
1230 1230
1231 1231 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1232 1232 '{"c:\\tmp"}'
1233 1233 """
1234 1234 expander = {
1235 1235 b'H': b'{node}',
1236 1236 b'R': b'{rev}',
1237 1237 b'h': b'{node|short}',
1238 1238 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1239 1239 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1240 1240 b'%': b'%',
1241 1241 b'b': b'{reporoot|basename}',
1242 1242 }
1243 1243 if total is not None:
1244 1244 expander[b'N'] = b'{total}'
1245 1245 if seqno is not None:
1246 1246 expander[b'n'] = b'{seqno}'
1247 1247 if total is not None and seqno is not None:
1248 1248 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1249 1249 if pathname is not None:
1250 1250 expander[b's'] = b'{pathname|basename}'
1251 1251 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1252 1252 expander[b'p'] = b'{pathname}'
1253 1253
1254 1254 newname = []
1255 1255 for typ, start, end in templater.scantemplate(pat, raw=True):
1256 1256 if typ != b'string':
1257 1257 newname.append(pat[start:end])
1258 1258 continue
1259 1259 i = start
1260 1260 while i < end:
1261 1261 n = pat.find(b'%', i, end)
1262 1262 if n < 0:
1263 1263 newname.append(stringutil.escapestr(pat[i:end]))
1264 1264 break
1265 1265 newname.append(stringutil.escapestr(pat[i:n]))
1266 1266 if n + 2 > end:
1267 1267 raise error.Abort(
1268 1268 _(b"incomplete format spec in output filename")
1269 1269 )
1270 1270 c = pat[n + 1 : n + 2]
1271 1271 i = n + 2
1272 1272 try:
1273 1273 newname.append(expander[c])
1274 1274 except KeyError:
1275 1275 raise error.Abort(
1276 1276 _(b"invalid format spec '%%%s' in output filename") % c
1277 1277 )
1278 1278 return b''.join(newname)
1279 1279
1280 1280
1281 1281 def makefilename(ctx, pat, **props):
1282 1282 if not pat:
1283 1283 return pat
1284 1284 tmpl = _buildfntemplate(pat, **props)
1285 1285 # BUG: alias expansion shouldn't be made against template fragments
1286 1286 # rewritten from %-format strings, but we have no easy way to partially
1287 1287 # disable the expansion.
1288 1288 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1289 1289
1290 1290
1291 1291 def isstdiofilename(pat):
1292 1292 """True if the given pat looks like a filename denoting stdin/stdout"""
1293 1293 return not pat or pat == b'-'
1294 1294
1295 1295
1296 1296 class _unclosablefile(object):
1297 1297 def __init__(self, fp):
1298 1298 self._fp = fp
1299 1299
1300 1300 def close(self):
1301 1301 pass
1302 1302
1303 1303 def __iter__(self):
1304 1304 return iter(self._fp)
1305 1305
1306 1306 def __getattr__(self, attr):
1307 1307 return getattr(self._fp, attr)
1308 1308
1309 1309 def __enter__(self):
1310 1310 return self
1311 1311
1312 1312 def __exit__(self, exc_type, exc_value, exc_tb):
1313 1313 pass
1314 1314
1315 1315
1316 1316 def makefileobj(ctx, pat, mode=b'wb', **props):
1317 1317 writable = mode not in (b'r', b'rb')
1318 1318
1319 1319 if isstdiofilename(pat):
1320 1320 repo = ctx.repo()
1321 1321 if writable:
1322 1322 fp = repo.ui.fout
1323 1323 else:
1324 1324 fp = repo.ui.fin
1325 1325 return _unclosablefile(fp)
1326 1326 fn = makefilename(ctx, pat, **props)
1327 1327 return open(fn, mode)
1328 1328
1329 1329
1330 1330 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1331 1331 """opens the changelog, manifest, a filelog or a given revlog"""
1332 1332 cl = opts[b'changelog']
1333 1333 mf = opts[b'manifest']
1334 1334 dir = opts[b'dir']
1335 1335 msg = None
1336 1336 if cl and mf:
1337 1337 msg = _(b'cannot specify --changelog and --manifest at the same time')
1338 1338 elif cl and dir:
1339 1339 msg = _(b'cannot specify --changelog and --dir at the same time')
1340 1340 elif cl or mf or dir:
1341 1341 if file_:
1342 1342 msg = _(b'cannot specify filename with --changelog or --manifest')
1343 1343 elif not repo:
1344 1344 msg = _(
1345 1345 b'cannot specify --changelog or --manifest or --dir '
1346 1346 b'without a repository'
1347 1347 )
1348 1348 if msg:
1349 1349 raise error.Abort(msg)
1350 1350
1351 1351 r = None
1352 1352 if repo:
1353 1353 if cl:
1354 1354 r = repo.unfiltered().changelog
1355 1355 elif dir:
1356 1356 if b'treemanifest' not in repo.requirements:
1357 1357 raise error.Abort(
1358 1358 _(
1359 1359 b"--dir can only be used on repos with "
1360 1360 b"treemanifest enabled"
1361 1361 )
1362 1362 )
1363 1363 if not dir.endswith(b'/'):
1364 1364 dir = dir + b'/'
1365 1365 dirlog = repo.manifestlog.getstorage(dir)
1366 1366 if len(dirlog):
1367 1367 r = dirlog
1368 1368 elif mf:
1369 1369 r = repo.manifestlog.getstorage(b'')
1370 1370 elif file_:
1371 1371 filelog = repo.file(file_)
1372 1372 if len(filelog):
1373 1373 r = filelog
1374 1374
1375 1375 # Not all storage may be revlogs. If requested, try to return an actual
1376 1376 # revlog instance.
1377 1377 if returnrevlog:
1378 1378 if isinstance(r, revlog.revlog):
1379 1379 pass
1380 1380 elif util.safehasattr(r, b'_revlog'):
1381 1381 r = r._revlog # pytype: disable=attribute-error
1382 1382 elif r is not None:
1383 1383 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1384 1384
1385 1385 if not r:
1386 1386 if not returnrevlog:
1387 1387 raise error.Abort(_(b'cannot give path to non-revlog'))
1388 1388
1389 1389 if not file_:
1390 1390 raise error.CommandError(cmd, _(b'invalid arguments'))
1391 1391 if not os.path.isfile(file_):
1392 1392 raise error.Abort(_(b"revlog '%s' not found") % file_)
1393 1393 r = revlog.revlog(
1394 1394 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1395 1395 )
1396 1396 return r
1397 1397
1398 1398
1399 1399 def openrevlog(repo, cmd, file_, opts):
1400 1400 """Obtain a revlog backing storage of an item.
1401 1401
1402 1402 This is similar to ``openstorage()`` except it always returns a revlog.
1403 1403
1404 1404 In most cases, a caller cares about the main storage object - not the
1405 1405 revlog backing it. Therefore, this function should only be used by code
1406 1406 that needs to examine low-level revlog implementation details. e.g. debug
1407 1407 commands.
1408 1408 """
1409 1409 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1410 1410
1411 1411
1412 1412 def copy(ui, repo, pats, opts, rename=False):
1413 1413 # called with the repo lock held
1414 1414 #
1415 1415 # hgsep => pathname that uses "/" to separate directories
1416 1416 # ossep => pathname that uses os.sep to separate directories
1417 1417 cwd = repo.getcwd()
1418 1418 targets = {}
1419 1419 after = opts.get(b"after")
1420 1420 dryrun = opts.get(b"dry_run")
1421 1421 wctx = repo[None]
1422 pctx = wctx.p1()
1422 1423
1423 1424 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1424 1425
1425 1426 def walkpat(pat):
1426 1427 srcs = []
1427 if after:
1428 badstates = b'?'
1429 else:
1430 badstates = b'?r'
1431 1428 m = scmutil.match(wctx, [pat], opts, globbed=True)
1432 1429 for abs in wctx.walk(m):
1433 state = repo.dirstate[abs]
1434 1430 rel = uipathfn(abs)
1435 1431 exact = m.exact(abs)
1436 if state in badstates:
1437 if exact and state == b'?':
1438 ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
1439 if exact and state == b'r':
1440 ui.warn(
1441 _(
1442 b'%s: not copying - file has been marked for'
1443 b' remove\n'
1432 if abs not in wctx:
1433 if abs in pctx:
1434 if not after:
1435 if exact:
1436 ui.warn(
1437 _(
1438 b'%s: not copying - file has been marked '
1439 b'for remove\n'
1440 )
1441 % rel
1442 )
1443 continue
1444 else:
1445 if exact:
1446 ui.warn(
1447 _(b'%s: not copying - file is not managed\n') % rel
1444 1448 )
1445 % rel
1446 )
1447 continue
1449 continue
1450
1448 1451 # abs: hgsep
1449 1452 # rel: ossep
1450 1453 srcs.append((abs, rel, exact))
1451 1454 return srcs
1452 1455
1453 1456 # abssrc: hgsep
1454 1457 # relsrc: ossep
1455 1458 # otarget: ossep
1456 1459 def copyfile(abssrc, relsrc, otarget, exact):
1457 1460 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1458 1461 if b'/' in abstarget:
1459 1462 # We cannot normalize abstarget itself, this would prevent
1460 1463 # case only renames, like a => A.
1461 1464 abspath, absname = abstarget.rsplit(b'/', 1)
1462 1465 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1463 1466 reltarget = repo.pathto(abstarget, cwd)
1464 1467 target = repo.wjoin(abstarget)
1465 1468 src = repo.wjoin(abssrc)
1466 1469 state = repo.dirstate[abstarget]
1467 1470
1468 1471 scmutil.checkportable(ui, abstarget)
1469 1472
1470 1473 # check for collisions
1471 1474 prevsrc = targets.get(abstarget)
1472 1475 if prevsrc is not None:
1473 1476 ui.warn(
1474 1477 _(b'%s: not overwriting - %s collides with %s\n')
1475 1478 % (
1476 1479 reltarget,
1477 1480 repo.pathto(abssrc, cwd),
1478 1481 repo.pathto(prevsrc, cwd),
1479 1482 )
1480 1483 )
1481 1484 return True # report a failure
1482 1485
1483 1486 # check for overwrites
1484 1487 exists = os.path.lexists(target)
1485 1488 samefile = False
1486 1489 if exists and abssrc != abstarget:
1487 1490 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1488 1491 abstarget
1489 1492 ):
1490 1493 if not rename:
1491 1494 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1492 1495 return True # report a failure
1493 1496 exists = False
1494 1497 samefile = True
1495 1498
1496 1499 if not after and exists or after and state in b'mn':
1497 1500 if not opts[b'force']:
1498 1501 if state in b'mn':
1499 1502 msg = _(b'%s: not overwriting - file already committed\n')
1500 1503 if after:
1501 1504 flags = b'--after --force'
1502 1505 else:
1503 1506 flags = b'--force'
1504 1507 if rename:
1505 1508 hint = (
1506 1509 _(
1507 1510 b"('hg rename %s' to replace the file by "
1508 1511 b'recording a rename)\n'
1509 1512 )
1510 1513 % flags
1511 1514 )
1512 1515 else:
1513 1516 hint = (
1514 1517 _(
1515 1518 b"('hg copy %s' to replace the file by "
1516 1519 b'recording a copy)\n'
1517 1520 )
1518 1521 % flags
1519 1522 )
1520 1523 else:
1521 1524 msg = _(b'%s: not overwriting - file exists\n')
1522 1525 if rename:
1523 1526 hint = _(
1524 1527 b"('hg rename --after' to record the rename)\n"
1525 1528 )
1526 1529 else:
1527 1530 hint = _(b"('hg copy --after' to record the copy)\n")
1528 1531 ui.warn(msg % reltarget)
1529 1532 ui.warn(hint)
1530 1533 return True # report a failure
1531 1534
1532 1535 if after:
1533 1536 if not exists:
1534 1537 if rename:
1535 1538 ui.warn(
1536 1539 _(b'%s: not recording move - %s does not exist\n')
1537 1540 % (relsrc, reltarget)
1538 1541 )
1539 1542 else:
1540 1543 ui.warn(
1541 1544 _(b'%s: not recording copy - %s does not exist\n')
1542 1545 % (relsrc, reltarget)
1543 1546 )
1544 1547 return True # report a failure
1545 1548 elif not dryrun:
1546 1549 try:
1547 1550 if exists:
1548 1551 os.unlink(target)
1549 1552 targetdir = os.path.dirname(target) or b'.'
1550 1553 if not os.path.isdir(targetdir):
1551 1554 os.makedirs(targetdir)
1552 1555 if samefile:
1553 1556 tmp = target + b"~hgrename"
1554 1557 os.rename(src, tmp)
1555 1558 os.rename(tmp, target)
1556 1559 else:
1557 1560 # Preserve stat info on renames, not on copies; this matches
1558 1561 # Linux CLI behavior.
1559 1562 util.copyfile(src, target, copystat=rename)
1560 1563 srcexists = True
1561 1564 except IOError as inst:
1562 1565 if inst.errno == errno.ENOENT:
1563 1566 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1564 1567 srcexists = False
1565 1568 else:
1566 1569 ui.warn(
1567 1570 _(b'%s: cannot copy - %s\n')
1568 1571 % (relsrc, encoding.strtolocal(inst.strerror))
1569 1572 )
1570 1573 return True # report a failure
1571 1574
1572 1575 if ui.verbose or not exact:
1573 1576 if rename:
1574 1577 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1575 1578 else:
1576 1579 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1577 1580
1578 1581 targets[abstarget] = abssrc
1579 1582
1580 1583 # fix up dirstate
1581 1584 scmutil.dirstatecopy(
1582 1585 ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1583 1586 )
1584 1587 if rename and not dryrun:
1585 1588 if not after and srcexists and not samefile:
1586 1589 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1587 1590 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1588 1591 wctx.forget([abssrc])
1589 1592
1590 1593 # pat: ossep
1591 1594 # dest ossep
1592 1595 # srcs: list of (hgsep, hgsep, ossep, bool)
1593 1596 # return: function that takes hgsep and returns ossep
1594 1597 def targetpathfn(pat, dest, srcs):
1595 1598 if os.path.isdir(pat):
1596 1599 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1597 1600 abspfx = util.localpath(abspfx)
1598 1601 if destdirexists:
1599 1602 striplen = len(os.path.split(abspfx)[0])
1600 1603 else:
1601 1604 striplen = len(abspfx)
1602 1605 if striplen:
1603 1606 striplen += len(pycompat.ossep)
1604 1607 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1605 1608 elif destdirexists:
1606 1609 res = lambda p: os.path.join(
1607 1610 dest, os.path.basename(util.localpath(p))
1608 1611 )
1609 1612 else:
1610 1613 res = lambda p: dest
1611 1614 return res
1612 1615
1613 1616 # pat: ossep
1614 1617 # dest ossep
1615 1618 # srcs: list of (hgsep, hgsep, ossep, bool)
1616 1619 # return: function that takes hgsep and returns ossep
1617 1620 def targetpathafterfn(pat, dest, srcs):
1618 1621 if matchmod.patkind(pat):
1619 1622 # a mercurial pattern
1620 1623 res = lambda p: os.path.join(
1621 1624 dest, os.path.basename(util.localpath(p))
1622 1625 )
1623 1626 else:
1624 1627 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1625 1628 if len(abspfx) < len(srcs[0][0]):
1626 1629 # A directory. Either the target path contains the last
1627 1630 # component of the source path or it does not.
1628 1631 def evalpath(striplen):
1629 1632 score = 0
1630 1633 for s in srcs:
1631 1634 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1632 1635 if os.path.lexists(t):
1633 1636 score += 1
1634 1637 return score
1635 1638
1636 1639 abspfx = util.localpath(abspfx)
1637 1640 striplen = len(abspfx)
1638 1641 if striplen:
1639 1642 striplen += len(pycompat.ossep)
1640 1643 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1641 1644 score = evalpath(striplen)
1642 1645 striplen1 = len(os.path.split(abspfx)[0])
1643 1646 if striplen1:
1644 1647 striplen1 += len(pycompat.ossep)
1645 1648 if evalpath(striplen1) > score:
1646 1649 striplen = striplen1
1647 1650 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1648 1651 else:
1649 1652 # a file
1650 1653 if destdirexists:
1651 1654 res = lambda p: os.path.join(
1652 1655 dest, os.path.basename(util.localpath(p))
1653 1656 )
1654 1657 else:
1655 1658 res = lambda p: dest
1656 1659 return res
1657 1660
1658 1661 pats = scmutil.expandpats(pats)
1659 1662 if not pats:
1660 1663 raise error.Abort(_(b'no source or destination specified'))
1661 1664 if len(pats) == 1:
1662 1665 raise error.Abort(_(b'no destination specified'))
1663 1666 dest = pats.pop()
1664 1667 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1665 1668 if not destdirexists:
1666 1669 if len(pats) > 1 or matchmod.patkind(pats[0]):
1667 1670 raise error.Abort(
1668 1671 _(
1669 1672 b'with multiple sources, destination must be an '
1670 1673 b'existing directory'
1671 1674 )
1672 1675 )
1673 1676 if util.endswithsep(dest):
1674 1677 raise error.Abort(_(b'destination %s is not a directory') % dest)
1675 1678
1676 1679 tfn = targetpathfn
1677 1680 if after:
1678 1681 tfn = targetpathafterfn
1679 1682 copylist = []
1680 1683 for pat in pats:
1681 1684 srcs = walkpat(pat)
1682 1685 if not srcs:
1683 1686 continue
1684 1687 copylist.append((tfn(pat, dest, srcs), srcs))
1685 1688 if not copylist:
1686 1689 raise error.Abort(_(b'no files to copy'))
1687 1690
1688 1691 errors = 0
1689 1692 for targetpath, srcs in copylist:
1690 1693 for abssrc, relsrc, exact in srcs:
1691 1694 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1692 1695 errors += 1
1693 1696
1694 1697 return errors != 0
1695 1698
1696 1699
1697 1700 ## facility to let extension process additional data into an import patch
1698 1701 # list of identifier to be executed in order
1699 1702 extrapreimport = [] # run before commit
1700 1703 extrapostimport = [] # run after commit
1701 1704 # mapping from identifier to actual import function
1702 1705 #
1703 1706 # 'preimport' are run before the commit is made and are provided the following
1704 1707 # arguments:
1705 1708 # - repo: the localrepository instance,
1706 1709 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1707 1710 # - extra: the future extra dictionary of the changeset, please mutate it,
1708 1711 # - opts: the import options.
1709 1712 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1710 1713 # mutation of in memory commit and more. Feel free to rework the code to get
1711 1714 # there.
1712 1715 extrapreimportmap = {}
1713 1716 # 'postimport' are run after the commit is made and are provided the following
1714 1717 # argument:
1715 1718 # - ctx: the changectx created by import.
1716 1719 extrapostimportmap = {}
1717 1720
1718 1721
1719 1722 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1720 1723 """Utility function used by commands.import to import a single patch
1721 1724
1722 1725 This function is explicitly defined here to help the evolve extension to
1723 1726 wrap this part of the import logic.
1724 1727
1725 1728 The API is currently a bit ugly because it is a simple code translation from
1726 1729 the import command. Feel free to make it better.
1727 1730
1728 1731 :patchdata: a dictionary containing parsed patch data (such as from
1729 1732 ``patch.extract()``)
1730 1733 :parents: nodes that will be parent of the created commit
1731 1734 :opts: the full dict of options passed to the import command
1732 1735 :msgs: list to save commit message to.
1733 1736 (used in case we need to save it when failing)
1734 1737 :updatefunc: a function that updates a repo to a given node
1735 1738 updatefunc(<repo>, <node>)
1736 1739 """
1737 1740 # avoid cycle context -> subrepo -> cmdutil
1738 1741 from . import context
1739 1742
1740 1743 tmpname = patchdata.get(b'filename')
1741 1744 message = patchdata.get(b'message')
1742 1745 user = opts.get(b'user') or patchdata.get(b'user')
1743 1746 date = opts.get(b'date') or patchdata.get(b'date')
1744 1747 branch = patchdata.get(b'branch')
1745 1748 nodeid = patchdata.get(b'nodeid')
1746 1749 p1 = patchdata.get(b'p1')
1747 1750 p2 = patchdata.get(b'p2')
1748 1751
1749 1752 nocommit = opts.get(b'no_commit')
1750 1753 importbranch = opts.get(b'import_branch')
1751 1754 update = not opts.get(b'bypass')
1752 1755 strip = opts[b"strip"]
1753 1756 prefix = opts[b"prefix"]
1754 1757 sim = float(opts.get(b'similarity') or 0)
1755 1758
1756 1759 if not tmpname:
1757 1760 return None, None, False
1758 1761
1759 1762 rejects = False
1760 1763
1761 1764 cmdline_message = logmessage(ui, opts)
1762 1765 if cmdline_message:
1763 1766 # pickup the cmdline msg
1764 1767 message = cmdline_message
1765 1768 elif message:
1766 1769 # pickup the patch msg
1767 1770 message = message.strip()
1768 1771 else:
1769 1772 # launch the editor
1770 1773 message = None
1771 1774 ui.debug(b'message:\n%s\n' % (message or b''))
1772 1775
1773 1776 if len(parents) == 1:
1774 1777 parents.append(repo[nullid])
1775 1778 if opts.get(b'exact'):
1776 1779 if not nodeid or not p1:
1777 1780 raise error.Abort(_(b'not a Mercurial patch'))
1778 1781 p1 = repo[p1]
1779 1782 p2 = repo[p2 or nullid]
1780 1783 elif p2:
1781 1784 try:
1782 1785 p1 = repo[p1]
1783 1786 p2 = repo[p2]
1784 1787 # Without any options, consider p2 only if the
1785 1788 # patch is being applied on top of the recorded
1786 1789 # first parent.
1787 1790 if p1 != parents[0]:
1788 1791 p1 = parents[0]
1789 1792 p2 = repo[nullid]
1790 1793 except error.RepoError:
1791 1794 p1, p2 = parents
1792 1795 if p2.node() == nullid:
1793 1796 ui.warn(
1794 1797 _(
1795 1798 b"warning: import the patch as a normal revision\n"
1796 1799 b"(use --exact to import the patch as a merge)\n"
1797 1800 )
1798 1801 )
1799 1802 else:
1800 1803 p1, p2 = parents
1801 1804
1802 1805 n = None
1803 1806 if update:
1804 1807 if p1 != parents[0]:
1805 1808 updatefunc(repo, p1.node())
1806 1809 if p2 != parents[1]:
1807 1810 repo.setparents(p1.node(), p2.node())
1808 1811
1809 1812 if opts.get(b'exact') or importbranch:
1810 1813 repo.dirstate.setbranch(branch or b'default')
1811 1814
1812 1815 partial = opts.get(b'partial', False)
1813 1816 files = set()
1814 1817 try:
1815 1818 patch.patch(
1816 1819 ui,
1817 1820 repo,
1818 1821 tmpname,
1819 1822 strip=strip,
1820 1823 prefix=prefix,
1821 1824 files=files,
1822 1825 eolmode=None,
1823 1826 similarity=sim / 100.0,
1824 1827 )
1825 1828 except error.PatchError as e:
1826 1829 if not partial:
1827 1830 raise error.Abort(pycompat.bytestr(e))
1828 1831 if partial:
1829 1832 rejects = True
1830 1833
1831 1834 files = list(files)
1832 1835 if nocommit:
1833 1836 if message:
1834 1837 msgs.append(message)
1835 1838 else:
1836 1839 if opts.get(b'exact') or p2:
1837 1840 # If you got here, you either used --force and know what
1838 1841 # you are doing, or you used --exact or a merge patch while
1839 1842 # being updated to its first parent.
1840 1843 m = None
1841 1844 else:
1842 1845 m = scmutil.matchfiles(repo, files or [])
1843 1846 editform = mergeeditform(repo[None], b'import.normal')
1844 1847 if opts.get(b'exact'):
1845 1848 editor = None
1846 1849 else:
1847 1850 editor = getcommiteditor(
1848 1851 editform=editform, **pycompat.strkwargs(opts)
1849 1852 )
1850 1853 extra = {}
1851 1854 for idfunc in extrapreimport:
1852 1855 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1853 1856 overrides = {}
1854 1857 if partial:
1855 1858 overrides[(b'ui', b'allowemptycommit')] = True
1856 1859 if opts.get(b'secret'):
1857 1860 overrides[(b'phases', b'new-commit')] = b'secret'
1858 1861 with repo.ui.configoverride(overrides, b'import'):
1859 1862 n = repo.commit(
1860 1863 message, user, date, match=m, editor=editor, extra=extra
1861 1864 )
1862 1865 for idfunc in extrapostimport:
1863 1866 extrapostimportmap[idfunc](repo[n])
1864 1867 else:
1865 1868 if opts.get(b'exact') or importbranch:
1866 1869 branch = branch or b'default'
1867 1870 else:
1868 1871 branch = p1.branch()
1869 1872 store = patch.filestore()
1870 1873 try:
1871 1874 files = set()
1872 1875 try:
1873 1876 patch.patchrepo(
1874 1877 ui,
1875 1878 repo,
1876 1879 p1,
1877 1880 store,
1878 1881 tmpname,
1879 1882 strip,
1880 1883 prefix,
1881 1884 files,
1882 1885 eolmode=None,
1883 1886 )
1884 1887 except error.PatchError as e:
1885 1888 raise error.Abort(stringutil.forcebytestr(e))
1886 1889 if opts.get(b'exact'):
1887 1890 editor = None
1888 1891 else:
1889 1892 editor = getcommiteditor(editform=b'import.bypass')
1890 1893 memctx = context.memctx(
1891 1894 repo,
1892 1895 (p1.node(), p2.node()),
1893 1896 message,
1894 1897 files=files,
1895 1898 filectxfn=store,
1896 1899 user=user,
1897 1900 date=date,
1898 1901 branch=branch,
1899 1902 editor=editor,
1900 1903 )
1901 1904 n = memctx.commit()
1902 1905 finally:
1903 1906 store.close()
1904 1907 if opts.get(b'exact') and nocommit:
1905 1908 # --exact with --no-commit is still useful in that it does merge
1906 1909 # and branch bits
1907 1910 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
1908 1911 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
1909 1912 raise error.Abort(_(b'patch is damaged or loses information'))
1910 1913 msg = _(b'applied to working directory')
1911 1914 if n:
1912 1915 # i18n: refers to a short changeset id
1913 1916 msg = _(b'created %s') % short(n)
1914 1917 return msg, n, rejects
1915 1918
1916 1919
1917 1920 # facility to let extensions include additional data in an exported patch
1918 1921 # list of identifiers to be executed in order
1919 1922 extraexport = []
1920 1923 # mapping from identifier to actual export function
1921 1924 # function has to return a string to be added to the header or None
1922 1925 # it is given two arguments (sequencenumber, changectx)
1923 1926 extraexportmap = {}
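# Illustration only (hypothetical identifier): an extension could emit an
# extra header line per exported changeset like this:
#
#     extraexport.append(b'obsolete')
#
#     def _exportobsolete(seqno, ctx):
#         return b'Obsolete: yes' if ctx.obsolete() else None
#
#     extraexportmap[b'obsolete'] = _exportobsolete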
1924 1927
1925 1928
1926 1929 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1927 1930 node = scmutil.binnode(ctx)
1928 1931 parents = [p.node() for p in ctx.parents() if p]
1929 1932 branch = ctx.branch()
1930 1933 if switch_parent:
1931 1934 parents.reverse()
1932 1935
1933 1936 if parents:
1934 1937 prev = parents[0]
1935 1938 else:
1936 1939 prev = nullid
1937 1940
1938 1941 fm.context(ctx=ctx)
1939 1942 fm.plain(b'# HG changeset patch\n')
1940 1943 fm.write(b'user', b'# User %s\n', ctx.user())
1941 1944 fm.plain(b'# Date %d %d\n' % ctx.date())
1942 1945 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
1943 1946 fm.condwrite(
1944 1947 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
1945 1948 )
1946 1949 fm.write(b'node', b'# Node ID %s\n', hex(node))
1947 1950 fm.plain(b'# Parent %s\n' % hex(prev))
1948 1951 if len(parents) > 1:
1949 1952 fm.plain(b'# Parent %s\n' % hex(parents[1]))
1950 1953 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
1951 1954
1952 1955 # TODO: redesign extraexportmap function to support formatter
1953 1956 for headerid in extraexport:
1954 1957 header = extraexportmap[headerid](seqno, ctx)
1955 1958 if header is not None:
1956 1959 fm.plain(b'# %s\n' % header)
1957 1960
1958 1961 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
1959 1962 fm.plain(b'\n')
1960 1963
1961 1964 if fm.isplain():
1962 1965 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1963 1966 for chunk, label in chunkiter:
1964 1967 fm.plain(chunk, label=label)
1965 1968 else:
1966 1969 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1967 1970 # TODO: make it structured?
1968 1971 fm.data(diff=b''.join(chunkiter))
1969 1972
1970 1973
1971 1974 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1972 1975 """Export changesets to stdout or a single file"""
1973 1976 for seqno, rev in enumerate(revs, 1):
1974 1977 ctx = repo[rev]
1975 1978 if not dest.startswith(b'<'):
1976 1979 repo.ui.note(b"%s\n" % dest)
1977 1980 fm.startitem()
1978 1981 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1979 1982
1980 1983
1981 1984 def _exportfntemplate(
1982 1985 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
1983 1986 ):
1984 1987 """Export changesets to possibly multiple files"""
1985 1988 total = len(revs)
1986 1989 revwidth = max(len(str(rev)) for rev in revs)
1987 1990 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1988 1991
1989 1992 for seqno, rev in enumerate(revs, 1):
1990 1993 ctx = repo[rev]
1991 1994 dest = makefilename(
1992 1995 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
1993 1996 )
1994 1997 filemap.setdefault(dest, []).append((seqno, rev))
1995 1998
1996 1999 for dest in filemap:
1997 2000 with formatter.maybereopen(basefm, dest) as fm:
1998 2001 repo.ui.note(b"%s\n" % dest)
1999 2002 for seqno, rev in filemap[dest]:
2000 2003 fm.startitem()
2001 2004 ctx = repo[rev]
2002 2005 _exportsingle(
2003 2006 repo, ctx, fm, match, switch_parent, seqno, diffopts
2004 2007 )
2005 2008
2006 2009
2007 2010 def _prefetchchangedfiles(repo, revs, match):
2008 2011 allfiles = set()
2009 2012 for rev in revs:
2010 2013 for file in repo[rev].files():
2011 2014 if not match or match(file):
2012 2015 allfiles.add(file)
2013 2016 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
2014 2017
2015 2018
2016 2019 def export(
2017 2020 repo,
2018 2021 revs,
2019 2022 basefm,
2020 2023 fntemplate=b'hg-%h.patch',
2021 2024 switch_parent=False,
2022 2025 opts=None,
2023 2026 match=None,
2024 2027 ):
2025 2028 '''export changesets as hg patches
2026 2029
2027 2030 Args:
2028 2031 repo: The repository from which we're exporting revisions.
2029 2032 revs: A list of revisions to export as revision numbers.
2030 2033 basefm: A formatter to which patches should be written.
2031 2034 fntemplate: An optional string to use for generating patch file names.
2032 2035 switch_parent: If True, show diffs against second parent when not nullid.
2033 2036 Default is false, which always shows diff against p1.
2034 2037 opts: diff options to use for generating the patch.
2035 2038 match: If specified, only export changes to files matching this matcher.
2036 2039
2037 2040 Returns:
2038 2041 Nothing.
2039 2042
2040 2043 Side Effect:
2041 2044 "HG Changeset Patch" data is emitted to one of the following
2042 2045 destinations:
2043 2046 fntemplate specified: Each rev is written to a unique file named using
2044 2047 the given template.
2045 2048 Otherwise: All revs will be written to basefm.
2046 2049 '''
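# Note on fntemplate: it is expanded per revision by makefilename(), so the
# default b'hg-%h.patch' writes e.g. "hg-1234567890ab.patch", %h being the
# short changeset hash (this is only an illustration of the default).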
2047 2050 _prefetchchangedfiles(repo, revs, match)
2048 2051
2049 2052 if not fntemplate:
2050 2053 _exportfile(
2051 2054 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2052 2055 )
2053 2056 else:
2054 2057 _exportfntemplate(
2055 2058 repo, revs, basefm, fntemplate, switch_parent, opts, match
2056 2059 )
2057 2060
2058 2061
2059 2062 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2060 2063 """Export changesets to the given file stream"""
2061 2064 _prefetchchangedfiles(repo, revs, match)
2062 2065
2063 2066 dest = getattr(fp, 'name', b'<unnamed>')
2064 2067 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2065 2068 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2066 2069
2067 2070
2068 2071 def showmarker(fm, marker, index=None):
2069 2072 """utility function to display obsolescence marker in a readable way
2070 2073
2071 2074 To be used by debug function."""
2072 2075 if index is not None:
2073 2076 fm.write(b'index', b'%i ', index)
2074 2077 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2075 2078 succs = marker.succnodes()
2076 2079 fm.condwrite(
2077 2080 succs,
2078 2081 b'succnodes',
2079 2082 b'%s ',
2080 2083 fm.formatlist(map(hex, succs), name=b'node'),
2081 2084 )
2082 2085 fm.write(b'flag', b'%X ', marker.flags())
2083 2086 parents = marker.parentnodes()
2084 2087 if parents is not None:
2085 2088 fm.write(
2086 2089 b'parentnodes',
2087 2090 b'{%s} ',
2088 2091 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2089 2092 )
2090 2093 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2091 2094 meta = marker.metadata().copy()
2092 2095 meta.pop(b'date', None)
2093 2096 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2094 2097 fm.write(
2095 2098 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2096 2099 )
2097 2100 fm.plain(b'\n')
2098 2101
2099 2102
2100 2103 def finddate(ui, repo, date):
2101 2104 """Find the tipmost changeset that matches the given date spec"""
2102 2105
2103 2106 df = dateutil.matchdate(date)
2104 2107 m = scmutil.matchall(repo)
2105 2108 results = {}
2106 2109
2107 2110 def prep(ctx, fns):
2108 2111 d = ctx.date()
2109 2112 if df(d[0]):
2110 2113 results[ctx.rev()] = d
2111 2114
2112 2115 for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
2113 2116 rev = ctx.rev()
2114 2117 if rev in results:
2115 2118 ui.status(
2116 2119 _(b"found revision %d from %s\n")
2117 2120 % (rev, dateutil.datestr(results[rev]))
2118 2121 )
2119 2122 return b'%d' % rev
2120 2123
2121 2124 raise error.Abort(_(b"revision matching date not found"))
2122 2125
2123 2126
2124 2127 def increasingwindows(windowsize=8, sizelimit=512):
2125 2128 while True:
2126 2129 yield windowsize
2127 2130 if windowsize < sizelimit:
2128 2131 windowsize *= 2
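# With the defaults above this yields 8, 16, 32, ..., 512 and then keeps
# yielding 512 forever; callers are expected to break out of the loop
# themselves (see walkchangerevs() below).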
2129 2132
2130 2133
2131 2134 def _walkrevs(repo, opts):
2132 2135 # Default --rev value depends on --follow but --follow behavior
2133 2136 # depends on revisions resolved from --rev...
2134 2137 follow = opts.get(b'follow') or opts.get(b'follow_first')
2135 2138 if opts.get(b'rev'):
2136 2139 revs = scmutil.revrange(repo, opts[b'rev'])
2137 2140 elif follow and repo.dirstate.p1() == nullid:
2138 2141 revs = smartset.baseset()
2139 2142 elif follow:
2140 2143 revs = repo.revs(b'reverse(:.)')
2141 2144 else:
2142 2145 revs = smartset.spanset(repo)
2143 2146 revs.reverse()
2144 2147 return revs
2145 2148
2146 2149
2147 2150 class FileWalkError(Exception):
2148 2151 pass
2149 2152
2150 2153
2151 2154 def walkfilerevs(repo, match, follow, revs, fncache):
2152 2155 '''Walks the file history for the matched files.
2153 2156
2154 2157 Returns the changeset revs that are involved in the file history.
2155 2158
2156 2159 Throws FileWalkError if the file history can't be walked using
2157 2160 filelogs alone.
2158 2161 '''
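# Note: fncache is also filled in as a side effect, mapping each wanted rev
# to the list of matched files it touches (see the setdefault() call below).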
2159 2162 wanted = set()
2160 2163 copies = []
2161 2164 minrev, maxrev = min(revs), max(revs)
2162 2165
2163 2166 def filerevs(filelog, last):
2164 2167 """
2165 2168 Only files, no patterns. Check the history of each file.
2166 2169
2167 2170 Examines filelog entries within minrev, maxrev linkrev range
2168 2171 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2169 2172 tuples in backwards order
2170 2173 """
2171 2174 cl_count = len(repo)
2172 2175 revs = []
2173 2176 for j in pycompat.xrange(0, last + 1):
2174 2177 linkrev = filelog.linkrev(j)
2175 2178 if linkrev < minrev:
2176 2179 continue
2177 2180 # only yield revs for which we have the changelog; it can
2178 2181 # happen while doing "hg log" during a pull or commit
2179 2182 if linkrev >= cl_count:
2180 2183 break
2181 2184
2182 2185 parentlinkrevs = []
2183 2186 for p in filelog.parentrevs(j):
2184 2187 if p != nullrev:
2185 2188 parentlinkrevs.append(filelog.linkrev(p))
2186 2189 n = filelog.node(j)
2187 2190 revs.append(
2188 2191 (linkrev, parentlinkrevs, follow and filelog.renamed(n))
2189 2192 )
2190 2193
2191 2194 return reversed(revs)
2192 2195
2193 2196 def iterfiles():
2194 2197 pctx = repo[b'.']
2195 2198 for filename in match.files():
2196 2199 if follow:
2197 2200 if filename not in pctx:
2198 2201 raise error.Abort(
2199 2202 _(
2200 2203 b'cannot follow file not in parent '
2201 2204 b'revision: "%s"'
2202 2205 )
2203 2206 % filename
2204 2207 )
2205 2208 yield filename, pctx[filename].filenode()
2206 2209 else:
2207 2210 yield filename, None
2208 2211 for filename_node in copies:
2209 2212 yield filename_node
2210 2213
2211 2214 for file_, node in iterfiles():
2212 2215 filelog = repo.file(file_)
2213 2216 if not len(filelog):
2214 2217 if node is None:
2215 2218 # A zero count may be a directory or deleted file, so
2216 2219 # try to find matching entries on the slow path.
2217 2220 if follow:
2218 2221 raise error.Abort(
2219 2222 _(b'cannot follow nonexistent file: "%s"') % file_
2220 2223 )
2221 2224 raise FileWalkError(b"Cannot walk via filelog")
2222 2225 else:
2223 2226 continue
2224 2227
2225 2228 if node is None:
2226 2229 last = len(filelog) - 1
2227 2230 else:
2228 2231 last = filelog.rev(node)
2229 2232
2230 2233 # keep track of all ancestors of the file
2231 2234 ancestors = {filelog.linkrev(last)}
2232 2235
2233 2236 # iterate from latest to oldest revision
2234 2237 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
2235 2238 if not follow:
2236 2239 if rev > maxrev:
2237 2240 continue
2238 2241 else:
2239 2242 # Note that last might not be the first interesting
2240 2243 # rev to us:
2241 2244 # if the file has been changed after maxrev, we'll
2242 2245 # have linkrev(last) > maxrev, and we still need
2243 2246 # to explore the file graph
2244 2247 if rev not in ancestors:
2245 2248 continue
2246 2249 # XXX insert 1327 fix here
2247 2250 if flparentlinkrevs:
2248 2251 ancestors.update(flparentlinkrevs)
2249 2252
2250 2253 fncache.setdefault(rev, []).append(file_)
2251 2254 wanted.add(rev)
2252 2255 if copied:
2253 2256 copies.append(copied)
2254 2257
2255 2258 return wanted
2256 2259
2257 2260
2258 2261 class _followfilter(object):
2259 2262 def __init__(self, repo, onlyfirst=False):
2260 2263 self.repo = repo
2261 2264 self.startrev = nullrev
2262 2265 self.roots = set()
2263 2266 self.onlyfirst = onlyfirst
2264 2267
2265 2268 def match(self, rev):
2266 2269 def realparents(rev):
2267 2270 if self.onlyfirst:
2268 2271 return self.repo.changelog.parentrevs(rev)[0:1]
2269 2272 else:
2270 2273 return filter(
2271 2274 lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
2272 2275 )
2273 2276
2274 2277 if self.startrev == nullrev:
2275 2278 self.startrev = rev
2276 2279 return True
2277 2280
2278 2281 if rev > self.startrev:
2279 2282 # forward: all descendants
2280 2283 if not self.roots:
2281 2284 self.roots.add(self.startrev)
2282 2285 for parent in realparents(rev):
2283 2286 if parent in self.roots:
2284 2287 self.roots.add(rev)
2285 2288 return True
2286 2289 else:
2287 2290 # backwards: all parents
2288 2291 if not self.roots:
2289 2292 self.roots.update(realparents(self.startrev))
2290 2293 if rev in self.roots:
2291 2294 self.roots.remove(rev)
2292 2295 self.roots.update(realparents(rev))
2293 2296 return True
2294 2297
2295 2298 return False
2296 2299
2297 2300
2298 2301 def walkchangerevs(repo, match, opts, prepare):
2299 2302 '''Iterate over files and the revs in which they changed.
2300 2303
2301 2304 Callers most commonly need to iterate backwards over the history
2302 2305 in which they are interested. Doing so has awful (quadratic-looking)
2303 2306 performance, so we use iterators in a "windowed" way.
2304 2307
2305 2308 We walk a window of revisions in the desired order. Within the
2306 2309 window, we first walk forwards to gather data, then in the desired
2307 2310 order (usually backwards) to display it.
2308 2311
2309 2312 This function returns an iterator yielding contexts. Before
2310 2313 yielding each context, the iterator will first call the prepare
2311 2314 function on each context in the window in forward order.'''
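# A typical caller pattern looks like the following sketch (see finddate()
# above for a concrete in-tree example):
#
#     def prep(ctx, fns):
#         ...  # gather per-revision data
#
#     for ctx in walkchangerevs(repo, match, opts, prep):
#         ...  # display ctx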
2312 2315
2313 2316 allfiles = opts.get(b'all_files')
2314 2317 follow = opts.get(b'follow') or opts.get(b'follow_first')
2315 2318 revs = _walkrevs(repo, opts)
2316 2319 if not revs:
2317 2320 return []
2318 2321 wanted = set()
2319 2322 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
2320 2323 fncache = {}
2321 2324 change = repo.__getitem__
2322 2325
2323 2326 # First step is to fill wanted, the set of revisions that we want to yield.
2324 2327 # When it does not induce extra cost, we also fill fncache for revisions in
2325 2328 # wanted: a cache of filenames that were changed (ctx.files()) and that
2326 2329 # match the file filtering conditions.
2327 2330
2328 2331 if match.always() or allfiles:
2329 2332 # No files, no patterns. Display all revs.
2330 2333 wanted = revs
2331 2334 elif not slowpath:
2332 2335 # We only have to read through the filelog to find wanted revisions
2333 2336
2334 2337 try:
2335 2338 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2336 2339 except FileWalkError:
2337 2340 slowpath = True
2338 2341
2339 2342 # We decided to fall back to the slowpath because at least one
2340 2343 # of the paths was not a file. Check to see if at least one of them
2341 2344 # existed in history, otherwise simply return
2342 2345 for path in match.files():
2343 2346 if path == b'.' or path in repo.store:
2344 2347 break
2345 2348 else:
2346 2349 return []
2347 2350
2348 2351 if slowpath:
2349 2352 # We have to read the changelog to match filenames against
2350 2353 # changed files
2351 2354
2352 2355 if follow:
2353 2356 raise error.Abort(
2354 2357 _(b'can only follow copies/renames for explicit filenames')
2355 2358 )
2356 2359
2357 2360 # The slow path checks files modified in every changeset.
2358 2361 # This is really slow on large repos, so compute the set lazily.
2359 2362 class lazywantedset(object):
2360 2363 def __init__(self):
2361 2364 self.set = set()
2362 2365 self.revs = set(revs)
2363 2366
2364 2367 # No need to worry about locality here because it will be accessed
2365 2368 # in the same order as the increasing window below.
2366 2369 def __contains__(self, value):
2367 2370 if value in self.set:
2368 2371 return True
2369 2372 elif not value in self.revs:
2370 2373 return False
2371 2374 else:
2372 2375 self.revs.discard(value)
2373 2376 ctx = change(value)
2374 2377 if allfiles:
2375 2378 matches = list(ctx.manifest().walk(match))
2376 2379 else:
2377 2380 matches = [f for f in ctx.files() if match(f)]
2378 2381 if matches:
2379 2382 fncache[value] = matches
2380 2383 self.set.add(value)
2381 2384 return True
2382 2385 return False
2383 2386
2384 2387 def discard(self, value):
2385 2388 self.revs.discard(value)
2386 2389 self.set.discard(value)
2387 2390
2388 2391 wanted = lazywantedset()
2389 2392
2390 2393 # it might be worthwhile to do this in the iterator if the rev range
2391 2394 # is descending and the prune args are all within that range
2392 2395 for rev in opts.get(b'prune', ()):
2393 2396 rev = repo[rev].rev()
2394 2397 ff = _followfilter(repo)
2395 2398 stop = min(revs[0], revs[-1])
2396 2399 for x in pycompat.xrange(rev, stop - 1, -1):
2397 2400 if ff.match(x):
2398 2401 wanted = wanted - [x]
2399 2402
2400 2403 # Now that wanted is correctly initialized, we can iterate over the
2401 2404 # revision range, yielding only revisions in wanted.
2402 2405 def iterate():
2403 2406 if follow and match.always():
2404 2407 ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
2405 2408
2406 2409 def want(rev):
2407 2410 return ff.match(rev) and rev in wanted
2408 2411
2409 2412 else:
2410 2413
2411 2414 def want(rev):
2412 2415 return rev in wanted
2413 2416
2414 2417 it = iter(revs)
2415 2418 stopiteration = False
2416 2419 for windowsize in increasingwindows():
2417 2420 nrevs = []
2418 2421 for i in pycompat.xrange(windowsize):
2419 2422 rev = next(it, None)
2420 2423 if rev is None:
2421 2424 stopiteration = True
2422 2425 break
2423 2426 elif want(rev):
2424 2427 nrevs.append(rev)
2425 2428 for rev in sorted(nrevs):
2426 2429 fns = fncache.get(rev)
2427 2430 ctx = change(rev)
2428 2431 if not fns:
2429 2432
2430 2433 def fns_generator():
2431 2434 if allfiles:
2432 2435
2433 2436 def bad(f, msg):
2434 2437 pass
2435 2438
2436 2439 for f in ctx.matches(matchmod.badmatch(match, bad)):
2437 2440 yield f
2438 2441 else:
2439 2442 for f in ctx.files():
2440 2443 if match(f):
2441 2444 yield f
2442 2445
2443 2446 fns = fns_generator()
2444 2447 prepare(ctx, fns)
2445 2448 for rev in nrevs:
2446 2449 yield change(rev)
2447 2450
2448 2451 if stopiteration:
2449 2452 break
2450 2453
2451 2454 return iterate()
2452 2455
2453 2456
2454 2457 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2455 2458 bad = []
2456 2459
2457 2460 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2458 2461 names = []
2459 2462 wctx = repo[None]
2460 2463 cca = None
2461 2464 abort, warn = scmutil.checkportabilityalert(ui)
2462 2465 if abort or warn:
2463 2466 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2464 2467
2465 2468 match = repo.narrowmatch(match, includeexact=True)
2466 2469 badmatch = matchmod.badmatch(match, badfn)
2467 2470 dirstate = repo.dirstate
2468 2471 # We don't want to just call wctx.walk here, since it would return a lot of
2469 2472 # clean files, which we aren't interested in and takes time.
2470 2473 for f in sorted(
2471 2474 dirstate.walk(
2472 2475 badmatch,
2473 2476 subrepos=sorted(wctx.substate),
2474 2477 unknown=True,
2475 2478 ignored=False,
2476 2479 full=False,
2477 2480 )
2478 2481 ):
2479 2482 exact = match.exact(f)
2480 2483 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2481 2484 if cca:
2482 2485 cca(f)
2483 2486 names.append(f)
2484 2487 if ui.verbose or not exact:
2485 2488 ui.status(
2486 2489 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2487 2490 )
2488 2491
2489 2492 for subpath in sorted(wctx.substate):
2490 2493 sub = wctx.sub(subpath)
2491 2494 try:
2492 2495 submatch = matchmod.subdirmatcher(subpath, match)
2493 2496 subprefix = repo.wvfs.reljoin(prefix, subpath)
2494 2497 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2495 2498 if opts.get('subrepos'):
2496 2499 bad.extend(
2497 2500 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2498 2501 )
2499 2502 else:
2500 2503 bad.extend(
2501 2504 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2502 2505 )
2503 2506 except error.LookupError:
2504 2507 ui.status(
2505 2508 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2506 2509 )
2507 2510
2508 2511 if not opts.get('dry_run'):
2509 2512 rejected = wctx.add(names, prefix)
2510 2513 bad.extend(f for f in rejected if f in match.files())
2511 2514 return bad
2512 2515
2513 2516
2514 2517 def addwebdirpath(repo, serverpath, webconf):
2515 2518 webconf[serverpath] = repo.root
2516 2519 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2517 2520
2518 2521 for r in repo.revs(b'filelog("path:.hgsub")'):
2519 2522 ctx = repo[r]
2520 2523 for subpath in ctx.substate:
2521 2524 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2522 2525
2523 2526
2524 2527 def forget(
2525 2528 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2526 2529 ):
2527 2530 if dryrun and interactive:
2528 2531 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2529 2532 bad = []
2530 2533 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2531 2534 wctx = repo[None]
2532 2535 forgot = []
2533 2536
2534 2537 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2535 2538 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2536 2539 if explicitonly:
2537 2540 forget = [f for f in forget if match.exact(f)]
2538 2541
2539 2542 for subpath in sorted(wctx.substate):
2540 2543 sub = wctx.sub(subpath)
2541 2544 submatch = matchmod.subdirmatcher(subpath, match)
2542 2545 subprefix = repo.wvfs.reljoin(prefix, subpath)
2543 2546 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2544 2547 try:
2545 2548 subbad, subforgot = sub.forget(
2546 2549 submatch,
2547 2550 subprefix,
2548 2551 subuipathfn,
2549 2552 dryrun=dryrun,
2550 2553 interactive=interactive,
2551 2554 )
2552 2555 bad.extend([subpath + b'/' + f for f in subbad])
2553 2556 forgot.extend([subpath + b'/' + f for f in subforgot])
2554 2557 except error.LookupError:
2555 2558 ui.status(
2556 2559 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2557 2560 )
2558 2561
2559 2562 if not explicitonly:
2560 2563 for f in match.files():
2561 2564 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2562 2565 if f not in forgot:
2563 2566 if repo.wvfs.exists(f):
2564 2567 # Don't complain if the exact case match wasn't given.
2565 2568 # But don't do this until after checking 'forgot', so
2566 2569 # that subrepo files aren't normalized, and this op is
2567 2570 # purely from data cached by the status walk above.
2568 2571 if repo.dirstate.normalize(f) in repo.dirstate:
2569 2572 continue
2570 2573 ui.warn(
2571 2574 _(
2572 2575 b'not removing %s: '
2573 2576 b'file is already untracked\n'
2574 2577 )
2575 2578 % uipathfn(f)
2576 2579 )
2577 2580 bad.append(f)
2578 2581
2579 2582 if interactive:
2580 2583 responses = _(
2581 2584 b'[Ynsa?]'
2582 2585 b'$$ &Yes, forget this file'
2583 2586 b'$$ &No, skip this file'
2584 2587 b'$$ &Skip remaining files'
2585 2588 b'$$ Include &all remaining files'
2586 2589 b'$$ &? (display help)'
2587 2590 )
2588 2591 for filename in forget[:]:
2589 2592 r = ui.promptchoice(
2590 2593 _(b'forget %s %s') % (uipathfn(filename), responses)
2591 2594 )
2592 2595 if r == 4: # ?
2593 2596 while r == 4:
2594 2597 for c, t in ui.extractchoices(responses)[1]:
2595 2598 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2596 2599 r = ui.promptchoice(
2597 2600 _(b'forget %s %s') % (uipathfn(filename), responses)
2598 2601 )
2599 2602 if r == 0: # yes
2600 2603 continue
2601 2604 elif r == 1: # no
2602 2605 forget.remove(filename)
2603 2606 elif r == 2: # Skip
2604 2607 fnindex = forget.index(filename)
2605 2608 del forget[fnindex:]
2606 2609 break
2607 2610 elif r == 3: # All
2608 2611 break
2609 2612
2610 2613 for f in forget:
2611 2614 if ui.verbose or not match.exact(f) or interactive:
2612 2615 ui.status(
2613 2616 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2614 2617 )
2615 2618
2616 2619 if not dryrun:
2617 2620 rejected = wctx.forget(forget, prefix)
2618 2621 bad.extend(f for f in rejected if f in match.files())
2619 2622 forgot.extend(f for f in forget if f not in rejected)
2620 2623 return bad, forgot
2621 2624
2622 2625
2623 2626 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2624 2627 ret = 1
2625 2628
2626 2629 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2627 2630 for f in ctx.matches(m):
2628 2631 fm.startitem()
2629 2632 fm.context(ctx=ctx)
2630 2633 if needsfctx:
2631 2634 fc = ctx[f]
2632 2635 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2633 2636 fm.data(path=f)
2634 2637 fm.plain(fmt % uipathfn(f))
2635 2638 ret = 0
2636 2639
2637 2640 for subpath in sorted(ctx.substate):
2638 2641 submatch = matchmod.subdirmatcher(subpath, m)
2639 2642 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2640 2643 if subrepos or m.exact(subpath) or any(submatch.files()):
2641 2644 sub = ctx.sub(subpath)
2642 2645 try:
2643 2646 recurse = m.exact(subpath) or subrepos
2644 2647 if (
2645 2648 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2646 2649 == 0
2647 2650 ):
2648 2651 ret = 0
2649 2652 except error.LookupError:
2650 2653 ui.status(
2651 2654 _(b"skipping missing subrepository: %s\n")
2652 2655 % uipathfn(subpath)
2653 2656 )
2654 2657
2655 2658 return ret
2656 2659
2657 2660
2658 2661 def remove(
2659 2662 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2660 2663 ):
2661 2664 ret = 0
2662 2665 s = repo.status(match=m, clean=True)
2663 2666 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2664 2667
2665 2668 wctx = repo[None]
2666 2669
2667 2670 if warnings is None:
2668 2671 warnings = []
2669 2672 warn = True
2670 2673 else:
2671 2674 warn = False
2672 2675
2673 2676 subs = sorted(wctx.substate)
2674 2677 progress = ui.makeprogress(
2675 2678 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2676 2679 )
2677 2680 for subpath in subs:
2678 2681 submatch = matchmod.subdirmatcher(subpath, m)
2679 2682 subprefix = repo.wvfs.reljoin(prefix, subpath)
2680 2683 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2681 2684 if subrepos or m.exact(subpath) or any(submatch.files()):
2682 2685 progress.increment()
2683 2686 sub = wctx.sub(subpath)
2684 2687 try:
2685 2688 if sub.removefiles(
2686 2689 submatch,
2687 2690 subprefix,
2688 2691 subuipathfn,
2689 2692 after,
2690 2693 force,
2691 2694 subrepos,
2692 2695 dryrun,
2693 2696 warnings,
2694 2697 ):
2695 2698 ret = 1
2696 2699 except error.LookupError:
2697 2700 warnings.append(
2698 2701 _(b"skipping missing subrepository: %s\n")
2699 2702 % uipathfn(subpath)
2700 2703 )
2701 2704 progress.complete()
2702 2705
2703 2706 # warn about failure to delete explicit files/dirs
2704 2707 deleteddirs = pathutil.dirs(deleted)
2705 2708 files = m.files()
2706 2709 progress = ui.makeprogress(
2707 2710 _(b'deleting'), total=len(files), unit=_(b'files')
2708 2711 )
2709 2712 for f in files:
2710 2713
2711 2714 def insubrepo():
2712 2715 for subpath in wctx.substate:
2713 2716 if f.startswith(subpath + b'/'):
2714 2717 return True
2715 2718 return False
2716 2719
2717 2720 progress.increment()
2718 2721 isdir = f in deleteddirs or wctx.hasdir(f)
2719 2722 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2720 2723 continue
2721 2724
2722 2725 if repo.wvfs.exists(f):
2723 2726 if repo.wvfs.isdir(f):
2724 2727 warnings.append(
2725 2728 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2726 2729 )
2727 2730 else:
2728 2731 warnings.append(
2729 2732 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2730 2733 )
2731 2734 # missing files will generate a warning elsewhere
2732 2735 ret = 1
2733 2736 progress.complete()
2734 2737
2735 2738 if force:
2736 2739 list = modified + deleted + clean + added
2737 2740 elif after:
2738 2741 list = deleted
2739 2742 remaining = modified + added + clean
2740 2743 progress = ui.makeprogress(
2741 2744 _(b'skipping'), total=len(remaining), unit=_(b'files')
2742 2745 )
2743 2746 for f in remaining:
2744 2747 progress.increment()
2745 2748 if ui.verbose or (f in files):
2746 2749 warnings.append(
2747 2750 _(b'not removing %s: file still exists\n') % uipathfn(f)
2748 2751 )
2749 2752 ret = 1
2750 2753 progress.complete()
2751 2754 else:
2752 2755 list = deleted + clean
2753 2756 progress = ui.makeprogress(
2754 2757 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2755 2758 )
2756 2759 for f in modified:
2757 2760 progress.increment()
2758 2761 warnings.append(
2759 2762 _(
2760 2763 b'not removing %s: file is modified (use -f'
2761 2764 b' to force removal)\n'
2762 2765 )
2763 2766 % uipathfn(f)
2764 2767 )
2765 2768 ret = 1
2766 2769 for f in added:
2767 2770 progress.increment()
2768 2771 warnings.append(
2769 2772 _(
2770 2773 b"not removing %s: file has been marked for add"
2771 2774 b" (use 'hg forget' to undo add)\n"
2772 2775 )
2773 2776 % uipathfn(f)
2774 2777 )
2775 2778 ret = 1
2776 2779 progress.complete()
2777 2780
2778 2781 list = sorted(list)
2779 2782 progress = ui.makeprogress(
2780 2783 _(b'deleting'), total=len(list), unit=_(b'files')
2781 2784 )
2782 2785 for f in list:
2783 2786 if ui.verbose or not m.exact(f):
2784 2787 progress.increment()
2785 2788 ui.status(
2786 2789 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2787 2790 )
2788 2791 progress.complete()
2789 2792
2790 2793 if not dryrun:
2791 2794 with repo.wlock():
2792 2795 if not after:
2793 2796 for f in list:
2794 2797 if f in added:
2795 2798 continue # we never unlink added files on remove
2796 2799 rmdir = repo.ui.configbool(
2797 2800 b'experimental', b'removeemptydirs'
2798 2801 )
2799 2802 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2800 2803 repo[None].forget(list)
2801 2804
2802 2805 if warn:
2803 2806 for warning in warnings:
2804 2807 ui.warn(warning)
2805 2808
2806 2809 return ret
2807 2810
2808 2811
2809 2812 def _catfmtneedsdata(fm):
2810 2813 return not fm.datahint() or b'data' in fm.datahint()
2811 2814
2812 2815
2813 2816 def _updatecatformatter(fm, ctx, matcher, path, decode):
2814 2817 """Hook for adding data to the formatter used by ``hg cat``.
2815 2818
2816 2819 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2817 2820 this method first."""
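# A hypothetical wrapper (illustration only, not an in-tree API user) would
# look like:
#
#     def wrapped(orig, fm, ctx, matcher, path, decode):
#         orig(fm, ctx, matcher, path, decode)
#         fm.data(mykey=b'myvalue')
#
#     extensions.wrapfunction(cmdutil, '_updatecatformatter', wrapped)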
2818 2821
2819 2822 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2820 2823 # wasn't requested.
2821 2824 data = b''
2822 2825 if _catfmtneedsdata(fm):
2823 2826 data = ctx[path].data()
2824 2827 if decode:
2825 2828 data = ctx.repo().wwritedata(path, data)
2826 2829 fm.startitem()
2827 2830 fm.context(ctx=ctx)
2828 2831 fm.write(b'data', b'%s', data)
2829 2832 fm.data(path=path)
2830 2833
2831 2834
2832 2835 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2833 2836 err = 1
2834 2837 opts = pycompat.byteskwargs(opts)
2835 2838
2836 2839 def write(path):
2837 2840 filename = None
2838 2841 if fntemplate:
2839 2842 filename = makefilename(
2840 2843 ctx, fntemplate, pathname=os.path.join(prefix, path)
2841 2844 )
2842 2845 # attempt to create the directory if it does not already exist
2843 2846 try:
2844 2847 os.makedirs(os.path.dirname(filename))
2845 2848 except OSError:
2846 2849 pass
2847 2850 with formatter.maybereopen(basefm, filename) as fm:
2848 2851 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2849 2852
2850 2853 # Automation often uses hg cat on single files, so special case it
2851 2854 # for performance to avoid the cost of parsing the manifest.
2852 2855 if len(matcher.files()) == 1 and not matcher.anypats():
2853 2856 file = matcher.files()[0]
2854 2857 mfl = repo.manifestlog
2855 2858 mfnode = ctx.manifestnode()
2856 2859 try:
2857 2860 if mfnode and mfl[mfnode].find(file)[0]:
2858 2861 if _catfmtneedsdata(basefm):
2859 2862 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2860 2863 write(file)
2861 2864 return 0
2862 2865 except KeyError:
2863 2866 pass
2864 2867
2865 2868 if _catfmtneedsdata(basefm):
2866 2869 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2867 2870
2868 2871 for abs in ctx.walk(matcher):
2869 2872 write(abs)
2870 2873 err = 0
2871 2874
2872 2875 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2873 2876 for subpath in sorted(ctx.substate):
2874 2877 sub = ctx.sub(subpath)
2875 2878 try:
2876 2879 submatch = matchmod.subdirmatcher(subpath, matcher)
2877 2880 subprefix = os.path.join(prefix, subpath)
2878 2881 if not sub.cat(
2879 2882 submatch,
2880 2883 basefm,
2881 2884 fntemplate,
2882 2885 subprefix,
2883 2886 **pycompat.strkwargs(opts)
2884 2887 ):
2885 2888 err = 0
2886 2889 except error.RepoLookupError:
2887 2890 ui.status(
2888 2891 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2889 2892 )
2890 2893
2891 2894 return err
2892 2895
2893 2896
2894 2897 def commit(ui, repo, commitfunc, pats, opts):
2895 2898 '''commit the specified files or all outstanding changes'''
2896 2899 date = opts.get(b'date')
2897 2900 if date:
2898 2901 opts[b'date'] = dateutil.parsedate(date)
2899 2902 message = logmessage(ui, opts)
2900 2903 matcher = scmutil.match(repo[None], pats, opts)
2901 2904
2902 2905 dsguard = None
2903 2906 # extract addremove carefully -- this function can be called from a command
2904 2907 # that doesn't support addremove
2905 2908 if opts.get(b'addremove'):
2906 2909 dsguard = dirstateguard.dirstateguard(repo, b'commit')
2907 2910 with dsguard or util.nullcontextmanager():
2908 2911 if dsguard:
2909 2912 relative = scmutil.anypats(pats, opts)
2910 2913 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2911 2914 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
2912 2915 raise error.Abort(
2913 2916 _(b"failed to mark all new/missing files as added/removed")
2914 2917 )
2915 2918
2916 2919 return commitfunc(ui, repo, message, matcher, opts)
2917 2920
2918 2921
2919 2922 def samefile(f, ctx1, ctx2):
2920 2923 if f in ctx1.manifest():
2921 2924 a = ctx1.filectx(f)
2922 2925 if f in ctx2.manifest():
2923 2926 b = ctx2.filectx(f)
2924 2927 return not a.cmp(b) and a.flags() == b.flags()
2925 2928 else:
2926 2929 return False
2927 2930 else:
2928 2931 return f not in ctx2.manifest()
2929 2932
2930 2933
2931 2934 def amend(ui, repo, old, extra, pats, opts):
2932 2935 # avoid cycle context -> subrepo -> cmdutil
2933 2936 from . import context
2934 2937
2935 2938 # amend will reuse the existing user if not specified, but the obsolete
2936 2939 # marker creation requires that the current user's name is specified.
2937 2940 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2938 2941 ui.username() # raise exception if username not set
2939 2942
2940 2943 ui.note(_(b'amending changeset %s\n') % old)
2941 2944 base = old.p1()
2942 2945
2943 2946 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2944 2947 # Participating changesets:
2945 2948 #
2946 2949 # wctx o - workingctx that contains changes from working copy
2947 2950 # | to go into amending commit
2948 2951 # |
2949 2952 # old o - changeset to amend
2950 2953 # |
2951 2954 # base o - first parent of the changeset to amend
2952 2955 wctx = repo[None]
2953 2956
2954 2957 # Copy to avoid mutating input
2955 2958 extra = extra.copy()
2956 2959 # Update extra dict from amended commit (e.g. to preserve graft
2957 2960 # source)
2958 2961 extra.update(old.extra())
2959 2962
2960 2963 # Also update it from the wctx
2961 2964 extra.update(wctx.extra())
2962 2965
2963 2966 # date-only change should be ignored?
2964 2967 datemaydiffer = resolvecommitoptions(ui, opts)
2965 2968
2966 2969 date = old.date()
2967 2970 if opts.get(b'date'):
2968 2971 date = dateutil.parsedate(opts.get(b'date'))
2969 2972 user = opts.get(b'user') or old.user()
2970 2973
2971 2974 if len(old.parents()) > 1:
2972 2975 # ctx.files() isn't reliable for merges, so fall back to the
2973 2976 # slower repo.status() method
2974 2977 st = base.status(old)
2975 2978 files = set(st.modified) | set(st.added) | set(st.removed)
2976 2979 else:
2977 2980 files = set(old.files())
2978 2981
2979 2982 # add/remove the files to the working copy if the "addremove" option
2980 2983 # was specified.
2981 2984 matcher = scmutil.match(wctx, pats, opts)
2982 2985 relative = scmutil.anypats(pats, opts)
2983 2986 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2984 2987 if opts.get(b'addremove') and scmutil.addremove(
2985 2988 repo, matcher, b"", uipathfn, opts
2986 2989 ):
2987 2990 raise error.Abort(
2988 2991 _(b"failed to mark all new/missing files as added/removed")
2989 2992 )
2990 2993
2991 2994 # Check subrepos. This depends on in-place wctx._status update in
2992 2995 # subrepo.precommit(). To minimize the risk of this hack, we do
2993 2996 # nothing if .hgsub does not exist.
2994 2997 if b'.hgsub' in wctx or b'.hgsub' in old:
2995 2998 subs, commitsubs, newsubstate = subrepoutil.precommit(
2996 2999 ui, wctx, wctx._status, matcher
2997 3000 )
2998 3001 # amend should abort if commitsubrepos is enabled
2999 3002 assert not commitsubs
3000 3003 if subs:
3001 3004 subrepoutil.writestate(repo, newsubstate)
3002 3005
3003 3006 ms = mergemod.mergestate.read(repo)
3004 3007 mergeutil.checkunresolved(ms)
3005 3008
3006 3009 filestoamend = set(f for f in wctx.files() if matcher(f))
3007 3010
3008 3011 changes = len(filestoamend) > 0
3009 3012 if changes:
3010 3013 # Recompute copies (avoid recording a -> b -> a)
3011 3014 copied = copies.pathcopies(base, wctx, matcher)
3012 3015 if old.p2().rev() != nullrev:
3013 3016 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3014 3017
3015 3018 # Prune files which were reverted by the updates: if old
3016 3019 # introduced file X and the file was renamed in the working
3017 3020 # copy, then those two files are the same and
3018 3021 # we can discard X from our list of files. Likewise if X
3019 3022 # was removed, it's no longer relevant. If X is missing (aka
3020 3023 # deleted), old X must be preserved.
3021 3024 files.update(filestoamend)
3022 3025 files = [
3023 3026 f
3024 3027 for f in files
3025 3028 if (f not in filestoamend or not samefile(f, wctx, base))
3026 3029 ]
3027 3030
3028 3031 def filectxfn(repo, ctx_, path):
3029 3032 try:
3030 3033 # If the file being considered is not amongst the files
3031 3034 # to be amended, we should return the file context from the
3032 3035 # old changeset. This avoids issues when only some files in
3033 3036 # the working copy are being amended but there are also
3034 3037 # changes to other files from the old changeset.
3035 3038 if path not in filestoamend:
3036 3039 return old.filectx(path)
3037 3040
3038 3041 # Return None for removed files.
3039 3042 if path in wctx.removed():
3040 3043 return None
3041 3044
3042 3045 fctx = wctx[path]
3043 3046 flags = fctx.flags()
3044 3047 mctx = context.memfilectx(
3045 3048 repo,
3046 3049 ctx_,
3047 3050 fctx.path(),
3048 3051 fctx.data(),
3049 3052 islink=b'l' in flags,
3050 3053 isexec=b'x' in flags,
3051 3054 copysource=copied.get(path),
3052 3055 )
3053 3056 return mctx
3054 3057 except KeyError:
3055 3058 return None
3056 3059
3057 3060 else:
3058 3061 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3059 3062
3060 3063 # Use version of files as in the old cset
3061 3064 def filectxfn(repo, ctx_, path):
3062 3065 try:
3063 3066 return old.filectx(path)
3064 3067 except KeyError:
3065 3068 return None
3066 3069
3067 3070 # See if we got a message from -m or -l, if not, open the editor with
3068 3071 # the message of the changeset to amend.
3069 3072 message = logmessage(ui, opts)
3070 3073
3071 3074 editform = mergeeditform(old, b'commit.amend')
3072 3075
3073 3076 if not message:
3074 3077 message = old.description()
3075 3078 # The default, if no message is provided and --edit is not passed, is to
3076 3079 # invoke the editor, but allow --no-edit. If somehow we don't have any
3077 3080 # description, let's always start the editor.
3078 3081 doedit = not message or opts.get(b'edit') in [True, None]
3079 3082 else:
3080 3083 # The default, if a message is provided, is to not invoke the editor,
3081 3084 # but allow --edit.
3082 3085 doedit = opts.get(b'edit') is True
3083 3086 editor = getcommiteditor(edit=doedit, editform=editform)
3084 3087
3085 3088 pureextra = extra.copy()
3086 3089 extra[b'amend_source'] = old.hex()
3087 3090
3088 3091 new = context.memctx(
3089 3092 repo,
3090 3093 parents=[base.node(), old.p2().node()],
3091 3094 text=message,
3092 3095 files=files,
3093 3096 filectxfn=filectxfn,
3094 3097 user=user,
3095 3098 date=date,
3096 3099 extra=extra,
3097 3100 editor=editor,
3098 3101 )
3099 3102
3100 3103 newdesc = changelog.stripdesc(new.description())
3101 3104 if (
3102 3105 (not changes)
3103 3106 and newdesc == old.description()
3104 3107 and user == old.user()
3105 3108 and (date == old.date() or datemaydiffer)
3106 3109 and pureextra == old.extra()
3107 3110 ):
3108 3111 # nothing changed. continuing here would create a new node
3109 3112 # anyway because of the amend_source noise.
3110 3113 #
3111 3114 # This is not what we expect from amend.
3112 3115 return old.node()
3113 3116
3114 3117 commitphase = None
3115 3118 if opts.get(b'secret'):
3116 3119 commitphase = phases.secret
3117 3120 newid = repo.commitctx(new)
3118 3121
3119 3122 # Reroute the working copy parent to the new changeset
3120 3123 repo.setparents(newid, nullid)
3121 3124 mapping = {old.node(): (newid,)}
3122 3125 obsmetadata = None
3123 3126 if opts.get(b'note'):
3124 3127 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3125 3128 backup = ui.configbool(b'rewrite', b'backup-bundle')
3126 3129 scmutil.cleanupnodes(
3127 3130 repo,
3128 3131 mapping,
3129 3132 b'amend',
3130 3133 metadata=obsmetadata,
3131 3134 fixphase=True,
3132 3135 targetphase=commitphase,
3133 3136 backup=backup,
3134 3137 )
3135 3138
3136 3139 # Fixing the dirstate because localrepo.commitctx does not update
3137 3140 # it. This is rather convenient because we did not need to update
3138 3141 # the dirstate for all the files in the new commit which commitctx
3139 3142 # could have done if it updated the dirstate. Now, we can
3140 3143 # selectively update the dirstate only for the amended files.
3141 3144 dirstate = repo.dirstate
3142 3145
3143 3146 # Update the state of the files which were added and modified in the
3144 3147 # amend to "normal" in the dirstate. We need to use "normallookup" since
3145 3148 # the files may have changed since the command started; using "normal"
3146 3149 # would mark them as clean but with uncommitted contents.
3147 3150 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3148 3151 for f in normalfiles:
3149 3152 dirstate.normallookup(f)
3150 3153
3151 3154 # Update the state of files which were removed in the amend
3152 3155 # to "removed" in the dirstate.
3153 3156 removedfiles = set(wctx.removed()) & filestoamend
3154 3157 for f in removedfiles:
3155 3158 dirstate.drop(f)
3156 3159
3157 3160 return newid
3158 3161
3159 3162
3160 3163 def commiteditor(repo, ctx, subs, editform=b''):
3161 3164 if ctx.description():
3162 3165 return ctx.description()
3163 3166 return commitforceeditor(
3164 3167 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3165 3168 )
3166 3169
3167 3170
3168 3171 def commitforceeditor(
3169 3172 repo,
3170 3173 ctx,
3171 3174 subs,
3172 3175 finishdesc=None,
3173 3176 extramsg=None,
3174 3177 editform=b'',
3175 3178 unchangedmessagedetection=False,
3176 3179 ):
3177 3180 if not extramsg:
3178 3181 extramsg = _(b"Leave message empty to abort commit.")
3179 3182
3180 3183 forms = [e for e in editform.split(b'.') if e]
3181 3184 forms.insert(0, b'changeset')
3182 3185 templatetext = None
3183 3186 while forms:
3184 3187 ref = b'.'.join(forms)
3185 3188 if repo.ui.config(b'committemplate', ref):
3186 3189 templatetext = committext = buildcommittemplate(
3187 3190 repo, ctx, subs, extramsg, ref
3188 3191 )
3189 3192 break
3190 3193 forms.pop()
3191 3194 else:
3192 3195 committext = buildcommittext(repo, ctx, subs, extramsg)
3193 3196
3194 3197 # run editor in the repository root
3195 3198 olddir = encoding.getcwd()
3196 3199 os.chdir(repo.root)
3197 3200
3198 3201 # make in-memory changes visible to external process
3199 3202 tr = repo.currenttransaction()
3200 3203 repo.dirstate.write(tr)
3201 3204 pending = tr and tr.writepending() and repo.root
3202 3205
3203 3206 editortext = repo.ui.edit(
3204 3207 committext,
3205 3208 ctx.user(),
3206 3209 ctx.extra(),
3207 3210 editform=editform,
3208 3211 pending=pending,
3209 3212 repopath=repo.path,
3210 3213 action=b'commit',
3211 3214 )
3212 3215 text = editortext
3213 3216
3214 3217 # strip away anything below this special string (used for editors that want
3215 3218 # to display the diff)
3216 3219 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3217 3220 if stripbelow:
3218 3221 text = text[: stripbelow.start()]
3219 3222
3220 3223 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3221 3224 os.chdir(olddir)
3222 3225
3223 3226 if finishdesc:
3224 3227 text = finishdesc(text)
3225 3228 if not text.strip():
3226 3229 raise error.Abort(_(b"empty commit message"))
3227 3230 if unchangedmessagedetection and editortext == templatetext:
3228 3231 raise error.Abort(_(b"commit message unchanged"))
3229 3232
3230 3233 return text
3231 3234
3232 3235
3233 3236 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3234 3237 ui = repo.ui
3235 3238 spec = formatter.templatespec(ref, None, None)
3236 3239 t = logcmdutil.changesettemplater(ui, repo, spec)
3237 3240 t.t.cache.update(
3238 3241 (k, templater.unquotestring(v))
3239 3242 for k, v in repo.ui.configitems(b'committemplate')
3240 3243 )
3241 3244
3242 3245 if not extramsg:
3243 3246 extramsg = b'' # ensure that extramsg is string
3244 3247
3245 3248 ui.pushbuffer()
3246 3249 t.show(ctx, extramsg=extramsg)
3247 3250 return ui.popbuffer()
3248 3251
3249 3252
3250 3253 def hgprefix(msg):
3251 3254 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3252 3255
3253 3256
3254 3257 def buildcommittext(repo, ctx, subs, extramsg):
3255 3258 edittext = []
3256 3259 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3257 3260 if ctx.description():
3258 3261 edittext.append(ctx.description())
3259 3262 edittext.append(b"")
3260 3263 edittext.append(b"") # Empty line between message and comments.
3261 3264 edittext.append(
3262 3265 hgprefix(
3263 3266 _(
3264 3267 b"Enter commit message."
3265 3268 b" Lines beginning with 'HG:' are removed."
3266 3269 )
3267 3270 )
3268 3271 )
3269 3272 edittext.append(hgprefix(extramsg))
3270 3273 edittext.append(b"HG: --")
3271 3274 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3272 3275 if ctx.p2():
3273 3276 edittext.append(hgprefix(_(b"branch merge")))
3274 3277 if ctx.branch():
3275 3278 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3276 3279 if bookmarks.isactivewdirparent(repo):
3277 3280 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3278 3281 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3279 3282 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3280 3283 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3281 3284 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3282 3285 if not added and not modified and not removed:
3283 3286 edittext.append(hgprefix(_(b"no files changed")))
3284 3287 edittext.append(b"")
3285 3288
3286 3289 return b"\n".join(edittext)
3287 3290
3288 3291
3289 3292 def commitstatus(repo, node, branch, bheads=None, opts=None):
3290 3293 if opts is None:
3291 3294 opts = {}
3292 3295 ctx = repo[node]
3293 3296 parents = ctx.parents()
3294 3297
3295 3298 if (
3296 3299 not opts.get(b'amend')
3297 3300 and bheads
3298 3301 and node not in bheads
3299 3302 and not [
3300 3303 x for x in parents if x.node() in bheads and x.branch() == branch
3301 3304 ]
3302 3305 ):
3303 3306 repo.ui.status(_(b'created new head\n'))
3304 3307 # The message is not printed for initial roots. For the other
3305 3308 # changesets, it is printed in the following situations:
3306 3309 #
3307 3310 # Par column: for the 2 parents with ...
3308 3311 # N: null or no parent
3309 3312 # B: parent is on another named branch
3310 3313 # C: parent is a regular non head changeset
3311 3314 # H: parent was a branch head of the current branch
3312 3315 # Msg column: whether we print "created new head" message
3313 3316 # In the following, it is assumed that there already exists some
3314 3317 # initial branch heads of the current branch, otherwise nothing is
3315 3318 # printed anyway.
3316 3319 #
3317 3320 # Par Msg Comment
3318 3321 # N N y additional topo root
3319 3322 #
3320 3323 # B N y additional branch root
3321 3324 # C N y additional topo head
3322 3325 # H N n usual case
3323 3326 #
3324 3327 # B B y weird additional branch root
3325 3328 # C B y branch merge
3326 3329 # H B n merge with named branch
3327 3330 #
3328 3331 # C C y additional head from merge
3329 3332 # C H n merge with a head
3330 3333 #
3331 3334 # H H n head merge: head count decreases
3332 3335
3333 3336 if not opts.get(b'close_branch'):
3334 3337 for r in parents:
3335 3338 if r.closesbranch() and r.branch() == branch:
3336 3339 repo.ui.status(
3337 3340 _(b'reopening closed branch head %d\n') % r.rev()
3338 3341 )
3339 3342
3340 3343 if repo.ui.debugflag:
3341 3344 repo.ui.write(
3342 3345 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3343 3346 )
3344 3347 elif repo.ui.verbose:
3345 3348 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3346 3349
3347 3350
3348 3351 def postcommitstatus(repo, pats, opts):
3349 3352 return repo.status(match=scmutil.match(repo[None], pats, opts))
3350 3353
3351 3354
3352 3355 def revert(ui, repo, ctx, parents, *pats, **opts):
3353 3356 opts = pycompat.byteskwargs(opts)
3354 3357 parent, p2 = parents
3355 3358 node = ctx.node()
3356 3359
3357 3360 mf = ctx.manifest()
3358 3361 if node == p2:
3359 3362 parent = p2
3360 3363
3361 3364 # need all matching names in dirstate and manifest of target rev,
3362 3365 # so have to walk both. do not print errors if files exist in one
3363 3366 # but not the other. in both cases, filesets should be evaluated against
3364 3367 # workingctx to get consistent result (issue4497). this means 'set:**'
3365 3368 # cannot be used to select missing files from target rev.
3366 3369
3367 3370 # `names` is a mapping of all elements in the working copy and the target revision
3368 3371 # The mapping is in the form:
3369 3372 # <abs path in repo> -> <exactly specified by the matcher?>
3370 3373 names = {}
3371 3374 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3372 3375
3373 3376 with repo.wlock():
3374 3377 ## filling of the `names` mapping
3375 3378 # walk dirstate to fill `names`
3376 3379
3377 3380 interactive = opts.get(b'interactive', False)
3378 3381 wctx = repo[None]
3379 3382 m = scmutil.match(wctx, pats, opts)
3380 3383
3381 3384 # we'll need this later
3382 3385 targetsubs = sorted(s for s in wctx.substate if m(s))
3383 3386
3384 3387 if not m.always():
3385 3388 matcher = matchmod.badmatch(m, lambda x, y: False)
3386 3389 for abs in wctx.walk(matcher):
3387 3390 names[abs] = m.exact(abs)
3388 3391
3389 3392 # walk target manifest to fill `names`
3390 3393
3391 3394 def badfn(path, msg):
3392 3395 if path in names:
3393 3396 return
3394 3397 if path in ctx.substate:
3395 3398 return
3396 3399 path_ = path + b'/'
3397 3400 for f in names:
3398 3401 if f.startswith(path_):
3399 3402 return
3400 3403 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3401 3404
3402 3405 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3403 3406 if abs not in names:
3404 3407 names[abs] = m.exact(abs)
3405 3408
3406 3409 # Find the status of all files in `names`.
3407 3410 m = scmutil.matchfiles(repo, names)
3408 3411
3409 3412 changes = repo.status(
3410 3413 node1=node, match=m, unknown=True, ignored=True, clean=True
3411 3414 )
3412 3415 else:
3413 3416 changes = repo.status(node1=node, match=m)
3414 3417 for kind in changes:
3415 3418 for abs in kind:
3416 3419 names[abs] = m.exact(abs)
3417 3420
3418 3421 m = scmutil.matchfiles(repo, names)
3419 3422
3420 3423 modified = set(changes.modified)
3421 3424 added = set(changes.added)
3422 3425 removed = set(changes.removed)
3423 3426 _deleted = set(changes.deleted)
3424 3427 unknown = set(changes.unknown)
3425 3428 unknown.update(changes.ignored)
3426 3429 clean = set(changes.clean)
3427 3430 modadded = set()
3428 3431
3429 3432 # We need to account for the state of the file in the dirstate,
3430 3433 # even when we revert against something other than the parent. This will
3431 3434 # slightly alter the behavior of revert (doing a backup or not, delete
3432 3435 # or just forget, etc).
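# The `ds` prefixed sets below describe changes relative to the working
# directory parent (the dirstate), while the unprefixed sets describe
# changes relative to the revert target.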
3433 3436 if parent == node:
3434 3437 dsmodified = modified
3435 3438 dsadded = added
3436 3439 dsremoved = removed
3437 3440 # store all local modifications, useful later for rename detection
3438 3441 localchanges = dsmodified | dsadded
3439 3442 modified, added, removed = set(), set(), set()
3440 3443 else:
3441 3444 changes = repo.status(node1=parent, match=m)
3442 3445 dsmodified = set(changes.modified)
3443 3446 dsadded = set(changes.added)
3444 3447 dsremoved = set(changes.removed)
3445 3448 # store all local modifications, useful later for rename detection
3446 3449 localchanges = dsmodified | dsadded
3447 3450
3448 3451 # only take removes between wc and target into account
3449 3452 clean |= dsremoved - removed
3450 3453 dsremoved &= removed
3451 3454 # distinguish between dirstate removes and the others
3452 3455 removed -= dsremoved
3453 3456
3454 3457 modadded = added & dsmodified
3455 3458 added -= modadded
3456 3459
3457 3460 # tell newly modified files apart.
3458 3461 dsmodified &= modified
3459 3462 dsmodified |= modified & dsadded # dirstate added may need backup
3460 3463 modified -= dsmodified
3461 3464
3462 3465 # We need to wait for some post-processing to update this set
3463 3466 # before making the distinction. The dirstate will be used for
3464 3467 # that purpose.
3465 3468 dsadded = added
3466 3469
3467 3470 # in case of a merge, files that are actually added can be reported as
3468 3471 # modified; we need to post-process the result
3469 3472 if p2 != nullid:
3470 3473 mergeadd = set(dsmodified)
3471 3474 for path in dsmodified:
3472 3475 if path in mf:
3473 3476 mergeadd.remove(path)
3474 3477 dsadded |= mergeadd
3475 3478 dsmodified -= mergeadd
3476 3479
3477 3480 # if f is a rename, update `names` to also revert the source
3478 3481 for f in localchanges:
3479 3482 src = repo.dirstate.copied(f)
3480 3483 # XXX should we check for rename down to target node?
3481 3484 if src and src not in names and repo.dirstate[src] == b'r':
3482 3485 dsremoved.add(src)
3483 3486 names[src] = True
3484 3487
3485 3488 # determine the exact nature of the deleted files
3486 3489 deladded = set(_deleted)
3487 3490 for path in _deleted:
3488 3491 if path in mf:
3489 3492 deladded.remove(path)
3490 3493 deleted = _deleted - deladded
3491 3494
3492 3495 # distinguish between files to forget and the others
3493 3496 added = set()
3494 3497 for abs in dsadded:
3495 3498 if repo.dirstate[abs] != b'a':
3496 3499 added.add(abs)
3497 3500 dsadded -= added
3498 3501
3499 3502 for abs in deladded:
3500 3503 if repo.dirstate[abs] == b'a':
3501 3504 dsadded.add(abs)
3502 3505 deladded -= dsadded
3503 3506
3504 3507 # For files marked as removed, we check if an unknown file is present at
3505 3508 # the same path. If such a file exists, it may need to be backed up.
3506 3509 # Making the distinction at this stage helps keep the backup logic
3507 3510 # simpler.
3508 3511 removunk = set()
3509 3512 for abs in removed:
3510 3513 target = repo.wjoin(abs)
3511 3514 if os.path.lexists(target):
3512 3515 removunk.add(abs)
3513 3516 removed -= removunk
3514 3517
3515 3518 dsremovunk = set()
3516 3519 for abs in dsremoved:
3517 3520 target = repo.wjoin(abs)
3518 3521 if os.path.lexists(target):
3519 3522 dsremovunk.add(abs)
3520 3523 dsremoved -= dsremovunk
3521 3524
3522 3525 # actions to be actually performed by revert
3523 3526 # (<list of files>, <message>) tuple
3524 3527 actions = {
3525 3528 b'revert': ([], _(b'reverting %s\n')),
3526 3529 b'add': ([], _(b'adding %s\n')),
3527 3530 b'remove': ([], _(b'removing %s\n')),
3528 3531 b'drop': ([], _(b'removing %s\n')),
3529 3532 b'forget': ([], _(b'forgetting %s\n')),
3530 3533 b'undelete': ([], _(b'undeleting %s\n')),
3531 3534 b'noop': (None, _(b'no changes needed to %s\n')),
3532 3535 b'unknown': (None, _(b'file not managed: %s\n')),
3533 3536 }
3534 3537
3535 3538 # "constants" that convey the backup strategy.
3536 3539 # All are set to `discard` if `no-backup` is set, to avoid checking
3537 3540 # no_backup lower in the code.
3538 3541 # These values are ordered for comparison purposes
3539 3542 backupinteractive = 3 # do backup if interactively modified
3540 3543 backup = 2 # unconditionally do backup
3541 3544 check = 1 # check if the existing file differs from target
3542 3545 discard = 0 # never do backup
3543 3546 if opts.get(b'no_backup'):
3544 3547 backupinteractive = backup = check = discard
3545 3548 if interactive:
3546 3549 dsmodifiedbackup = backupinteractive
3547 3550 else:
3548 3551 dsmodifiedbackup = backup
3549 3552 tobackup = set()
3550 3553
3551 3554 backupanddel = actions[b'remove']
3552 3555 if not opts.get(b'no_backup'):
3553 3556 backupanddel = actions[b'drop']
3554 3557
3555 3558 disptable = (
3556 3559 # dispatch table:
3557 3560 # file state
3558 3561 # action
3559 3562 # make backup
3560 3563 ## Sets that will result in file changes on disk
3561 3564 # Modified compared to target, no local change
3562 3565 (modified, actions[b'revert'], discard),
3563 3566 # Modified compared to target, but local file is deleted
3564 3567 (deleted, actions[b'revert'], discard),
3565 3568 # Modified compared to target, local change
3566 3569 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3567 3570 # Added since target
3568 3571 (added, actions[b'remove'], discard),
3569 3572 # Added in working directory
3570 3573 (dsadded, actions[b'forget'], discard),
3571 3574 # Added since target, have local modification
3572 3575 (modadded, backupanddel, backup),
3573 3576 # Added since target but file is missing in working directory
3574 3577 (deladded, actions[b'drop'], discard),
3575 3578 # Removed since target, before working copy parent
3576 3579 (removed, actions[b'add'], discard),
3577 3580 # Same as `removed` but an unknown file exists at the same path
3578 3581 (removunk, actions[b'add'], check),
3579 3582 # Removed since target, marked as such in working copy parent
3580 3583 (dsremoved, actions[b'undelete'], discard),
3581 3584 # Same as `dsremoved` but an unknown file exists at the same path
3582 3585 (dsremovunk, actions[b'undelete'], check),
3583 3586 ## the following sets do not result in any file changes
3584 3587 # File with no modification
3585 3588 (clean, actions[b'noop'], discard),
3586 3589 # Existing file, not tracked anywhere
3587 3590 (unknown, actions[b'unknown'], discard),
3588 3591 )
3589 3592
3590 3593 for abs, exact in sorted(names.items()):
3591 3594 # target file to be touched on disk (relative to cwd)
3592 3595 target = repo.wjoin(abs)
3593 3596 # search the entry in the dispatch table.
3594 3597 # if the file is in any of these sets, it was touched in the working
3595 3598 # directory parent and we are sure it needs to be reverted.
3596 3599 for table, (xlist, msg), dobackup in disptable:
3597 3600 if abs not in table:
3598 3601 continue
3599 3602 if xlist is not None:
3600 3603 xlist.append(abs)
3601 3604 if dobackup:
3602 3605 # If in interactive mode, don't automatically create
3603 3606 # .orig files (issue4793)
3604 3607 if dobackup == backupinteractive:
3605 3608 tobackup.add(abs)
3606 3609 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3607 3610 absbakname = scmutil.backuppath(ui, repo, abs)
3608 3611 bakname = os.path.relpath(
3609 3612 absbakname, start=repo.root
3610 3613 )
3611 3614 ui.note(
3612 3615 _(b'saving current version of %s as %s\n')
3613 3616 % (uipathfn(abs), uipathfn(bakname))
3614 3617 )
3615 3618 if not opts.get(b'dry_run'):
3616 3619 if interactive:
3617 3620 util.copyfile(target, absbakname)
3618 3621 else:
3619 3622 util.rename(target, absbakname)
3620 3623 if opts.get(b'dry_run'):
3621 3624 if ui.verbose or not exact:
3622 3625 ui.status(msg % uipathfn(abs))
3623 3626 elif exact:
3624 3627 ui.warn(msg % uipathfn(abs))
3625 3628 break
3626 3629
3627 3630 if not opts.get(b'dry_run'):
3628 3631 needdata = (b'revert', b'add', b'undelete')
3629 3632 oplist = [actions[name][0] for name in needdata]
3630 3633 prefetch = scmutil.prefetchfiles
3631 3634 matchfiles = scmutil.matchfiles
3632 3635 prefetch(
3633 3636 repo,
3634 3637 [ctx.rev()],
3635 3638 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3636 3639 )
3637 3640 match = scmutil.match(repo[None], pats)
3638 3641 _performrevert(
3639 3642 repo,
3640 3643 parents,
3641 3644 ctx,
3642 3645 names,
3643 3646 uipathfn,
3644 3647 actions,
3645 3648 match,
3646 3649 interactive,
3647 3650 tobackup,
3648 3651 )
3649 3652
3650 3653 if targetsubs:
3651 3654 # Revert the subrepos on the revert list
3652 3655 for sub in targetsubs:
3653 3656 try:
3654 3657 wctx.sub(sub).revert(
3655 3658 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3656 3659 )
3657 3660 except KeyError:
3658 3661 raise error.Abort(
3659 3662 b"subrepository '%s' does not exist in %s!"
3660 3663 % (sub, short(ctx.node()))
3661 3664 )
3662 3665
3663 3666
3664 3667 def _performrevert(
3665 3668 repo,
3666 3669 parents,
3667 3670 ctx,
3668 3671 names,
3669 3672 uipathfn,
3670 3673 actions,
3671 3674 match,
3672 3675 interactive=False,
3673 3676 tobackup=None,
3674 3677 ):
3675 3678 """function that actually performs all the actions computed for revert
3676 3679
3677 3680 This is an independent function to let extensions plug in and react to
3678 3681 the imminent revert.
3679 3682
3680 3683 Make sure you have the working directory locked when calling this function.
3681 3684 """
3682 3685 parent, p2 = parents
3683 3686 node = ctx.node()
3684 3687 excluded_files = []
3685 3688
3686 3689 def checkout(f):
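# write f's content and flags from the target revision into the working directory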
3687 3690 fc = ctx[f]
3688 3691 repo.wwrite(f, fc.data(), fc.flags())
3689 3692
3690 3693 def doremove(f):
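# delete f from the working directory (pruning empty parent directories when
# experimental.removeemptydirs allows it) and mark it as removed in the dirstate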
3691 3694 try:
3692 3695 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3693 3696 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3694 3697 except OSError:
3695 3698 pass
3696 3699 repo.dirstate.remove(f)
3697 3700
3698 3701 def prntstatusmsg(action, f):
3699 3702 exact = names[f]
3700 3703 if repo.ui.verbose or not exact:
3701 3704 repo.ui.status(actions[action][1] % uipathfn(f))
3702 3705
3703 3706 audit_path = pathutil.pathauditor(repo.root, cached=True)
3704 3707 for f in actions[b'forget'][0]:
3705 3708 if interactive:
3706 3709 choice = repo.ui.promptchoice(
3707 3710 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3708 3711 )
3709 3712 if choice == 0:
3710 3713 prntstatusmsg(b'forget', f)
3711 3714 repo.dirstate.drop(f)
3712 3715 else:
3713 3716 excluded_files.append(f)
3714 3717 else:
3715 3718 prntstatusmsg(b'forget', f)
3716 3719 repo.dirstate.drop(f)
3717 3720 for f in actions[b'remove'][0]:
3718 3721 audit_path(f)
3719 3722 if interactive:
3720 3723 choice = repo.ui.promptchoice(
3721 3724 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3722 3725 )
3723 3726 if choice == 0:
3724 3727 prntstatusmsg(b'remove', f)
3725 3728 doremove(f)
3726 3729 else:
3727 3730 excluded_files.append(f)
3728 3731 else:
3729 3732 prntstatusmsg(b'remove', f)
3730 3733 doremove(f)
3731 3734 for f in actions[b'drop'][0]:
3732 3735 audit_path(f)
3733 3736 prntstatusmsg(b'drop', f)
3734 3737 repo.dirstate.remove(f)
3735 3738
3736 3739 normal = None
3737 3740 if node == parent:
3738 3741 # We're reverting to our parent. If possible, we'd like status
3739 3742 # to report the file as clean. We have to use normallookup for
3740 3743 # merges to avoid losing information about merged/dirty files.
3741 3744 if p2 != nullid:
3742 3745 normal = repo.dirstate.normallookup
3743 3746 else:
3744 3747 normal = repo.dirstate.normal
3745 3748
3746 3749 newlyaddedandmodifiedfiles = set()
3747 3750 if interactive:
3748 3751 # Prompt the user for changes to revert
3749 3752 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3750 3753 m = scmutil.matchfiles(repo, torevert)
3751 3754 diffopts = patch.difffeatureopts(
3752 3755 repo.ui,
3753 3756 whitespace=True,
3754 3757 section=b'commands',
3755 3758 configprefix=b'revert.interactive.',
3756 3759 )
3757 3760 diffopts.nodates = True
3758 3761 diffopts.git = True
3759 3762 operation = b'apply'
3760 3763 if node == parent:
3761 3764 if repo.ui.configbool(
3762 3765 b'experimental', b'revert.interactive.select-to-keep'
3763 3766 ):
3764 3767 operation = b'keep'
3765 3768 else:
3766 3769 operation = b'discard'
3767 3770
3768 3771 if operation == b'apply':
3769 3772 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3770 3773 else:
3771 3774 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3772 3775 originalchunks = patch.parsepatch(diff)
3773 3776
3774 3777 try:
3775 3778
3776 3779 chunks, opts = recordfilter(
3777 3780 repo.ui, originalchunks, match, operation=operation
3778 3781 )
3779 3782 if operation == b'discard':
3780 3783 chunks = patch.reversehunks(chunks)
3781 3784
3782 3785 except error.PatchError as err:
3783 3786 raise error.Abort(_(b'error parsing patch: %s') % err)
3784 3787
3785 3788 # FIXME: when doing an interactive revert of a copy, there's no way of
3786 3789 # performing a partial revert of the added file; the only option is
3787 3790 # "remove added file <name> (Yn)?", so we don't need to worry about the
3788 3791 # alsorestore value. Ideally we'd be able to partially revert
3789 3792 # copied/renamed files.
3790 3793 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3791 3794 chunks, originalchunks
3792 3795 )
3793 3796 if tobackup is None:
3794 3797 tobackup = set()
3795 3798 # Apply changes
3796 3799 fp = stringio()
3797 3800 # chunks are serialized per file, but files aren't sorted
3798 3801 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3799 3802 prntstatusmsg(b'revert', f)
3800 3803 files = set()
3801 3804 for c in chunks:
3802 3805 if ishunk(c):
3803 3806 abs = c.header.filename()
3804 3807 # Create a backup file only if this hunk should be backed up
3805 3808 if c.header.filename() in tobackup:
3806 3809 target = repo.wjoin(abs)
3807 3810 bakname = scmutil.backuppath(repo.ui, repo, abs)
3808 3811 util.copyfile(target, bakname)
3809 3812 tobackup.remove(abs)
3810 3813 if abs not in files:
3811 3814 files.add(abs)
3812 3815 if operation == b'keep':
3813 3816 checkout(abs)
3814 3817 c.write(fp)
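# fp now holds the selected hunks; the patcher below is only invoked if at
# least one chunk was written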
3815 3818 dopatch = fp.tell()
3816 3819 fp.seek(0)
3817 3820 if dopatch:
3818 3821 try:
3819 3822 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3820 3823 except error.PatchError as err:
3821 3824 raise error.Abort(pycompat.bytestr(err))
3822 3825 del fp
3823 3826 else:
3824 3827 for f in actions[b'revert'][0]:
3825 3828 prntstatusmsg(b'revert', f)
3826 3829 checkout(f)
3827 3830 if normal:
3828 3831 normal(f)
3829 3832
3830 3833 for f in actions[b'add'][0]:
3831 3834 # Don't check out modified files; they are already created by the diff
3832 3835 if f not in newlyaddedandmodifiedfiles:
3833 3836 prntstatusmsg(b'add', f)
3834 3837 checkout(f)
3835 3838 repo.dirstate.add(f)
3836 3839
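# as above, only mark undeleted files clean outright when reverting to the
# single dirstate parent; otherwise use normallookup so they are re-checked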
3837 3840 normal = repo.dirstate.normallookup
3838 3841 if node == parent and p2 == nullid:
3839 3842 normal = repo.dirstate.normal
3840 3843 for f in actions[b'undelete'][0]:
3841 3844 if interactive:
3842 3845 choice = repo.ui.promptchoice(
3843 3846 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3844 3847 )
3845 3848 if choice == 0:
3846 3849 prntstatusmsg(b'undelete', f)
3847 3850 checkout(f)
3848 3851 normal(f)
3849 3852 else:
3850 3853 excluded_files.append(f)
3851 3854 else:
3852 3855 prntstatusmsg(b'undelete', f)
3853 3856 checkout(f)
3854 3857 normal(f)
3855 3858
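# re-record copy/rename information in the dirstate for the files that were
# just reverted, added or undeleted, so the copy metadata is not lost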
3856 3859 copied = copies.pathcopies(repo[parent], ctx)
3857 3860
3858 3861 for f in (
3859 3862 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
3860 3863 ):
3861 3864 if f in copied:
3862 3865 repo.dirstate.copy(copied[f], f)
3863 3866
3864 3867
3865 3868 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3866 3869 # commands.outgoing. "missing" is the "missing" attribute of the result
3867 3870 # of "findcommonoutgoing()"
3868 3871 outgoinghooks = util.hooks()
3869 3872
3870 3873 # a list of (ui, repo) functions called by commands.summary
3871 3874 summaryhooks = util.hooks()
3872 3875
3873 3876 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3874 3877 #
3875 3878 # functions should return tuple of booleans below, if 'changes' is None:
3876 3879 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3877 3880 #
3878 3881 # otherwise, 'changes' is a tuple of tuples below:
3879 3882 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3880 3883 # - (desturl, destbranch, destpeer, outgoing)
3881 3884 summaryremotehooks = util.hooks()
3882 3885
3883 3886
3884 3887 def checkunfinished(repo, commit=False, skipmerge=False):
3885 3888 '''Look for an unfinished multistep operation, like graft, and abort
3886 3889 if found. It's probably good to check this right before
3887 3890 bailifchanged().
3888 3891 '''
3889 3892 # Check for non-clearable states first, so things like rebase will take
3890 3893 # precedence over update.
3891 3894 for state in statemod._unfinishedstates:
3892 3895 if (
3893 3896 state._clearable
3894 3897 or (commit and state._allowcommit)
3895 3898 or state._reportonly
3896 3899 ):
3897 3900 continue
3898 3901 if state.isunfinished(repo):
3899 3902 raise error.Abort(state.msg(), hint=state.hint())
3900 3903
3901 3904 for s in statemod._unfinishedstates:
3902 3905 if (
3903 3906 not s._clearable
3904 3907 or (commit and s._allowcommit)
3905 3908 or (s._opname == b'merge' and skipmerge)
3906 3909 or s._reportonly
3907 3910 ):
3908 3911 continue
3909 3912 if s.isunfinished(repo):
3910 3913 raise error.Abort(s.msg(), hint=s.hint())
3911 3914
3912 3915
3913 3916 def clearunfinished(repo):
3914 3917 '''Check for unfinished operations (as above), and clear the ones
3915 3918 that are clearable.
3916 3919 '''
3917 3920 for state in statemod._unfinishedstates:
3918 3921 if state._reportonly:
3919 3922 continue
3920 3923 if not state._clearable and state.isunfinished(repo):
3921 3924 raise error.Abort(state.msg(), hint=state.hint())
3922 3925
3923 3926 for s in statemod._unfinishedstates:
3924 3927 if s._opname == b'merge' or s._reportonly:
3925 3928 continue
3926 3929 if s._clearable and s.isunfinished(repo):
3927 3930 util.unlink(repo.vfs.join(s._fname))
3928 3931
3929 3932
3930 3933 def getunfinishedstate(repo):
3931 3934 '''Check for unfinished operations and return the statecheck object
3932 3935 for the first one found, or None'''
3933 3936 for state in statemod._unfinishedstates:
3934 3937 if state.isunfinished(repo):
3935 3938 return state
3936 3939 return None
3937 3940
3938 3941
3939 3942 def howtocontinue(repo):
3940 3943 '''Check for an unfinished operation and return the command to finish
3941 3944 it.
3942 3945
3943 3946 statemod._unfinishedstates list is checked for an unfinished operation
3944 3947 and the corresponding message to finish it is generated if a method to
3945 3948 continue is supported by the operation.
3946 3949
3947 3950 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3948 3951 a boolean.
3949 3952 '''
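# Illustrative: for an interrupted operation this returns something like
# (b'continue: <state.continuemsg()>', True); with only uncommitted changes
# it returns (b'continue: hg commit', False); otherwise (None, None).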
3950 3953 contmsg = _(b"continue: %s")
3951 3954 for state in statemod._unfinishedstates:
3952 3955 if not state._continueflag:
3953 3956 continue
3954 3957 if state.isunfinished(repo):
3955 3958 return contmsg % state.continuemsg(), True
3956 3959 if repo[None].dirty(missing=True, merge=False, branch=False):
3957 3960 return contmsg % _(b"hg commit"), False
3958 3961 return None, None
3959 3962
3960 3963
3961 3964 def checkafterresolved(repo):
3962 3965 '''Inform the user about the next action after completing hg resolve
3963 3966
3964 3967 If there's an unfinished operation that supports the continue flag,
3965 3968 the message from howtocontinue() is reported with repo.ui.warn.
3966 3969
3967 3970 Otherwise, it is reported with repo.ui.note.
3968 3971 '''
3969 3972 msg, warning = howtocontinue(repo)
3970 3973 if msg is not None:
3971 3974 if warning:
3972 3975 repo.ui.warn(b"%s\n" % msg)
3973 3976 else:
3974 3977 repo.ui.note(b"%s\n" % msg)
3975 3978
3976 3979
3977 3980 def wrongtooltocontinue(repo, task):
3978 3981 '''Raise an abort suggesting how to properly continue if there is an
3979 3982 active task.
3980 3983
3981 3984 Uses howtocontinue() to find the active task.
3982 3985
3983 3986 If there is no such task (or only the 'hg commit' suggestion, which
3984 3987 uses repo.ui.note), no hint is offered.
3985 3988 '''
3986 3989 after = howtocontinue(repo)
3987 3990 hint = None
3988 3991 if after[1]:
3989 3992 hint = after[0]
3990 3993 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
3991 3994
3992 3995
3993 3996 def abortgraft(ui, repo, graftstate):
3994 3997 """abort the interrupted graft and roll back to the state before the
3995 3998 graft was interrupted"""
3996 3999 if not graftstate.exists():
3997 4000 raise error.Abort(_(b"no interrupted graft to abort"))
3998 4001 statedata = readgraftstate(repo, graftstate)
3999 4002 newnodes = statedata.get(b'newnodes')
4000 4003 if newnodes is None:
4001 4004 # an old graft state which does not have all the data required to abort
4002 4005 # the graft
4003 4006 raise error.Abort(_(b"cannot abort using an old graftstate"))
4004 4007
4005 4008 # changeset from which graft operation was started
4006 4009 if len(newnodes) > 0:
4007 4010 startctx = repo[newnodes[0]].p1()
4008 4011 else:
4009 4012 startctx = repo[b'.']
4010 4013 # whether to strip or not
4011 4014 cleanup = False
4012 4015 from . import hg
4013 4016
4014 4017 if newnodes:
4015 4018 newnodes = [repo[r].rev() for r in newnodes]
4016 4019 cleanup = True
4017 4020 # checking that none of the newnodes turned public or is public
4018 4021 immutable = [c for c in newnodes if not repo[c].mutable()]
4019 4022 if immutable:
4020 4023 repo.ui.warn(
4021 4024 _(b"cannot clean up public changesets %s\n")
4022 4025 % b', '.join(bytes(repo[r]) for r in immutable),
4023 4026 hint=_(b"see 'hg help phases' for details"),
4024 4027 )
4025 4028 cleanup = False
4026 4029
4027 4030 # checking that no new nodes are created on top of grafted revs
4028 4031 desc = set(repo.changelog.descendants(newnodes))
4029 4032 if desc - set(newnodes):
4030 4033 repo.ui.warn(
4031 4034 _(
4032 4035 b"new changesets detected on destination "
4033 4036 b"branch, can't strip\n"
4034 4037 )
4035 4038 )
4036 4039 cleanup = False
4037 4040
4038 4041 if cleanup:
4039 4042 with repo.wlock(), repo.lock():
4040 4043 hg.updaterepo(repo, startctx.node(), overwrite=True)
4041 4044 # stripping the new nodes created
4042 4045 strippoints = [
4043 4046 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4044 4047 ]
4045 4048 repair.strip(repo.ui, repo, strippoints, backup=False)
4046 4049
4047 4050 if not cleanup:
4048 4051 # we don't update to the startnode if we can't strip
4049 4052 startctx = repo[b'.']
4050 4053 hg.updaterepo(repo, startctx.node(), overwrite=True)
4051 4054
4052 4055 ui.status(_(b"graft aborted\n"))
4053 4056 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4054 4057 graftstate.delete()
4055 4058 return 0
4056 4059
4057 4060
4058 4061 def readgraftstate(repo, graftstate):
4059 4062 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4060 4063 """read the graft state file and return a dict of the data stored in it"""
4061 4064 try:
4062 4065 return graftstate.read()
4063 4066 except error.CorruptedState:
4064 4067 nodes = repo.vfs.read(b'graftstate').splitlines()
4065 4068 return {b'nodes': nodes}
4066 4069
4067 4070
4068 4071 def hgabortgraft(ui, repo):
4069 4072 """abort logic for aborting graft using 'hg abort'"""
4070 4073 with repo.wlock():
4071 4074 graftstate = statemod.cmdstate(repo, b'graftstate')
4072 4075 return abortgraft(ui, repo, graftstate)