py3: use pycompat.bytestr() or '%d' in place of str()...
Pulkit Goyal
r35204:4ee493ea default
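
The diff below is against mercurial/cmdutil.py and applies the project's Python 3 porting rule: Mercurial handles bytes internally, and on Python 3 str() yields a unicode string, so integer values are formatted with '%d' (a bytes literal under Mercurial's source transformer) and other values are converted with pycompat.bytestr(). A minimal, standalone sketch of the pattern (illustration only, not part of the changeset):

    rev = 7
    bad = str(rev)        # u'7' on Python 3; cannot be mixed into bytes-only code paths
    good = b'%d' % rev    # b'7' on both Python 2 and 3 (bytes %-formatting)
    # pycompat.bytestr() plays the same role for values that are not plain integers.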
@@ -1,3995 +1,3995 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 dagop,
30 30 dirstateguard,
31 31 encoding,
32 32 error,
33 33 formatter,
34 34 graphmod,
35 35 match as matchmod,
36 36 mdiff,
37 37 obsolete,
38 38 patch,
39 39 pathutil,
40 40 pycompat,
41 41 registrar,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 smartset,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
51 51 stringio = util.stringio
52 52
53 53 # templates of common command options
54 54
55 55 dryrunopts = [
56 56 ('n', 'dry-run', None,
57 57 _('do not perform actions, just print output')),
58 58 ]
59 59
60 60 remoteopts = [
61 61 ('e', 'ssh', '',
62 62 _('specify ssh command to use'), _('CMD')),
63 63 ('', 'remotecmd', '',
64 64 _('specify hg command to run on the remote side'), _('CMD')),
65 65 ('', 'insecure', None,
66 66 _('do not verify server certificate (ignoring web.cacerts config)')),
67 67 ]
68 68
69 69 walkopts = [
70 70 ('I', 'include', [],
71 71 _('include names matching the given patterns'), _('PATTERN')),
72 72 ('X', 'exclude', [],
73 73 _('exclude names matching the given patterns'), _('PATTERN')),
74 74 ]
75 75
76 76 commitopts = [
77 77 ('m', 'message', '',
78 78 _('use text as commit message'), _('TEXT')),
79 79 ('l', 'logfile', '',
80 80 _('read commit message from file'), _('FILE')),
81 81 ]
82 82
83 83 commitopts2 = [
84 84 ('d', 'date', '',
85 85 _('record the specified date as commit date'), _('DATE')),
86 86 ('u', 'user', '',
87 87 _('record the specified user as committer'), _('USER')),
88 88 ]
89 89
90 90 # hidden for now
91 91 formatteropts = [
92 92 ('T', 'template', '',
93 93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
94 94 ]
95 95
96 96 templateopts = [
97 97 ('', 'style', '',
98 98 _('display using template map file (DEPRECATED)'), _('STYLE')),
99 99 ('T', 'template', '',
100 100 _('display with template'), _('TEMPLATE')),
101 101 ]
102 102
103 103 logopts = [
104 104 ('p', 'patch', None, _('show patch')),
105 105 ('g', 'git', None, _('use git extended diff format')),
106 106 ('l', 'limit', '',
107 107 _('limit number of changes displayed'), _('NUM')),
108 108 ('M', 'no-merges', None, _('do not show merges')),
109 109 ('', 'stat', None, _('output diffstat-style summary of changes')),
110 110 ('G', 'graph', None, _("show the revision DAG")),
111 111 ] + templateopts
112 112
113 113 diffopts = [
114 114 ('a', 'text', None, _('treat all files as text')),
115 115 ('g', 'git', None, _('use git extended diff format')),
116 116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
117 117 ('', 'nodates', None, _('omit dates from diff headers'))
118 118 ]
119 119
120 120 diffwsopts = [
121 121 ('w', 'ignore-all-space', None,
122 122 _('ignore white space when comparing lines')),
123 123 ('b', 'ignore-space-change', None,
124 124 _('ignore changes in the amount of white space')),
125 125 ('B', 'ignore-blank-lines', None,
126 126 _('ignore changes whose lines are all blank')),
127 127 ('Z', 'ignore-space-at-eol', None,
128 128 _('ignore changes in whitespace at EOL')),
129 129 ]
130 130
131 131 diffopts2 = [
132 132 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
133 133 ('p', 'show-function', None, _('show which function each change is in')),
134 134 ('', 'reverse', None, _('produce a diff that undoes the changes')),
135 135 ] + diffwsopts + [
136 136 ('U', 'unified', '',
137 137 _('number of lines of context to show'), _('NUM')),
138 138 ('', 'stat', None, _('output diffstat-style summary of changes')),
139 139 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
140 140 ]
141 141
142 142 mergetoolopts = [
143 143 ('t', 'tool', '', _('specify merge tool')),
144 144 ]
145 145
146 146 similarityopts = [
147 147 ('s', 'similarity', '',
148 148 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
149 149 ]
150 150
151 151 subrepoopts = [
152 152 ('S', 'subrepos', None,
153 153 _('recurse into subrepositories'))
154 154 ]
155 155
156 156 debugrevlogopts = [
157 157 ('c', 'changelog', False, _('open changelog')),
158 158 ('m', 'manifest', False, _('open manifest')),
159 159 ('', 'dir', '', _('open directory manifest')),
160 160 ]
161 161
162 162 # special string such that everything below this line will be ignored in the
163 163 # editor text
164 164 _linebelow = "^HG: ------------------------ >8 ------------------------$"
165 165
166 166 def ishunk(x):
167 167 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
168 168 return isinstance(x, hunkclasses)
169 169
170 170 def newandmodified(chunks, originalchunks):
171 171 newlyaddedandmodifiedfiles = set()
172 172 for chunk in chunks:
173 173 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
174 174 originalchunks:
175 175 newlyaddedandmodifiedfiles.add(chunk.header.filename())
176 176 return newlyaddedandmodifiedfiles
177 177
178 178 def parsealiases(cmd):
179 179 return cmd.lstrip("^").split("|")
180 180
181 181 def setupwrapcolorwrite(ui):
182 182 # wrap ui.write so diff output can be labeled/colorized
183 183 def wrapwrite(orig, *args, **kw):
184 184 label = kw.pop('label', '')
185 185 for chunk, l in patch.difflabel(lambda: args):
186 186 orig(chunk, label=label + l)
187 187
188 188 oldwrite = ui.write
189 189 def wrap(*args, **kwargs):
190 190 return wrapwrite(oldwrite, *args, **kwargs)
191 191 setattr(ui, 'write', wrap)
192 192 return oldwrite
193 193
194 194 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
195 195 if usecurses:
196 196 if testfile:
197 197 recordfn = crecordmod.testdecorator(testfile,
198 198 crecordmod.testchunkselector)
199 199 else:
200 200 recordfn = crecordmod.chunkselector
201 201
202 202 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
203 203
204 204 else:
205 205 return patch.filterpatch(ui, originalhunks, operation)
206 206
207 207 def recordfilter(ui, originalhunks, operation=None):
208 208 """ Prompts the user to filter the originalhunks and return a list of
209 209 selected hunks.
210 210 *operation* is used to build ui messages to indicate to the user what
211 211 kind of filtering they are doing: reverting, committing, shelving, etc.
212 212 (see patch.filterpatch).
213 213 """
214 214 usecurses = crecordmod.checkcurses(ui)
215 215 testfile = ui.config('experimental', 'crecordtest')
216 216 oldwrite = setupwrapcolorwrite(ui)
217 217 try:
218 218 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
219 219 testfile, operation)
220 220 finally:
221 221 ui.write = oldwrite
222 222 return newchunks, newopts
223 223
224 224 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
225 225 filterfn, *pats, **opts):
226 226 from . import merge as mergemod
227 227 opts = pycompat.byteskwargs(opts)
228 228 if not ui.interactive():
229 229 if cmdsuggest:
230 230 msg = _('running non-interactively, use %s instead') % cmdsuggest
231 231 else:
232 232 msg = _('running non-interactively')
233 233 raise error.Abort(msg)
234 234
235 235 # make sure username is set before going interactive
236 236 if not opts.get('user'):
237 237 ui.username() # raise exception, username not provided
238 238
239 239 def recordfunc(ui, repo, message, match, opts):
240 240 """This is generic record driver.
241 241
242 242 Its job is to interactively filter local changes, and
243 243 accordingly prepare working directory into a state in which the
244 244 job can be delegated to a non-interactive commit command such as
245 245 'commit' or 'qrefresh'.
246 246
247 247 After the actual job is done by non-interactive command, the
248 248 working directory is restored to its original state.
249 249
250 250 In the end we'll record interesting changes, and everything else
251 251 will be left in place, so the user can continue working.
252 252 """
253 253
254 254 checkunfinished(repo, commit=True)
255 255 wctx = repo[None]
256 256 merge = len(wctx.parents()) > 1
257 257 if merge:
258 258 raise error.Abort(_('cannot partially commit a merge '
259 259 '(use "hg commit" instead)'))
260 260
261 261 def fail(f, msg):
262 262 raise error.Abort('%s: %s' % (f, msg))
263 263
264 264 force = opts.get('force')
265 265 if not force:
266 266 vdirs = []
267 267 match.explicitdir = vdirs.append
268 268 match.bad = fail
269 269
270 270 status = repo.status(match=match)
271 271 if not force:
272 272 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
273 273 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
274 274 diffopts.nodates = True
275 275 diffopts.git = True
276 276 diffopts.showfunc = True
277 277 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
278 278 originalchunks = patch.parsepatch(originaldiff)
279 279
280 280 # 1. filter patch, since we are intending to apply subset of it
281 281 try:
282 282 chunks, newopts = filterfn(ui, originalchunks)
283 283 except error.PatchError as err:
284 284 raise error.Abort(_('error parsing patch: %s') % err)
285 285 opts.update(newopts)
286 286
287 287 # We need to keep a backup of files that have been newly added and
288 288 # modified during the recording process because there is a previous
289 289 # version without the edit in the workdir
290 290 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
291 291 contenders = set()
292 292 for h in chunks:
293 293 try:
294 294 contenders.update(set(h.files()))
295 295 except AttributeError:
296 296 pass
297 297
298 298 changed = status.modified + status.added + status.removed
299 299 newfiles = [f for f in changed if f in contenders]
300 300 if not newfiles:
301 301 ui.status(_('no changes to record\n'))
302 302 return 0
303 303
304 304 modified = set(status.modified)
305 305
306 306 # 2. backup changed files, so we can restore them in the end
307 307
308 308 if backupall:
309 309 tobackup = changed
310 310 else:
311 311 tobackup = [f for f in newfiles if f in modified or f in \
312 312 newlyaddedandmodifiedfiles]
313 313 backups = {}
314 314 if tobackup:
315 315 backupdir = repo.vfs.join('record-backups')
316 316 try:
317 317 os.mkdir(backupdir)
318 318 except OSError as err:
319 319 if err.errno != errno.EEXIST:
320 320 raise
321 321 try:
322 322 # backup continues
323 323 for f in tobackup:
324 324 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
325 325 dir=backupdir)
326 326 os.close(fd)
327 327 ui.debug('backup %r as %r\n' % (f, tmpname))
328 328 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
329 329 backups[f] = tmpname
330 330
331 331 fp = stringio()
332 332 for c in chunks:
333 333 fname = c.filename()
334 334 if fname in backups:
335 335 c.write(fp)
336 336 dopatch = fp.tell()
337 337 fp.seek(0)
338 338
339 339 # 2.5 optionally review / modify patch in text editor
340 340 if opts.get('review', False):
341 341 patchtext = (crecordmod.diffhelptext
342 342 + crecordmod.patchhelptext
343 343 + fp.read())
344 344 reviewedpatch = ui.edit(patchtext, "",
345 345 action="diff",
346 346 repopath=repo.path)
347 347 fp.truncate(0)
348 348 fp.write(reviewedpatch)
349 349 fp.seek(0)
350 350
351 351 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
352 352 # 3a. apply filtered patch to clean repo (clean)
353 353 if backups:
354 354 # Equivalent to hg.revert
355 355 m = scmutil.matchfiles(repo, backups.keys())
356 356 mergemod.update(repo, repo.dirstate.p1(),
357 357 False, True, matcher=m)
358 358
359 359 # 3b. (apply)
360 360 if dopatch:
361 361 try:
362 362 ui.debug('applying patch\n')
363 363 ui.debug(fp.getvalue())
364 364 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
365 365 except error.PatchError as err:
366 366 raise error.Abort(str(err))
367 367 del fp
368 368
369 369 # 4. We prepared working directory according to filtered
370 370 # patch. Now is the time to delegate the job to
371 371 # commit/qrefresh or the like!
372 372
373 373 # Make all of the pathnames absolute.
374 374 newfiles = [repo.wjoin(nf) for nf in newfiles]
375 375 return commitfunc(ui, repo, *newfiles, **opts)
376 376 finally:
377 377 # 5. finally restore backed-up files
378 378 try:
379 379 dirstate = repo.dirstate
380 380 for realname, tmpname in backups.iteritems():
381 381 ui.debug('restoring %r to %r\n' % (tmpname, realname))
382 382
383 383 if dirstate[realname] == 'n':
384 384 # without normallookup, restoring timestamp
385 385 # may cause partially committed files
386 386 # to be treated as unmodified
387 387 dirstate.normallookup(realname)
388 388
389 389 # copystat=True here and above are a hack to trick any
390 390 # editors that have f open into thinking that we haven't modified them.
391 391 #
392 392 # Also note that this is racy as an editor could notice the
393 393 # file's mtime before we've finished writing it.
394 394 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
395 395 os.unlink(tmpname)
396 396 if tobackup:
397 397 os.rmdir(backupdir)
398 398 except OSError:
399 399 pass
400 400
401 401 def recordinwlock(ui, repo, message, match, opts):
402 402 with repo.wlock():
403 403 return recordfunc(ui, repo, message, match, opts)
404 404
405 405 return commit(ui, repo, recordinwlock, pats, opts)
406 406
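
Restating the interactive-record flow above as a hedged pseudo-sequence (all names are from this file; this is a summary, not runnable code):

    # chunks, newopts = filterfn(ui, originalchunks)      # 1. interactively select hunks
    # backups[f] = tempfile.mkstemp(...)                  # 2. back up touched files
    # mergemod.update(...); patch.internalpatch(...)      # 3. revert, then apply selected hunks
    # commitfunc(ui, repo, *newfiles, **opts)             # 4. delegate to commit/qrefresh
    # util.copyfile(tmpname, repo.wjoin(realname), ...)   # 5. restore the backups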
407 407 class dirnode(object):
408 408 """
409 409 Represent a directory in user working copy with information required for
410 410 the purpose of tersing its status.
411 411
412 412 path is the path to the directory
413 413
414 414 statuses is a set of statuses of all files in this directory (this includes
415 415 all the files in all the subdirectories too)
416 416
417 417 files is a list of files which are direct child of this directory
418 418
419 419 subdirs is a dictionary of sub-directory name as the key and its own
420 420 dirnode object as the value
421 421 """
422 422
423 423 def __init__(self, dirpath):
424 424 self.path = dirpath
425 425 self.statuses = set([])
426 426 self.files = []
427 427 self.subdirs = {}
428 428
429 429 def _addfileindir(self, filename, status):
430 430 """Add a file in this directory as a direct child."""
431 431 self.files.append((filename, status))
432 432
433 433 def addfile(self, filename, status):
434 434 """
435 435 Add a file to this directory or to its direct parent directory.
436 436
437 437 If the file is not a direct child of this directory, we traverse to the
438 438 directory of which this file is a direct child and add the file
439 439 there.
440 440 """
441 441
442 442 # if the filename contains a path separator, it means it's not the direct
443 443 # child of this directory
444 444 if '/' in filename:
445 445 subdir, filep = filename.split('/', 1)
446 446
447 447 # does the dirnode object for subdir exist
448 448 if subdir not in self.subdirs:
449 449 subdirpath = os.path.join(self.path, subdir)
450 450 self.subdirs[subdir] = dirnode(subdirpath)
451 451
452 452 # try adding the file in subdir
453 453 self.subdirs[subdir].addfile(filep, status)
454 454
455 455 else:
456 456 self._addfileindir(filename, status)
457 457
458 458 if status not in self.statuses:
459 459 self.statuses.add(status)
460 460
461 461 def iterfilepaths(self):
462 462 """Yield (status, path) for files directly under this directory."""
463 463 for f, st in self.files:
464 464 yield st, os.path.join(self.path, f)
465 465
466 466 def tersewalk(self, terseargs):
467 467 """
468 468 Yield (status, path) obtained by processing the status of this
469 469 dirnode.
470 470
471 471 terseargs is the string of arguments passed by the user with the `--terse`
472 472 flag.
473 473
474 474 Following are the cases which can happen:
475 475
476 476 1) All the files in the directory (including all the files in its
477 477 subdirectories) share the same status and the user has asked us to terse
478 478 that status. -> yield (status, dirpath)
479 479
480 480 2) Otherwise, we do the following:
481 481
482 482 a) Yield (status, filepath) for all the files which are in this
483 483 directory (only the ones in this directory, not the subdirs)
484 484
485 485 b) Recurse the function on all the subdirectories of this
486 486 directory
487 487 """
488 488
489 489 if len(self.statuses) == 1:
490 490 onlyst = self.statuses.pop()
491 491
492 492 # Making sure we terse only when the status abbreviation is
493 493 # passed as terse argument
494 494 if onlyst in terseargs:
495 495 yield onlyst, self.path + pycompat.ossep
496 496 return
497 497
498 498 # add the files to status list
499 499 for st, fpath in self.iterfilepaths():
500 500 yield st, fpath
501 501
502 502 #recurse on the subdirs
503 503 for dirobj in self.subdirs.values():
504 504 for st, fpath in dirobj.tersewalk(terseargs):
505 505 yield st, fpath
506 506
507 507 def tersedir(statuslist, terseargs):
508 508 """
509 509 Terse the status if all the files in a directory share the same status.
510 510
511 511 statuslist is a scmutil.status() object which contains a list of files for
512 512 each status.
513 513 terseargs is the string passed by the user as the argument to the `--terse`
514 514 flag.
515 515
516 516 The function makes a tree of objects of dirnode class, and at each node it
517 517 stores the information required to know whether we can terse a certain
518 518 directory or not.
519 519 """
520 520 # the order matters here as that is used to produce final list
521 521 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
522 522
523 523 # checking the argument validity
524 524 for s in pycompat.bytestr(terseargs):
525 525 if s not in allst:
526 526 raise error.Abort(_("'%s' not recognized") % s)
527 527
528 528 # creating a dirnode object for the root of the repo
529 529 rootobj = dirnode('')
530 530 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
531 531 'ignored', 'removed')
532 532
533 533 tersedict = {}
534 534 for attrname in pstatus:
535 535 statuschar = attrname[0:1]
536 536 for f in getattr(statuslist, attrname):
537 537 rootobj.addfile(f, statuschar)
538 538 tersedict[statuschar] = []
539 539
540 540 # we won't be tersing the root dir, so add files in it
541 541 for st, fpath in rootobj.iterfilepaths():
542 542 tersedict[st].append(fpath)
543 543
544 544 # process each sub-directory and build tersedict
545 545 for subdir in rootobj.subdirs.values():
546 546 for st, f in subdir.tersewalk(terseargs):
547 547 tersedict[st].append(f)
548 548
549 549 tersedlist = []
550 550 for st in allst:
551 551 tersedict[st].sort()
552 552 tersedlist.append(tersedict[st])
553 553
554 554 return tersedlist
555 555
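
A hedged illustration of the tersing above, with made-up paths: if every file under docs/ has status '?' (unknown) and the user passed --terse u, the docs/ tree collapses to a single entry, while files directly in the repository root are never tersed. As hg status would then print it:

    # before tersing          after tersedir(statuslist, 'u')
    # ? docs/a.txt            ? docs/
    # ? docs/api/b.txt        ? top.txt
    # ? top.txt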
556 556 def _commentlines(raw):
557 557 '''Surround lines with a comment char and a new line'''
558 558 lines = raw.splitlines()
559 559 commentedlines = ['# %s' % line for line in lines]
560 560 return '\n'.join(commentedlines) + '\n'
561 561
562 562 def _conflictsmsg(repo):
563 563 # avoid merge cycle
564 564 from . import merge as mergemod
565 565 mergestate = mergemod.mergestate.read(repo)
566 566 if not mergestate.active():
567 567 return
568 568
569 569 m = scmutil.match(repo[None])
570 570 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
571 571 if unresolvedlist:
572 572 mergeliststr = '\n'.join(
573 573 [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
574 574 for path in unresolvedlist])
575 575 msg = _('''Unresolved merge conflicts:
576 576
577 577 %s
578 578
579 579 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
580 580 else:
581 581 msg = _('No unresolved merge conflicts.')
582 582
583 583 return _commentlines(msg)
584 584
585 585 def _helpmessage(continuecmd, abortcmd):
586 586 msg = _('To continue: %s\n'
587 587 'To abort: %s') % (continuecmd, abortcmd)
588 588 return _commentlines(msg)
589 589
590 590 def _rebasemsg():
591 591 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592 592
593 593 def _histeditmsg():
594 594 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595 595
596 596 def _unshelvemsg():
597 597 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598 598
599 599 def _updatecleanmsg(dest=None):
600 600 warning = _('warning: this will discard uncommitted changes')
601 601 return 'hg update --clean %s (%s)' % (dest or '.', warning)
602 602
603 603 def _graftmsg():
604 604 # tweakdefaults requires `update` to have a rev hence the `.`
605 605 return _helpmessage('hg graft --continue', _updatecleanmsg())
606 606
607 607 def _mergemsg():
608 608 # tweakdefaults requires `update` to have a rev hence the `.`
609 609 return _helpmessage('hg commit', _updatecleanmsg())
610 610
611 611 def _bisectmsg():
612 612 msg = _('To mark the changeset good: hg bisect --good\n'
613 613 'To mark the changeset bad: hg bisect --bad\n'
614 614 'To abort: hg bisect --reset\n')
615 615 return _commentlines(msg)
616 616
617 617 def fileexistspredicate(filename):
618 618 return lambda repo: repo.vfs.exists(filename)
619 619
620 620 def _mergepredicate(repo):
621 621 return len(repo[None].parents()) > 1
622 622
623 623 STATES = (
624 624 # (state, predicate to detect states, helpful message function)
625 625 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
626 626 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
627 627 ('graft', fileexistspredicate('graftstate'), _graftmsg),
628 628 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
629 629 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
630 630 # The merge state is part of a list that will be iterated over.
631 631 # They need to be last because some of the other unfinished states may also
632 632 # be in a merge or update state (eg. rebase, histedit, graft, etc).
633 633 # We want those to have priority.
634 634 ('merge', _mergepredicate, _mergemsg),
635 635 )
636 636
637 637 def _getrepostate(repo):
638 638 # experimental config: commands.status.skipstates
639 639 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
640 640 for state, statedetectionpredicate, msgfn in STATES:
641 641 if state in skip:
642 642 continue
643 643 if statedetectionpredicate(repo):
644 644 return (state, statedetectionpredicate, msgfn)
645 645
646 646 def morestatus(repo, fm):
647 647 statetuple = _getrepostate(repo)
648 648 label = 'status.morestatus'
649 649 if statetuple:
650 650 fm.startitem()
651 651 state, statedetectionpredicate, helpfulmsg = statetuple
652 652 statemsg = _('The repository is in an unfinished *%s* state.') % state
653 653 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
654 654 conmsg = _conflictsmsg(repo)
655 655 if conmsg:
656 656 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
657 657 if helpfulmsg:
658 658 helpmsg = helpfulmsg()
659 659 fm.write('helpmsg', '%s\n', helpmsg, label=label)
660 660
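
For reference, a rough example of what the helpers above add to verbose status output when a rebase is interrupted (spacing approximated):

    # The repository is in an unfinished *rebase* state.
    # To continue:    hg rebase --continue
    # To abort:       hg rebase --abort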
661 661 def findpossible(cmd, table, strict=False):
662 662 """
663 663 Return cmd -> (aliases, command table entry)
664 664 for each matching command.
665 665 Return debug commands (or their aliases) only if no normal command matches.
666 666 """
667 667 choice = {}
668 668 debugchoice = {}
669 669
670 670 if cmd in table:
671 671 # short-circuit exact matches, "log" alias beats "^log|history"
672 672 keys = [cmd]
673 673 else:
674 674 keys = table.keys()
675 675
676 676 allcmds = []
677 677 for e in keys:
678 678 aliases = parsealiases(e)
679 679 allcmds.extend(aliases)
680 680 found = None
681 681 if cmd in aliases:
682 682 found = cmd
683 683 elif not strict:
684 684 for a in aliases:
685 685 if a.startswith(cmd):
686 686 found = a
687 687 break
688 688 if found is not None:
689 689 if aliases[0].startswith("debug") or found.startswith("debug"):
690 690 debugchoice[found] = (aliases, table[e])
691 691 else:
692 692 choice[found] = (aliases, table[e])
693 693
694 694 if not choice and debugchoice:
695 695 choice = debugchoice
696 696
697 697 return choice, allcmds
698 698
699 699 def findcmd(cmd, table, strict=True):
700 700 """Return (aliases, command table entry) for command string."""
701 701 choice, allcmds = findpossible(cmd, table, strict)
702 702
703 703 if cmd in choice:
704 704 return choice[cmd]
705 705
706 706 if len(choice) > 1:
707 707 clist = sorted(choice)
708 708 raise error.AmbiguousCommand(cmd, clist)
709 709
710 710 if choice:
711 711 return list(choice.values())[0]
712 712
713 713 raise error.UnknownCommand(cmd, allcmds)
714 714
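
A hedged resolution sketch for the two helpers above (the command table here is illustrative): exact aliases win, unambiguous prefixes are accepted when strict is off, and ambiguous prefixes raise:

    # table = {'^status|st': statusentry, '^summary|sum': summaryentry}
    # findcmd('st', table)                  -> (['status', 'st'], statusentry)   exact alias
    # findcmd('stat', table, strict=False)  -> same entry, matched by prefix
    # findcmd('s', table, strict=False)     -> raises error.AmbiguousCommand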
715 715 def findrepo(p):
716 716 while not os.path.isdir(os.path.join(p, ".hg")):
717 717 oldp, p = p, os.path.dirname(p)
718 718 if p == oldp:
719 719 return None
720 720
721 721 return p
722 722
723 723 def bailifchanged(repo, merge=True, hint=None):
724 724 """ enforce the precondition that working directory must be clean.
725 725
726 726 'merge' can be set to false if a pending uncommitted merge should be
727 727 ignored (such as when 'update --check' runs).
728 728
729 729 'hint' is the usual hint given to Abort exception.
730 730 """
731 731
732 732 if merge and repo.dirstate.p2() != nullid:
733 733 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
734 734 modified, added, removed, deleted = repo.status()[:4]
735 735 if modified or added or removed or deleted:
736 736 raise error.Abort(_('uncommitted changes'), hint=hint)
737 737 ctx = repo[None]
738 738 for s in sorted(ctx.substate):
739 739 ctx.sub(s).bailifchanged(hint=hint)
740 740
741 741 def logmessage(ui, opts):
742 742 """ get the log message according to -m and -l option """
743 743 message = opts.get('message')
744 744 logfile = opts.get('logfile')
745 745
746 746 if message and logfile:
747 747 raise error.Abort(_('options --message and --logfile are mutually '
748 748 'exclusive'))
749 749 if not message and logfile:
750 750 try:
751 751 if isstdiofilename(logfile):
752 752 message = ui.fin.read()
753 753 else:
754 754 message = '\n'.join(util.readfile(logfile).splitlines())
755 755 except IOError as inst:
756 756 raise error.Abort(_("can't read commit message '%s': %s") %
757 757 (logfile, encoding.strtolocal(inst.strerror)))
758 758 return message
759 759
760 760 def mergeeditform(ctxorbool, baseformname):
761 761 """return appropriate editform name (referencing a committemplate)
762 762
763 763 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
764 764 a merge is being committed.
765 765
766 766 This returns baseformname with '.merge' appended if it is a merge,
767 767 otherwise '.normal' is appended.
768 768 """
769 769 if isinstance(ctxorbool, bool):
770 770 if ctxorbool:
771 771 return baseformname + ".merge"
772 772 elif 1 < len(ctxorbool.parents()):
773 773 return baseformname + ".merge"
774 774
775 775 return baseformname + ".normal"
776 776
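
A quick sketch of the naming rule above:

    # mergeeditform(True, 'commit')   -> 'commit.merge'
    # mergeeditform(False, 'commit')  -> 'commit.normal'
    # a changectx argument is handled the same way, based on its number of parents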
777 777 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
778 778 editform='', **opts):
779 779 """get appropriate commit message editor according to '--edit' option
780 780
781 781 'finishdesc' is a function to be called with the edited commit message
782 782 (= 'description' of the new changeset) just after editing, but
783 783 before checking empty-ness. It should return actual text to be
784 784 stored into history. This allows changing the description before
785 785 storing.
786 786
787 787 'extramsg' is an extra message to be shown in the editor instead of
788 788 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
789 789 is automatically added.
790 790
791 791 'editform' is a dot-separated list of names, to distinguish
792 792 the purpose of commit text editing.
793 793
794 794 'getcommiteditor' returns 'commitforceeditor' regardless of
795 795 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
796 796 they are specific for usage in MQ.
797 797 """
798 798 if edit or finishdesc or extramsg:
799 799 return lambda r, c, s: commitforceeditor(r, c, s,
800 800 finishdesc=finishdesc,
801 801 extramsg=extramsg,
802 802 editform=editform)
803 803 elif editform:
804 804 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
805 805 else:
806 806 return commiteditor
807 807
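
A hedged usage sketch, mirroring how this helper is called later in this file (opts is the usual parsed options dict):

    # editor = getcommiteditor(editform='import.normal', **opts)
    # node = repo.commit(message, user, date, match=m, editor=editor)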
808 808 def loglimit(opts):
809 809 """get the log limit according to option -l/--limit"""
810 810 limit = opts.get('limit')
811 811 if limit:
812 812 try:
813 813 limit = int(limit)
814 814 except ValueError:
815 815 raise error.Abort(_('limit must be a positive integer'))
816 816 if limit <= 0:
817 817 raise error.Abort(_('limit must be positive'))
818 818 else:
819 819 limit = None
820 820 return limit
821 821
822 822 def makefilename(repo, pat, node, desc=None,
823 823 total=None, seqno=None, revwidth=None, pathname=None):
824 824 node_expander = {
825 825 'H': lambda: hex(node),
826 'R': lambda: str(repo.changelog.rev(node)),
826 'R': lambda: '%d' % repo.changelog.rev(node),
827 827 'h': lambda: short(node),
828 'm': lambda: re.sub('[^\w]', '_', str(desc))
828 'm': lambda: re.sub('[^\w]', '_', desc or '')
829 829 }
830 830 expander = {
831 831 '%': lambda: '%',
832 832 'b': lambda: os.path.basename(repo.root),
833 833 }
834 834
835 835 try:
836 836 if node:
837 837 expander.update(node_expander)
838 838 if node:
839 839 expander['r'] = (lambda:
840 str(repo.changelog.rev(node)).zfill(revwidth or 0))
840 ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
841 841 if total is not None:
842 expander['N'] = lambda: str(total)
842 expander['N'] = lambda: '%d' % total
843 843 if seqno is not None:
844 expander['n'] = lambda: str(seqno)
844 expander['n'] = lambda: '%d' % seqno
845 845 if total is not None and seqno is not None:
846 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
846 expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
847 847 if pathname is not None:
848 848 expander['s'] = lambda: os.path.basename(pathname)
849 849 expander['d'] = lambda: os.path.dirname(pathname) or '.'
850 850 expander['p'] = lambda: pathname
851 851
852 852 newname = []
853 853 patlen = len(pat)
854 854 i = 0
855 855 while i < patlen:
856 856 c = pat[i:i + 1]
857 857 if c == '%':
858 858 i += 1
859 859 c = pat[i:i + 1]
860 860 c = expander[c]()
861 861 newname.append(c)
862 862 i += 1
863 863 return ''.join(newname)
864 864 except KeyError as inst:
865 865 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
866 866 inst.args[0])
867 867
868 868 def isstdiofilename(pat):
869 869 """True if the given pat looks like a filename denoting stdin/stdout"""
870 870 return not pat or pat == '-'
871 871
872 872 class _unclosablefile(object):
873 873 def __init__(self, fp):
874 874 self._fp = fp
875 875
876 876 def close(self):
877 877 pass
878 878
879 879 def __iter__(self):
880 880 return iter(self._fp)
881 881
882 882 def __getattr__(self, attr):
883 883 return getattr(self._fp, attr)
884 884
885 885 def __enter__(self):
886 886 return self
887 887
888 888 def __exit__(self, exc_type, exc_value, exc_tb):
889 889 pass
890 890
891 891 def makefileobj(repo, pat, node=None, desc=None, total=None,
892 892 seqno=None, revwidth=None, mode='wb', modemap=None,
893 893 pathname=None):
894 894
895 895 writable = mode not in ('r', 'rb')
896 896
897 897 if isstdiofilename(pat):
898 898 if writable:
899 899 fp = repo.ui.fout
900 900 else:
901 901 fp = repo.ui.fin
902 902 return _unclosablefile(fp)
903 903 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
904 904 if modemap is not None:
905 905 mode = modemap.get(fn, mode)
906 906 if mode == 'wb':
907 907 modemap[fn] = 'ab'
908 908 return open(fn, mode)
909 909
910 910 def openrevlog(repo, cmd, file_, opts):
911 911 """opens the changelog, manifest, a filelog or a given revlog"""
912 912 cl = opts['changelog']
913 913 mf = opts['manifest']
914 914 dir = opts['dir']
915 915 msg = None
916 916 if cl and mf:
917 917 msg = _('cannot specify --changelog and --manifest at the same time')
918 918 elif cl and dir:
919 919 msg = _('cannot specify --changelog and --dir at the same time')
920 920 elif cl or mf or dir:
921 921 if file_:
922 922 msg = _('cannot specify filename with --changelog or --manifest')
923 923 elif not repo:
924 924 msg = _('cannot specify --changelog or --manifest or --dir '
925 925 'without a repository')
926 926 if msg:
927 927 raise error.Abort(msg)
928 928
929 929 r = None
930 930 if repo:
931 931 if cl:
932 932 r = repo.unfiltered().changelog
933 933 elif dir:
934 934 if 'treemanifest' not in repo.requirements:
935 935 raise error.Abort(_("--dir can only be used on repos with "
936 936 "treemanifest enabled"))
937 937 dirlog = repo.manifestlog._revlog.dirlog(dir)
938 938 if len(dirlog):
939 939 r = dirlog
940 940 elif mf:
941 941 r = repo.manifestlog._revlog
942 942 elif file_:
943 943 filelog = repo.file(file_)
944 944 if len(filelog):
945 945 r = filelog
946 946 if not r:
947 947 if not file_:
948 948 raise error.CommandError(cmd, _('invalid arguments'))
949 949 if not os.path.isfile(file_):
950 950 raise error.Abort(_("revlog '%s' not found") % file_)
951 951 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
952 952 file_[:-2] + ".i")
953 953 return r
954 954
955 955 def copy(ui, repo, pats, opts, rename=False):
956 956 # called with the repo lock held
957 957 #
958 958 # hgsep => pathname that uses "/" to separate directories
959 959 # ossep => pathname that uses os.sep to separate directories
960 960 cwd = repo.getcwd()
961 961 targets = {}
962 962 after = opts.get("after")
963 963 dryrun = opts.get("dry_run")
964 964 wctx = repo[None]
965 965
966 966 def walkpat(pat):
967 967 srcs = []
968 968 if after:
969 969 badstates = '?'
970 970 else:
971 971 badstates = '?r'
972 972 m = scmutil.match(wctx, [pat], opts, globbed=True)
973 973 for abs in wctx.walk(m):
974 974 state = repo.dirstate[abs]
975 975 rel = m.rel(abs)
976 976 exact = m.exact(abs)
977 977 if state in badstates:
978 978 if exact and state == '?':
979 979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
980 980 if exact and state == 'r':
981 981 ui.warn(_('%s: not copying - file has been marked for'
982 982 ' remove\n') % rel)
983 983 continue
984 984 # abs: hgsep
985 985 # rel: ossep
986 986 srcs.append((abs, rel, exact))
987 987 return srcs
988 988
989 989 # abssrc: hgsep
990 990 # relsrc: ossep
991 991 # otarget: ossep
992 992 def copyfile(abssrc, relsrc, otarget, exact):
993 993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
994 994 if '/' in abstarget:
995 995 # We cannot normalize abstarget itself, this would prevent
996 996 # case only renames, like a => A.
997 997 abspath, absname = abstarget.rsplit('/', 1)
998 998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
999 999 reltarget = repo.pathto(abstarget, cwd)
1000 1000 target = repo.wjoin(abstarget)
1001 1001 src = repo.wjoin(abssrc)
1002 1002 state = repo.dirstate[abstarget]
1003 1003
1004 1004 scmutil.checkportable(ui, abstarget)
1005 1005
1006 1006 # check for collisions
1007 1007 prevsrc = targets.get(abstarget)
1008 1008 if prevsrc is not None:
1009 1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1010 1010 (reltarget, repo.pathto(abssrc, cwd),
1011 1011 repo.pathto(prevsrc, cwd)))
1012 1012 return
1013 1013
1014 1014 # check for overwrites
1015 1015 exists = os.path.lexists(target)
1016 1016 samefile = False
1017 1017 if exists and abssrc != abstarget:
1018 1018 if (repo.dirstate.normalize(abssrc) ==
1019 1019 repo.dirstate.normalize(abstarget)):
1020 1020 if not rename:
1021 1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1022 1022 return
1023 1023 exists = False
1024 1024 samefile = True
1025 1025
1026 1026 if not after and exists or after and state in 'mn':
1027 1027 if not opts['force']:
1028 1028 if state in 'mn':
1029 1029 msg = _('%s: not overwriting - file already committed\n')
1030 1030 if after:
1031 1031 flags = '--after --force'
1032 1032 else:
1033 1033 flags = '--force'
1034 1034 if rename:
1035 1035 hint = _('(hg rename %s to replace the file by '
1036 1036 'recording a rename)\n') % flags
1037 1037 else:
1038 1038 hint = _('(hg copy %s to replace the file by '
1039 1039 'recording a copy)\n') % flags
1040 1040 else:
1041 1041 msg = _('%s: not overwriting - file exists\n')
1042 1042 if rename:
1043 1043 hint = _('(hg rename --after to record the rename)\n')
1044 1044 else:
1045 1045 hint = _('(hg copy --after to record the copy)\n')
1046 1046 ui.warn(msg % reltarget)
1047 1047 ui.warn(hint)
1048 1048 return
1049 1049
1050 1050 if after:
1051 1051 if not exists:
1052 1052 if rename:
1053 1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1054 1054 (relsrc, reltarget))
1055 1055 else:
1056 1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1057 1057 (relsrc, reltarget))
1058 1058 return
1059 1059 elif not dryrun:
1060 1060 try:
1061 1061 if exists:
1062 1062 os.unlink(target)
1063 1063 targetdir = os.path.dirname(target) or '.'
1064 1064 if not os.path.isdir(targetdir):
1065 1065 os.makedirs(targetdir)
1066 1066 if samefile:
1067 1067 tmp = target + "~hgrename"
1068 1068 os.rename(src, tmp)
1069 1069 os.rename(tmp, target)
1070 1070 else:
1071 1071 util.copyfile(src, target)
1072 1072 srcexists = True
1073 1073 except IOError as inst:
1074 1074 if inst.errno == errno.ENOENT:
1075 1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1076 1076 srcexists = False
1077 1077 else:
1078 1078 ui.warn(_('%s: cannot copy - %s\n') %
1079 1079 (relsrc, encoding.strtolocal(inst.strerror)))
1080 1080 return True # report a failure
1081 1081
1082 1082 if ui.verbose or not exact:
1083 1083 if rename:
1084 1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1085 1085 else:
1086 1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1087 1087
1088 1088 targets[abstarget] = abssrc
1089 1089
1090 1090 # fix up dirstate
1091 1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1092 1092 dryrun=dryrun, cwd=cwd)
1093 1093 if rename and not dryrun:
1094 1094 if not after and srcexists and not samefile:
1095 1095 repo.wvfs.unlinkpath(abssrc)
1096 1096 wctx.forget([abssrc])
1097 1097
1098 1098 # pat: ossep
1099 1099 # dest ossep
1100 1100 # srcs: list of (hgsep, hgsep, ossep, bool)
1101 1101 # return: function that takes hgsep and returns ossep
1102 1102 def targetpathfn(pat, dest, srcs):
1103 1103 if os.path.isdir(pat):
1104 1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1105 1105 abspfx = util.localpath(abspfx)
1106 1106 if destdirexists:
1107 1107 striplen = len(os.path.split(abspfx)[0])
1108 1108 else:
1109 1109 striplen = len(abspfx)
1110 1110 if striplen:
1111 1111 striplen += len(pycompat.ossep)
1112 1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1113 1113 elif destdirexists:
1114 1114 res = lambda p: os.path.join(dest,
1115 1115 os.path.basename(util.localpath(p)))
1116 1116 else:
1117 1117 res = lambda p: dest
1118 1118 return res
1119 1119
1120 1120 # pat: ossep
1121 1121 # dest ossep
1122 1122 # srcs: list of (hgsep, hgsep, ossep, bool)
1123 1123 # return: function that takes hgsep and returns ossep
1124 1124 def targetpathafterfn(pat, dest, srcs):
1125 1125 if matchmod.patkind(pat):
1126 1126 # a mercurial pattern
1127 1127 res = lambda p: os.path.join(dest,
1128 1128 os.path.basename(util.localpath(p)))
1129 1129 else:
1130 1130 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1131 1131 if len(abspfx) < len(srcs[0][0]):
1132 1132 # A directory. Either the target path contains the last
1133 1133 # component of the source path or it does not.
1134 1134 def evalpath(striplen):
1135 1135 score = 0
1136 1136 for s in srcs:
1137 1137 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1138 1138 if os.path.lexists(t):
1139 1139 score += 1
1140 1140 return score
1141 1141
1142 1142 abspfx = util.localpath(abspfx)
1143 1143 striplen = len(abspfx)
1144 1144 if striplen:
1145 1145 striplen += len(pycompat.ossep)
1146 1146 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1147 1147 score = evalpath(striplen)
1148 1148 striplen1 = len(os.path.split(abspfx)[0])
1149 1149 if striplen1:
1150 1150 striplen1 += len(pycompat.ossep)
1151 1151 if evalpath(striplen1) > score:
1152 1152 striplen = striplen1
1153 1153 res = lambda p: os.path.join(dest,
1154 1154 util.localpath(p)[striplen:])
1155 1155 else:
1156 1156 # a file
1157 1157 if destdirexists:
1158 1158 res = lambda p: os.path.join(dest,
1159 1159 os.path.basename(util.localpath(p)))
1160 1160 else:
1161 1161 res = lambda p: dest
1162 1162 return res
1163 1163
1164 1164 pats = scmutil.expandpats(pats)
1165 1165 if not pats:
1166 1166 raise error.Abort(_('no source or destination specified'))
1167 1167 if len(pats) == 1:
1168 1168 raise error.Abort(_('no destination specified'))
1169 1169 dest = pats.pop()
1170 1170 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1171 1171 if not destdirexists:
1172 1172 if len(pats) > 1 or matchmod.patkind(pats[0]):
1173 1173 raise error.Abort(_('with multiple sources, destination must be an '
1174 1174 'existing directory'))
1175 1175 if util.endswithsep(dest):
1176 1176 raise error.Abort(_('destination %s is not a directory') % dest)
1177 1177
1178 1178 tfn = targetpathfn
1179 1179 if after:
1180 1180 tfn = targetpathafterfn
1181 1181 copylist = []
1182 1182 for pat in pats:
1183 1183 srcs = walkpat(pat)
1184 1184 if not srcs:
1185 1185 continue
1186 1186 copylist.append((tfn(pat, dest, srcs), srcs))
1187 1187 if not copylist:
1188 1188 raise error.Abort(_('no files to copy'))
1189 1189
1190 1190 errors = 0
1191 1191 for targetpath, srcs in copylist:
1192 1192 for abssrc, relsrc, exact in srcs:
1193 1193 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1194 1194 errors += 1
1195 1195
1196 1196 if errors:
1197 1197 ui.warn(_('(consider using --after)\n'))
1198 1198
1199 1199 return errors != 0
1200 1200
1201 1201 ## facility to let extensions process additional data into an import patch
1202 1202 # list of identifiers to be executed in order
1203 1203 extrapreimport = [] # run before commit
1204 1204 extrapostimport = [] # run after commit
1205 1205 # mapping from identifier to actual import function
1206 1206 #
1207 1207 # 'preimport' are run before the commit is made and are provided the following
1208 1208 # arguments:
1209 1209 # - repo: the localrepository instance,
1210 1210 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1211 1211 # - extra: the future extra dictionary of the changeset, please mutate it,
1212 1212 # - opts: the import options.
1213 1213 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1214 1214 # mutation of in memory commit and more. Feel free to rework the code to get
1215 1215 # there.
1216 1216 extrapreimportmap = {}
1217 1217 # 'postimport' are run after the commit is made and are provided the following
1218 1218 # argument:
1219 1219 # - ctx: the changectx created by import.
1220 1220 extrapostimportmap = {}
1221 1221
1222 1222 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1223 1223 """Utility function used by commands.import to import a single patch
1224 1224
1225 1225 This function is explicitly defined here to help the evolve extension to
1226 1226 wrap this part of the import logic.
1227 1227
1228 1228 The API is currently a bit ugly because it is a simple code translation from
1229 1229 the import command. Feel free to make it better.
1230 1230
1231 1231 :hunk: a patch (as a binary string)
1232 1232 :parents: nodes that will be parent of the created commit
1233 1233 :opts: the full dict of options passed to the import command
1234 1234 :msgs: list to save commit message to.
1235 1235 (used in case we need to save it when failing)
1236 1236 :updatefunc: a function that updates a repo to a given node
1237 1237 updatefunc(<repo>, <node>)
1238 1238 """
1239 1239 # avoid cycle context -> subrepo -> cmdutil
1240 1240 from . import context
1241 1241 extractdata = patch.extract(ui, hunk)
1242 1242 tmpname = extractdata.get('filename')
1243 1243 message = extractdata.get('message')
1244 1244 user = opts.get('user') or extractdata.get('user')
1245 1245 date = opts.get('date') or extractdata.get('date')
1246 1246 branch = extractdata.get('branch')
1247 1247 nodeid = extractdata.get('nodeid')
1248 1248 p1 = extractdata.get('p1')
1249 1249 p2 = extractdata.get('p2')
1250 1250
1251 1251 nocommit = opts.get('no_commit')
1252 1252 importbranch = opts.get('import_branch')
1253 1253 update = not opts.get('bypass')
1254 1254 strip = opts["strip"]
1255 1255 prefix = opts["prefix"]
1256 1256 sim = float(opts.get('similarity') or 0)
1257 1257 if not tmpname:
1258 1258 return (None, None, False)
1259 1259
1260 1260 rejects = False
1261 1261
1262 1262 try:
1263 1263 cmdline_message = logmessage(ui, opts)
1264 1264 if cmdline_message:
1265 1265 # pickup the cmdline msg
1266 1266 message = cmdline_message
1267 1267 elif message:
1268 1268 # pickup the patch msg
1269 1269 message = message.strip()
1270 1270 else:
1271 1271 # launch the editor
1272 1272 message = None
1273 1273 ui.debug('message:\n%s\n' % message)
1274 1274
1275 1275 if len(parents) == 1:
1276 1276 parents.append(repo[nullid])
1277 1277 if opts.get('exact'):
1278 1278 if not nodeid or not p1:
1279 1279 raise error.Abort(_('not a Mercurial patch'))
1280 1280 p1 = repo[p1]
1281 1281 p2 = repo[p2 or nullid]
1282 1282 elif p2:
1283 1283 try:
1284 1284 p1 = repo[p1]
1285 1285 p2 = repo[p2]
1286 1286 # Without any options, consider p2 only if the
1287 1287 # patch is being applied on top of the recorded
1288 1288 # first parent.
1289 1289 if p1 != parents[0]:
1290 1290 p1 = parents[0]
1291 1291 p2 = repo[nullid]
1292 1292 except error.RepoError:
1293 1293 p1, p2 = parents
1294 1294 if p2.node() == nullid:
1295 1295 ui.warn(_("warning: import the patch as a normal revision\n"
1296 1296 "(use --exact to import the patch as a merge)\n"))
1297 1297 else:
1298 1298 p1, p2 = parents
1299 1299
1300 1300 n = None
1301 1301 if update:
1302 1302 if p1 != parents[0]:
1303 1303 updatefunc(repo, p1.node())
1304 1304 if p2 != parents[1]:
1305 1305 repo.setparents(p1.node(), p2.node())
1306 1306
1307 1307 if opts.get('exact') or importbranch:
1308 1308 repo.dirstate.setbranch(branch or 'default')
1309 1309
1310 1310 partial = opts.get('partial', False)
1311 1311 files = set()
1312 1312 try:
1313 1313 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1314 1314 files=files, eolmode=None, similarity=sim / 100.0)
1315 1315 except error.PatchError as e:
1316 1316 if not partial:
1317 1317 raise error.Abort(str(e))
1318 1318 if partial:
1319 1319 rejects = True
1320 1320
1321 1321 files = list(files)
1322 1322 if nocommit:
1323 1323 if message:
1324 1324 msgs.append(message)
1325 1325 else:
1326 1326 if opts.get('exact') or p2:
1327 1327 # If you got here, you either use --force and know what
1328 1328 # you are doing or used --exact or a merge patch while
1329 1329 # being updated to its first parent.
1330 1330 m = None
1331 1331 else:
1332 1332 m = scmutil.matchfiles(repo, files or [])
1333 1333 editform = mergeeditform(repo[None], 'import.normal')
1334 1334 if opts.get('exact'):
1335 1335 editor = None
1336 1336 else:
1337 1337 editor = getcommiteditor(editform=editform, **opts)
1338 1338 extra = {}
1339 1339 for idfunc in extrapreimport:
1340 1340 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1341 1341 overrides = {}
1342 1342 if partial:
1343 1343 overrides[('ui', 'allowemptycommit')] = True
1344 1344 with repo.ui.configoverride(overrides, 'import'):
1345 1345 n = repo.commit(message, user,
1346 1346 date, match=m,
1347 1347 editor=editor, extra=extra)
1348 1348 for idfunc in extrapostimport:
1349 1349 extrapostimportmap[idfunc](repo[n])
1350 1350 else:
1351 1351 if opts.get('exact') or importbranch:
1352 1352 branch = branch or 'default'
1353 1353 else:
1354 1354 branch = p1.branch()
1355 1355 store = patch.filestore()
1356 1356 try:
1357 1357 files = set()
1358 1358 try:
1359 1359 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1360 1360 files, eolmode=None)
1361 1361 except error.PatchError as e:
1362 1362 raise error.Abort(str(e))
1363 1363 if opts.get('exact'):
1364 1364 editor = None
1365 1365 else:
1366 1366 editor = getcommiteditor(editform='import.bypass')
1367 1367 memctx = context.memctx(repo, (p1.node(), p2.node()),
1368 1368 message,
1369 1369 files=files,
1370 1370 filectxfn=store,
1371 1371 user=user,
1372 1372 date=date,
1373 1373 branch=branch,
1374 1374 editor=editor)
1375 1375 n = memctx.commit()
1376 1376 finally:
1377 1377 store.close()
1378 1378 if opts.get('exact') and nocommit:
1379 1379 # --exact with --no-commit is still useful in that it does merge
1380 1380 # and branch bits
1381 1381 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1382 1382 elif opts.get('exact') and hex(n) != nodeid:
1383 1383 raise error.Abort(_('patch is damaged or loses information'))
1384 1384 msg = _('applied to working directory')
1385 1385 if n:
1386 1386 # i18n: refers to a short changeset id
1387 1387 msg = _('created %s') % short(n)
1388 1388 return (msg, n, rejects)
1389 1389 finally:
1390 1390 os.unlink(tmpname)
1391 1391
1392 1392 # facility to let extensions include additional data in an exported patch
1393 1393 # list of identifiers to be executed in order
1394 1394 extraexport = []
1395 1395 # mapping from identifier to actual export function
1396 1396 # function has to return a string to be added to the header or None
1397 1397 # it is given two arguments (sequencenumber, changectx)
1398 1398 extraexportmap = {}
1399 1399
1400 1400 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1401 1401 node = scmutil.binnode(ctx)
1402 1402 parents = [p.node() for p in ctx.parents() if p]
1403 1403 branch = ctx.branch()
1404 1404 if switch_parent:
1405 1405 parents.reverse()
1406 1406
1407 1407 if parents:
1408 1408 prev = parents[0]
1409 1409 else:
1410 1410 prev = nullid
1411 1411
1412 1412 write("# HG changeset patch\n")
1413 1413 write("# User %s\n" % ctx.user())
1414 1414 write("# Date %d %d\n" % ctx.date())
1415 1415 write("# %s\n" % util.datestr(ctx.date()))
1416 1416 if branch and branch != 'default':
1417 1417 write("# Branch %s\n" % branch)
1418 1418 write("# Node ID %s\n" % hex(node))
1419 1419 write("# Parent %s\n" % hex(prev))
1420 1420 if len(parents) > 1:
1421 1421 write("# Parent %s\n" % hex(parents[1]))
1422 1422
1423 1423 for headerid in extraexport:
1424 1424 header = extraexportmap[headerid](seqno, ctx)
1425 1425 if header is not None:
1426 1426 write('# %s\n' % header)
1427 1427 write(ctx.description().rstrip())
1428 1428 write("\n\n")
1429 1429
1430 1430 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1431 1431 write(chunk, label=label)
1432 1432
1433 1433 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1434 1434 opts=None, match=None):
1435 1435 '''export changesets as hg patches
1436 1436
1437 1437 Args:
1438 1438 repo: The repository from which we're exporting revisions.
1439 1439 revs: A list of revisions to export as revision numbers.
1440 1440 fntemplate: An optional string to use for generating patch file names.
1441 1441 fp: An optional file-like object to which patches should be written.
1442 1442 switch_parent: If True, show diffs against second parent when not nullid.
1443 1443 Default is false, which always shows diff against p1.
1444 1444 opts: diff options to use for generating the patch.
1445 1445 match: If specified, only export changes to files matching this matcher.
1446 1446
1447 1447 Returns:
1448 1448 Nothing.
1449 1449
1450 1450 Side Effect:
1451 1451 "HG Changeset Patch" data is emitted to one of the following
1452 1452 destinations:
1453 1453 fp is specified: All revs are written to the specified
1454 1454 file-like object.
1455 1455 fntemplate specified: Each rev is written to a unique file named using
1456 1456 the given template.
1457 1457 Neither fp nor template specified: All revs written to repo.ui.write()
1458 1458 '''
1459 1459
1460 1460 total = len(revs)
1461 1461 revwidth = max(len(str(rev)) for rev in revs)
1462 1462 filemode = {}
1463 1463
1464 1464 write = None
1465 1465 dest = '<unnamed>'
1466 1466 if fp:
1467 1467 dest = getattr(fp, 'name', dest)
1468 1468 def write(s, **kw):
1469 1469 fp.write(s)
1470 1470 elif not fntemplate:
1471 1471 write = repo.ui.write
1472 1472
1473 1473 for seqno, rev in enumerate(revs, 1):
1474 1474 ctx = repo[rev]
1475 1475 fo = None
1476 1476 if not fp and fntemplate:
1477 1477 desc_lines = ctx.description().rstrip().split('\n')
1478 1478 desc = desc_lines[0] #Commit always has a first line.
1479 1479 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1480 1480 total=total, seqno=seqno, revwidth=revwidth,
1481 1481 mode='wb', modemap=filemode)
1482 1482 dest = fo.name
1483 1483 def write(s, **kw):
1484 1484 fo.write(s)
1485 1485 if not dest.startswith('<'):
1486 1486 repo.ui.note("%s\n" % dest)
1487 1487 _exportsingle(
1488 1488 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1489 1489 if fo is not None:
1490 1490 fo.close()
1491 1491
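
A brief usage sketch of export(), following its docstring (revision numbers are made up): per-revision files named from the template, or everything streamed to a single file object:

    # export(repo, [10, 11], fntemplate='hg-%h.patch')   # one patch file per revision
    # with open('all.patch', 'wb') as fp:
    #     export(repo, [10, 11], fp=fp)                  # both patches into all.patch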
1492 1492 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1493 1493 changes=None, stat=False, fp=None, prefix='',
1494 1494 root='', listsubrepos=False, hunksfilterfn=None):
1495 1495 '''show diff or diffstat.'''
1496 1496 if fp is None:
1497 1497 write = ui.write
1498 1498 else:
1499 1499 def write(s, **kw):
1500 1500 fp.write(s)
1501 1501
1502 1502 if root:
1503 1503 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1504 1504 else:
1505 1505 relroot = ''
1506 1506 if relroot != '':
1507 1507 # XXX relative roots currently don't work if the root is within a
1508 1508 # subrepo
1509 1509 uirelroot = match.uipath(relroot)
1510 1510 relroot += '/'
1511 1511 for matchroot in match.files():
1512 1512 if not matchroot.startswith(relroot):
1513 1513 ui.warn(_('warning: %s not inside relative root %s\n') % (
1514 1514 match.uipath(matchroot), uirelroot))
1515 1515
1516 1516 if stat:
1517 1517 diffopts = diffopts.copy(context=0)
1518 1518 width = 80
1519 1519 if not ui.plain():
1520 1520 width = ui.termwidth()
1521 1521 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1522 1522 prefix=prefix, relroot=relroot,
1523 1523 hunksfilterfn=hunksfilterfn)
1524 1524 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1525 1525 width=width):
1526 1526 write(chunk, label=label)
1527 1527 else:
1528 1528 for chunk, label in patch.diffui(repo, node1, node2, match,
1529 1529 changes, diffopts, prefix=prefix,
1530 1530 relroot=relroot,
1531 1531 hunksfilterfn=hunksfilterfn):
1532 1532 write(chunk, label=label)
1533 1533
1534 1534 if listsubrepos:
1535 1535 ctx1 = repo[node1]
1536 1536 ctx2 = repo[node2]
1537 1537 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1538 1538 tempnode2 = node2
1539 1539 try:
1540 1540 if node2 is not None:
1541 1541 tempnode2 = ctx2.substate[subpath][1]
1542 1542 except KeyError:
1543 1543 # A subrepo that existed in node1 was deleted between node1 and
1544 1544 # node2 (inclusive). Thus, ctx2's substate won't contain that
1545 1545 # subpath. The best we can do is to ignore it.
1546 1546 tempnode2 = None
1547 1547 submatch = matchmod.subdirmatcher(subpath, match)
1548 1548 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1549 1549 stat=stat, fp=fp, prefix=prefix)
1550 1550
1551 1551 def _changesetlabels(ctx):
1552 1552 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 1553 if ctx.obsolete():
1554 1554 labels.append('changeset.obsolete')
1555 1555 if ctx.isunstable():
1556 1556 labels.append('changeset.unstable')
1557 1557 for instability in ctx.instabilities():
1558 1558 labels.append('instability.%s' % instability)
1559 1559 return ' '.join(labels)
1560 1560
1561 1561 class changeset_printer(object):
1562 1562 '''show changeset information when templating not requested.'''
1563 1563
1564 1564 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1565 1565 self.ui = ui
1566 1566 self.repo = repo
1567 1567 self.buffered = buffered
1568 1568 self.matchfn = matchfn
1569 1569 self.diffopts = diffopts
1570 1570 self.header = {}
1571 1571 self.hunk = {}
1572 1572 self.lastheader = None
1573 1573 self.footer = None
1574 1574
1575 1575 def flush(self, ctx):
1576 1576 rev = ctx.rev()
1577 1577 if rev in self.header:
1578 1578 h = self.header[rev]
1579 1579 if h != self.lastheader:
1580 1580 self.lastheader = h
1581 1581 self.ui.write(h)
1582 1582 del self.header[rev]
1583 1583 if rev in self.hunk:
1584 1584 self.ui.write(self.hunk[rev])
1585 1585 del self.hunk[rev]
1586 1586 return 1
1587 1587 return 0
1588 1588
1589 1589 def close(self):
1590 1590 if self.footer:
1591 1591 self.ui.write(self.footer)
1592 1592
1593 1593 def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
1594 1594 **props):
1595 1595 props = pycompat.byteskwargs(props)
1596 1596 if self.buffered:
1597 1597 self.ui.pushbuffer(labeled=True)
1598 1598 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1599 1599 self.hunk[ctx.rev()] = self.ui.popbuffer()
1600 1600 else:
1601 1601 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1602 1602
1603 1603 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1604 1604 '''show a single changeset or file revision'''
1605 1605 changenode = ctx.node()
1606 1606 rev = ctx.rev()
1607 1607
1608 1608 if self.ui.quiet:
1609 1609 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
1610 1610 label='log.node')
1611 1611 return
1612 1612
1613 1613 date = util.datestr(ctx.date())
1614 1614
1615 1615 # i18n: column positioning for "hg log"
1616 1616 self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
1617 1617 label=_changesetlabels(ctx))
1618 1618
1619 1619 # branches are shown first, before any other names, due to backwards
1620 1620 # compatibility
1621 1621 branch = ctx.branch()
1622 1622 # don't show the default branch name
1623 1623 if branch != 'default':
1624 1624 # i18n: column positioning for "hg log"
1625 1625 self.ui.write(_("branch: %s\n") % branch,
1626 1626 label='log.branch')
1627 1627
1628 1628 for nsname, ns in self.repo.names.iteritems():
1629 1629 # branches has special logic already handled above, so here we just
1630 1630 # skip it
1631 1631 if nsname == 'branches':
1632 1632 continue
1633 1633 # we will use the templatename as the color name since those two
1634 1634 # should be the same
1635 1635 for name in ns.names(self.repo, changenode):
1636 1636 self.ui.write(ns.logfmt % name,
1637 1637 label='log.%s' % ns.colorname)
1638 1638 if self.ui.debugflag:
1639 1639 # i18n: column positioning for "hg log"
1640 1640 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1641 1641 label='log.phase')
1642 1642 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1643 1643 label = 'log.parent changeset.%s' % pctx.phasestr()
1644 1644 # i18n: column positioning for "hg log"
1645 1645 self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
1646 1646 label=label)
1647 1647
1648 1648 if self.ui.debugflag and rev is not None:
1649 1649 mnode = ctx.manifestnode()
1650 1650 mrev = self.repo.manifestlog._revlog.rev(mnode)
1651 1651 # i18n: column positioning for "hg log"
1652 1652 self.ui.write(_("manifest: %s\n")
1653 1653 % scmutil.formatrevnode(self.ui, mrev, mnode),
1654 1654 label='ui.debug log.manifest')
1655 1655 # i18n: column positioning for "hg log"
1656 1656 self.ui.write(_("user: %s\n") % ctx.user(),
1657 1657 label='log.user')
1658 1658 # i18n: column positioning for "hg log"
1659 1659 self.ui.write(_("date: %s\n") % date,
1660 1660 label='log.date')
1661 1661
1662 1662 if ctx.isunstable():
1663 1663 # i18n: column positioning for "hg log"
1664 1664 instabilities = ctx.instabilities()
1665 1665 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1666 1666 label='log.instability')
1667 1667
1668 1668 elif ctx.obsolete():
1669 1669 self._showobsfate(ctx)
1670 1670
1671 1671 self._exthook(ctx)
1672 1672
1673 1673 if self.ui.debugflag:
1674 1674 files = ctx.p1().status(ctx)[:3]
1675 1675 for key, value in zip([# i18n: column positioning for "hg log"
1676 1676 _("files:"),
1677 1677 # i18n: column positioning for "hg log"
1678 1678 _("files+:"),
1679 1679 # i18n: column positioning for "hg log"
1680 1680 _("files-:")], files):
1681 1681 if value:
1682 1682 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1683 1683 label='ui.debug log.files')
1684 1684 elif ctx.files() and self.ui.verbose:
1685 1685 # i18n: column positioning for "hg log"
1686 1686 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1687 1687 label='ui.note log.files')
1688 1688 if copies and self.ui.verbose:
1689 1689 copies = ['%s (%s)' % c for c in copies]
1690 1690 # i18n: column positioning for "hg log"
1691 1691 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1692 1692 label='ui.note log.copies')
1693 1693
1694 1694 extra = ctx.extra()
1695 1695 if extra and self.ui.debugflag:
1696 1696 for key, value in sorted(extra.items()):
1697 1697 # i18n: column positioning for "hg log"
1698 1698 self.ui.write(_("extra: %s=%s\n")
1699 1699 % (key, util.escapestr(value)),
1700 1700 label='ui.debug log.extra')
1701 1701
1702 1702 description = ctx.description().strip()
1703 1703 if description:
1704 1704 if self.ui.verbose:
1705 1705 self.ui.write(_("description:\n"),
1706 1706 label='ui.note log.description')
1707 1707 self.ui.write(description,
1708 1708 label='ui.note log.description')
1709 1709 self.ui.write("\n\n")
1710 1710 else:
1711 1711 # i18n: column positioning for "hg log"
1712 1712 self.ui.write(_("summary: %s\n") %
1713 1713 description.splitlines()[0],
1714 1714 label='log.summary')
1715 1715 self.ui.write("\n")
1716 1716
1717 1717 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1718 1718
1719 1719 def _showobsfate(self, ctx):
1720 1720 obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)
1721 1721
1722 1722 if obsfate:
1723 1723 for obsfateline in obsfate:
1724 1724 # i18n: column positioning for "hg log"
1725 1725 self.ui.write(_("obsolete: %s\n") % obsfateline,
1726 1726 label='log.obsfate')
1727 1727
1728 1728 def _exthook(self, ctx):
1729 1729 '''empty method used by extensions as a hook point
1730 1730 '''
1731 1731
1732 1732 def showpatch(self, ctx, matchfn, hunksfilterfn=None):
1733 1733 if not matchfn:
1734 1734 matchfn = self.matchfn
1735 1735 if matchfn:
1736 1736 stat = self.diffopts.get('stat')
1737 1737 diff = self.diffopts.get('patch')
1738 1738 diffopts = patch.diffallopts(self.ui, self.diffopts)
1739 1739 node = ctx.node()
1740 1740 prev = ctx.p1().node()
1741 1741 if stat:
1742 1742 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1743 1743 match=matchfn, stat=True,
1744 1744 hunksfilterfn=hunksfilterfn)
1745 1745 if diff:
1746 1746 if stat:
1747 1747 self.ui.write("\n")
1748 1748 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1749 1749 match=matchfn, stat=False,
1750 1750 hunksfilterfn=hunksfilterfn)
1751 1751 self.ui.write("\n")
1752 1752
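# Illustrative sketch (not part of the module; assumes a live ui/repo pair
# and a non-working-directory ctx): in buffered mode the printer captures
# its output per revision, which is what displaygraph() below relies on via
# hunk[] and flush().
def _examplebufferedshow(ui, repo, ctx):
    displayer = changeset_printer(ui, repo, None, {}, True)
    displayer.show(ctx)   # output is captured in displayer.hunk[ctx.rev()]
    displayer.flush(ctx)  # emit the buffered hunk for this revision
    displayer.close()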
1753 1753 class jsonchangeset(changeset_printer):
1754 1754 '''format changeset information.'''
1755 1755
1756 1756 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1757 1757 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1758 1758 self.cache = {}
1759 1759 self._first = True
1760 1760
1761 1761 def close(self):
1762 1762 if not self._first:
1763 1763 self.ui.write("\n]\n")
1764 1764 else:
1765 1765 self.ui.write("[]\n")
1766 1766
1767 1767 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1768 1768 '''show a single changeset or file revision'''
1769 1769 rev = ctx.rev()
1770 1770 if rev is None:
1771 1771 jrev = jnode = 'null'
1772 1772 else:
1773 1773 jrev = '%d' % rev
1774 1774 jnode = '"%s"' % hex(ctx.node())
1775 1775 j = encoding.jsonescape
1776 1776
1777 1777 if self._first:
1778 1778 self.ui.write("[\n {")
1779 1779 self._first = False
1780 1780 else:
1781 1781 self.ui.write(",\n {")
1782 1782
1783 1783 if self.ui.quiet:
1784 1784 self.ui.write(('\n "rev": %s') % jrev)
1785 1785 self.ui.write((',\n "node": %s') % jnode)
1786 1786 self.ui.write('\n }')
1787 1787 return
1788 1788
1789 1789 self.ui.write(('\n "rev": %s') % jrev)
1790 1790 self.ui.write((',\n "node": %s') % jnode)
1791 1791 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1792 1792 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1793 1793 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1794 1794 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1795 1795 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1796 1796
1797 1797 self.ui.write((',\n "bookmarks": [%s]') %
1798 1798 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1799 1799 self.ui.write((',\n "tags": [%s]') %
1800 1800 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1801 1801 self.ui.write((',\n "parents": [%s]') %
1802 1802 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1803 1803
1804 1804 if self.ui.debugflag:
1805 1805 if rev is None:
1806 1806 jmanifestnode = 'null'
1807 1807 else:
1808 1808 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1809 1809 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1810 1810
1811 1811 self.ui.write((',\n "extra": {%s}') %
1812 1812 ", ".join('"%s": "%s"' % (j(k), j(v))
1813 1813 for k, v in ctx.extra().items()))
1814 1814
1815 1815 files = ctx.p1().status(ctx)
1816 1816 self.ui.write((',\n "modified": [%s]') %
1817 1817 ", ".join('"%s"' % j(f) for f in files[0]))
1818 1818 self.ui.write((',\n "added": [%s]') %
1819 1819 ", ".join('"%s"' % j(f) for f in files[1]))
1820 1820 self.ui.write((',\n "removed": [%s]') %
1821 1821 ", ".join('"%s"' % j(f) for f in files[2]))
1822 1822
1823 1823 elif self.ui.verbose:
1824 1824 self.ui.write((',\n "files": [%s]') %
1825 1825 ", ".join('"%s"' % j(f) for f in ctx.files()))
1826 1826
1827 1827 if copies:
1828 1828 self.ui.write((',\n "copies": {%s}') %
1829 1829 ", ".join('"%s": "%s"' % (j(k), j(v))
1830 1830 for k, v in copies))
1831 1831
1832 1832 matchfn = self.matchfn
1833 1833 if matchfn:
1834 1834 stat = self.diffopts.get('stat')
1835 1835 diff = self.diffopts.get('patch')
1836 1836 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1837 1837 node, prev = ctx.node(), ctx.p1().node()
1838 1838 if stat:
1839 1839 self.ui.pushbuffer()
1840 1840 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1841 1841 match=matchfn, stat=True)
1842 1842 self.ui.write((',\n "diffstat": "%s"')
1843 1843 % j(self.ui.popbuffer()))
1844 1844 if diff:
1845 1845 self.ui.pushbuffer()
1846 1846 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1847 1847 match=matchfn, stat=False)
1848 1848 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1849 1849
1850 1850 self.ui.write("\n }")
1851 1851
1852 1852 class changeset_templater(changeset_printer):
1853 1853 '''format changeset information.
1854 1854
1855 1855 Note: there are a variety of convenience functions to build a
1856 1856 changeset_templater for common cases. See functions such as:
1857 1857 makelogtemplater, show_changeset, buildcommittemplate, or other
1858 1858 functions that use changeset_templater.
1859 1859 '''
1860 1860
1861 1861 # Arguments before "buffered" used to be positional. Avoid adding or
1862 1862 # removing arguments before "buffered", so as not to break callers.
1863 1863 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1864 1864 buffered=False):
1865 1865 diffopts = diffopts or {}
1866 1866
1867 1867 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1868 1868 self.t = formatter.loadtemplater(ui, tmplspec,
1869 1869 cache=templatekw.defaulttempl)
1870 1870 self._counter = itertools.count()
1871 1871 self.cache = {}
1872 1872
1873 1873 self._tref = tmplspec.ref
1874 1874 self._parts = {'header': '', 'footer': '',
1875 1875 tmplspec.ref: tmplspec.ref,
1876 1876 'docheader': '', 'docfooter': '',
1877 1877 'separator': ''}
1878 1878 if tmplspec.mapfile:
1879 1879 # find correct templates for current mode, for backward
1880 1880 # compatibility with 'log -v/-q/--debug' using a mapfile
1881 1881 tmplmodes = [
1882 1882 (True, ''),
1883 1883 (self.ui.verbose, '_verbose'),
1884 1884 (self.ui.quiet, '_quiet'),
1885 1885 (self.ui.debugflag, '_debug'),
1886 1886 ]
1887 1887 for mode, postfix in tmplmodes:
1888 1888 for t in self._parts:
1889 1889 cur = t + postfix
1890 1890 if mode and cur in self.t:
1891 1891 self._parts[t] = cur
1892 1892 else:
1893 1893 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1894 1894 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1895 1895 self._parts.update(m)
1896 1896
1897 1897 if self._parts['docheader']:
1898 1898 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1899 1899
1900 1900 def close(self):
1901 1901 if self._parts['docfooter']:
1902 1902 if not self.footer:
1903 1903 self.footer = ""
1904 1904 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1905 1905 return super(changeset_templater, self).close()
1906 1906
1907 1907 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1908 1908 '''show a single changeset or file revision'''
1909 1909 props = props.copy()
1910 1910 props.update(templatekw.keywords)
1911 1911 props['templ'] = self.t
1912 1912 props['ctx'] = ctx
1913 1913 props['repo'] = self.repo
1914 1914 props['ui'] = self.repo.ui
1915 1915 props['index'] = index = next(self._counter)
1916 1916 props['revcache'] = {'copies': copies}
1917 1917 props['cache'] = self.cache
1918 1918 props = pycompat.strkwargs(props)
1919 1919
1920 1920 # write separator, which wouldn't work well with the header part below
1921 1921 # since there's inherently a conflict between header (across items) and
1922 1922 # separator (per item)
1923 1923 if self._parts['separator'] and index > 0:
1924 1924 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1925 1925
1926 1926 # write header
1927 1927 if self._parts['header']:
1928 1928 h = templater.stringify(self.t(self._parts['header'], **props))
1929 1929 if self.buffered:
1930 1930 self.header[ctx.rev()] = h
1931 1931 else:
1932 1932 if self.lastheader != h:
1933 1933 self.lastheader = h
1934 1934 self.ui.write(h)
1935 1935
1936 1936 # write changeset metadata, then patch if requested
1937 1937 key = self._parts[self._tref]
1938 1938 self.ui.write(templater.stringify(self.t(key, **props)))
1939 1939 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1940 1940
1941 1941 if self._parts['footer']:
1942 1942 if not self.footer:
1943 1943 self.footer = templater.stringify(
1944 1944 self.t(self._parts['footer'], **props))
1945 1945
1946 1946 def logtemplatespec(tmpl, mapfile):
1947 1947 if mapfile:
1948 1948 return formatter.templatespec('changeset', tmpl, mapfile)
1949 1949 else:
1950 1950 return formatter.templatespec('', tmpl, None)
1951 1951
1952 1952 def _lookuplogtemplate(ui, tmpl, style):
1953 1953 """Find the template matching the given template spec or style
1954 1954
1955 1955 See formatter.lookuptemplate() for details.
1956 1956 """
1957 1957
1958 1958 # ui settings
1959 1959 if not tmpl and not style: # templates are stronger than styles
1960 1960 tmpl = ui.config('ui', 'logtemplate')
1961 1961 if tmpl:
1962 1962 return logtemplatespec(templater.unquotestring(tmpl), None)
1963 1963 else:
1964 1964 style = util.expandpath(ui.config('ui', 'style'))
1965 1965
1966 1966 if not tmpl and style:
1967 1967 mapfile = style
1968 1968 if not os.path.split(mapfile)[0]:
1969 1969 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1970 1970 or templater.templatepath(mapfile))
1971 1971 if mapname:
1972 1972 mapfile = mapname
1973 1973 return logtemplatespec(None, mapfile)
1974 1974
1975 1975 if not tmpl:
1976 1976 return logtemplatespec(None, None)
1977 1977
1978 1978 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1979 1979
1980 1980 def makelogtemplater(ui, repo, tmpl, buffered=False):
1981 1981 """Create a changeset_templater from a literal template 'tmpl'
1982 1982 byte-string."""
1983 1983 spec = logtemplatespec(tmpl, None)
1984 1984 return changeset_templater(ui, repo, spec, buffered=buffered)
1985 1985
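# A sketch of makelogtemplater() in use (helper name is illustrative only;
# assumes a live ui/repo pair): render the tip changeset with a literal
# template byte-string.
def _exampletemplatedtip(ui, repo):
    t = makelogtemplater(ui, repo, '{rev}:{node|short} {desc|firstline}\n')
    t.show(repo['tip'])
    t.close()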
1986 1986 def show_changeset(ui, repo, opts, buffered=False):
1987 1987 """show one changeset using template or regular display.
1988 1988
1989 1989 Display format will be the first non-empty hit of:
1990 1990 1. option 'template'
1991 1991 2. option 'style'
1992 1992 3. [ui] setting 'logtemplate'
1993 1993 4. [ui] setting 'style'
1994 1994 If all of these values are either unset or the empty string,
1995 1995 regular display via changeset_printer() is done.
1996 1996 """
1997 1997 # options
1998 1998 match = None
1999 1999 if opts.get('patch') or opts.get('stat'):
2000 2000 match = scmutil.matchall(repo)
2001 2001
2002 2002 if opts.get('template') == 'json':
2003 2003 return jsonchangeset(ui, repo, match, opts, buffered)
2004 2004
2005 2005 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
2006 2006
2007 2007 if not spec.ref and not spec.tmpl and not spec.mapfile:
2008 2008 return changeset_printer(ui, repo, match, opts, buffered)
2009 2009
2010 2010 return changeset_templater(ui, repo, spec, match, opts, buffered)
2011 2011
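# A sketch of the typical "hg log"-style driver built on show_changeset()
# (illustrative helper, assuming a live ui/repo pair); the display format is
# resolved from the options exactly as documented above.
def _examplelog(ui, repo):
    displayer = show_changeset(ui, repo, {'template': '', 'style': ''})
    for rev in repo.revs('reverse(all())'):
        displayer.show(repo[rev])
    displayer.close()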
2012 2012 def showmarker(fm, marker, index=None):
2013 2013 """utility function to display obsolescence marker in a readable way
2014 2014
2015 2015 To be used by debug function."""
2016 2016 if index is not None:
2017 2017 fm.write('index', '%i ', index)
2018 2018 fm.write('prednode', '%s ', hex(marker.prednode()))
2019 2019 succs = marker.succnodes()
2020 2020 fm.condwrite(succs, 'succnodes', '%s ',
2021 2021 fm.formatlist(map(hex, succs), name='node'))
2022 2022 fm.write('flag', '%X ', marker.flags())
2023 2023 parents = marker.parentnodes()
2024 2024 if parents is not None:
2025 2025 fm.write('parentnodes', '{%s} ',
2026 2026 fm.formatlist(map(hex, parents), name='node', sep=', '))
2027 2027 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
2028 2028 meta = marker.metadata().copy()
2029 2029 meta.pop('date', None)
2030 2030 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
2031 2031 fm.plain('\n')
2032 2032
2033 2033 def finddate(ui, repo, date):
2034 2034 """Find the tipmost changeset that matches the given date spec"""
2035 2035
2036 2036 df = util.matchdate(date)
2037 2037 m = scmutil.matchall(repo)
2038 2038 results = {}
2039 2039
2040 2040 def prep(ctx, fns):
2041 2041 d = ctx.date()
2042 2042 if df(d[0]):
2043 2043 results[ctx.rev()] = d
2044 2044
2045 2045 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
2046 2046 rev = ctx.rev()
2047 2047 if rev in results:
2048 2048 ui.status(_("found revision %s from %s\n") %
2049 2049 (rev, util.datestr(results[rev])))
2050 2050 return '%d' % rev
2051 2051
2052 2052 raise error.Abort(_("revision matching date not found"))
2053 2053
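# Example (illustrative helper, assumes a live ui/repo pair): finddate()
# returns the matching revision number as a string, so it can be fed
# straight back into repo[...], or aborts if nothing matches.
def _examplefinddate(ui, repo):
    rev = finddate(ui, repo, '>2017-01-01')
    return repo[rev]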
2054 2054 def increasingwindows(windowsize=8, sizelimit=512):
2055 2055 while True:
2056 2056 yield windowsize
2057 2057 if windowsize < sizelimit:
2058 2058 windowsize *= 2
2059 2059
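# Illustrative check of the window sizes produced above: they double from
# the initial size until the size limit is reached, then stay at the limit.
def _examplewindowsizes():
    gen = increasingwindows()
    # -> [8, 16, 32, 64, 128, 256, 512, 512]
    return [next(gen) for _ in range(8)]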
2060 2060 class FileWalkError(Exception):
2061 2061 pass
2062 2062
2063 2063 def walkfilerevs(repo, match, follow, revs, fncache):
2064 2064 '''Walks the file history for the matched files.
2065 2065
2066 2066 Returns the changeset revs that are involved in the file history.
2067 2067
2068 2068 Throws FileWalkError if the file history can't be walked using
2069 2069 filelogs alone.
2070 2070 '''
2071 2071 wanted = set()
2072 2072 copies = []
2073 2073 minrev, maxrev = min(revs), max(revs)
2074 2074 def filerevgen(filelog, last):
2075 2075 """
2076 2076 Only files, no patterns. Check the history of each file.
2077 2077
2078 2078 Examines filelog entries within the minrev/maxrev linkrev range.
2079 2079 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2080 2080 tuples in backwards order
2081 2081 """
2082 2082 cl_count = len(repo)
2083 2083 revs = []
2084 2084 for j in xrange(0, last + 1):
2085 2085 linkrev = filelog.linkrev(j)
2086 2086 if linkrev < minrev:
2087 2087 continue
2088 2088 # only yield revs for which we have the changelog; this can
2089 2089 # happen while doing "hg log" during a pull or commit
2090 2090 if linkrev >= cl_count:
2091 2091 break
2092 2092
2093 2093 parentlinkrevs = []
2094 2094 for p in filelog.parentrevs(j):
2095 2095 if p != nullrev:
2096 2096 parentlinkrevs.append(filelog.linkrev(p))
2097 2097 n = filelog.node(j)
2098 2098 revs.append((linkrev, parentlinkrevs,
2099 2099 follow and filelog.renamed(n)))
2100 2100
2101 2101 return reversed(revs)
2102 2102 def iterfiles():
2103 2103 pctx = repo['.']
2104 2104 for filename in match.files():
2105 2105 if follow:
2106 2106 if filename not in pctx:
2107 2107 raise error.Abort(_('cannot follow file not in parent '
2108 2108 'revision: "%s"') % filename)
2109 2109 yield filename, pctx[filename].filenode()
2110 2110 else:
2111 2111 yield filename, None
2112 2112 for filename_node in copies:
2113 2113 yield filename_node
2114 2114
2115 2115 for file_, node in iterfiles():
2116 2116 filelog = repo.file(file_)
2117 2117 if not len(filelog):
2118 2118 if node is None:
2119 2119 # A zero count may be a directory or deleted file, so
2120 2120 # try to find matching entries on the slow path.
2121 2121 if follow:
2122 2122 raise error.Abort(
2123 2123 _('cannot follow nonexistent file: "%s"') % file_)
2124 2124 raise FileWalkError("Cannot walk via filelog")
2125 2125 else:
2126 2126 continue
2127 2127
2128 2128 if node is None:
2129 2129 last = len(filelog) - 1
2130 2130 else:
2131 2131 last = filelog.rev(node)
2132 2132
2133 2133 # keep track of all ancestors of the file
2134 2134 ancestors = {filelog.linkrev(last)}
2135 2135
2136 2136 # iterate from latest to oldest revision
2137 2137 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2138 2138 if not follow:
2139 2139 if rev > maxrev:
2140 2140 continue
2141 2141 else:
2142 2142 # Note that last might not be the first interesting
2143 2143 # rev to us:
2144 2144 # if the file has been changed after maxrev, we'll
2145 2145 # have linkrev(last) > maxrev, and we still need
2146 2146 # to explore the file graph
2147 2147 if rev not in ancestors:
2148 2148 continue
2149 2149 # XXX insert 1327 fix here
2150 2150 if flparentlinkrevs:
2151 2151 ancestors.update(flparentlinkrevs)
2152 2152
2153 2153 fncache.setdefault(rev, []).append(file_)
2154 2154 wanted.add(rev)
2155 2155 if copied:
2156 2156 copies.append(copied)
2157 2157
2158 2158 return wanted
2159 2159
2160 2160 class _followfilter(object):
2161 2161 def __init__(self, repo, onlyfirst=False):
2162 2162 self.repo = repo
2163 2163 self.startrev = nullrev
2164 2164 self.roots = set()
2165 2165 self.onlyfirst = onlyfirst
2166 2166
2167 2167 def match(self, rev):
2168 2168 def realparents(rev):
2169 2169 if self.onlyfirst:
2170 2170 return self.repo.changelog.parentrevs(rev)[0:1]
2171 2171 else:
2172 2172 return filter(lambda x: x != nullrev,
2173 2173 self.repo.changelog.parentrevs(rev))
2174 2174
2175 2175 if self.startrev == nullrev:
2176 2176 self.startrev = rev
2177 2177 return True
2178 2178
2179 2179 if rev > self.startrev:
2180 2180 # forward: all descendants
2181 2181 if not self.roots:
2182 2182 self.roots.add(self.startrev)
2183 2183 for parent in realparents(rev):
2184 2184 if parent in self.roots:
2185 2185 self.roots.add(rev)
2186 2186 return True
2187 2187 else:
2188 2188 # backwards: all parents
2189 2189 if not self.roots:
2190 2190 self.roots.update(realparents(self.startrev))
2191 2191 if rev in self.roots:
2192 2192 self.roots.remove(rev)
2193 2193 self.roots.update(realparents(rev))
2194 2194 return True
2195 2195
2196 2196 return False
2197 2197
2198 2198 def walkchangerevs(repo, match, opts, prepare):
2199 2199 '''Iterate over files and the revs in which they changed.
2200 2200
2201 2201 Callers most commonly need to iterate backwards over the history
2202 2202 in which they are interested. Doing so has awful (quadratic-looking)
2203 2203 performance, so we use iterators in a "windowed" way.
2204 2204
2205 2205 We walk a window of revisions in the desired order. Within the
2206 2206 window, we first walk forwards to gather data, then in the desired
2207 2207 order (usually backwards) to display it.
2208 2208
2209 2209 This function returns an iterator yielding contexts. Before
2210 2210 yielding each context, the iterator will first call the prepare
2211 2211 function on each context in the window in forward order.'''
2212 2212
2213 2213 follow = opts.get('follow') or opts.get('follow_first')
2214 2214 revs = _logrevs(repo, opts)
2215 2215 if not revs:
2216 2216 return []
2217 2217 wanted = set()
2218 2218 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2219 2219 opts.get('removed'))
2220 2220 fncache = {}
2221 2221 change = repo.changectx
2222 2222
2223 2223 # First step is to fill wanted, the set of revisions that we want to yield.
2224 2224 # When it does not induce extra cost, we also fill fncache for revisions in
2225 2225 # wanted: a cache of filenames that were changed (ctx.files()) and that
2226 2226 # match the file filtering conditions.
2227 2227
2228 2228 if match.always():
2229 2229 # No files, no patterns. Display all revs.
2230 2230 wanted = revs
2231 2231 elif not slowpath:
2232 2232 # We only have to read through the filelog to find wanted revisions
2233 2233
2234 2234 try:
2235 2235 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2236 2236 except FileWalkError:
2237 2237 slowpath = True
2238 2238
2239 2239 # We decided to fall back to the slowpath because at least one
2240 2240 # of the paths was not a file. Check to see if at least one of them
2241 2241 # existed in history, otherwise simply return
2242 2242 for path in match.files():
2243 2243 if path == '.' or path in repo.store:
2244 2244 break
2245 2245 else:
2246 2246 return []
2247 2247
2248 2248 if slowpath:
2249 2249 # We have to read the changelog to match filenames against
2250 2250 # changed files
2251 2251
2252 2252 if follow:
2253 2253 raise error.Abort(_('can only follow copies/renames for explicit '
2254 2254 'filenames'))
2255 2255
2256 2256 # The slow path checks files modified in every changeset.
2257 2257 # This is really slow on large repos, so compute the set lazily.
2258 2258 class lazywantedset(object):
2259 2259 def __init__(self):
2260 2260 self.set = set()
2261 2261 self.revs = set(revs)
2262 2262
2263 2263 # No need to worry about locality here because it will be accessed
2264 2264 # in the same order as the increasing window below.
2265 2265 def __contains__(self, value):
2266 2266 if value in self.set:
2267 2267 return True
2268 2268 elif value not in self.revs:
2269 2269 return False
2270 2270 else:
2271 2271 self.revs.discard(value)
2272 2272 ctx = change(value)
2273 2273 matches = filter(match, ctx.files())
2274 2274 if matches:
2275 2275 fncache[value] = matches
2276 2276 self.set.add(value)
2277 2277 return True
2278 2278 return False
2279 2279
2280 2280 def discard(self, value):
2281 2281 self.revs.discard(value)
2282 2282 self.set.discard(value)
2283 2283
2284 2284 wanted = lazywantedset()
2285 2285
2286 2286 # it might be worthwhile to do this in the iterator if the rev range
2287 2287 # is descending and the prune args are all within that range
2288 2288 for rev in opts.get('prune', ()):
2289 2289 rev = repo[rev].rev()
2290 2290 ff = _followfilter(repo)
2291 2291 stop = min(revs[0], revs[-1])
2292 2292 for x in xrange(rev, stop - 1, -1):
2293 2293 if ff.match(x):
2294 2294 wanted = wanted - [x]
2295 2295
2296 2296 # Now that wanted is correctly initialized, we can iterate over the
2297 2297 # revision range, yielding only revisions in wanted.
2298 2298 def iterate():
2299 2299 if follow and match.always():
2300 2300 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2301 2301 def want(rev):
2302 2302 return ff.match(rev) and rev in wanted
2303 2303 else:
2304 2304 def want(rev):
2305 2305 return rev in wanted
2306 2306
2307 2307 it = iter(revs)
2308 2308 stopiteration = False
2309 2309 for windowsize in increasingwindows():
2310 2310 nrevs = []
2311 2311 for i in xrange(windowsize):
2312 2312 rev = next(it, None)
2313 2313 if rev is None:
2314 2314 stopiteration = True
2315 2315 break
2316 2316 elif want(rev):
2317 2317 nrevs.append(rev)
2318 2318 for rev in sorted(nrevs):
2319 2319 fns = fncache.get(rev)
2320 2320 ctx = change(rev)
2321 2321 if not fns:
2322 2322 def fns_generator():
2323 2323 for f in ctx.files():
2324 2324 if match(f):
2325 2325 yield f
2326 2326 fns = fns_generator()
2327 2327 prepare(ctx, fns)
2328 2328 for rev in nrevs:
2329 2329 yield change(rev)
2330 2330
2331 2331 if stopiteration:
2332 2332 break
2333 2333
2334 2334 return iterate()
2335 2335
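# A sketch of the calling convention documented above (illustrative helper,
# assumes a live ui/repo pair): 'prep' is invoked on every context of a
# window in forward order before the contexts are yielded in log order.
def _examplewalk(ui, repo):
    def prep(ctx, fns):
        pass  # gather any per-revision data needed for display here
    m = scmutil.matchall(repo)
    for ctx in walkchangerevs(repo, m, {'rev': ['.:0']}, prep):
        ui.write('%d\n' % ctx.rev())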
2336 2336 def _makefollowlogfilematcher(repo, files, followfirst):
2337 2337 # When displaying a revision with --patch --follow FILE, we have
2338 2338 # to know which file of the revision must be diffed. With
2339 2339 # --follow, we want the names of the ancestors of FILE in the
2340 2340 # revision, stored in "fcache". "fcache" is populated by
2341 2341 # reproducing the graph traversal already done by --follow revset
2342 2342 # and relating revs to file names (which is not "correct" but
2343 2343 # good enough).
2344 2344 fcache = {}
2345 2345 fcacheready = [False]
2346 2346 pctx = repo['.']
2347 2347
2348 2348 def populate():
2349 2349 for fn in files:
2350 2350 fctx = pctx[fn]
2351 2351 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2352 2352 for c in fctx.ancestors(followfirst=followfirst):
2353 2353 fcache.setdefault(c.rev(), set()).add(c.path())
2354 2354
2355 2355 def filematcher(rev):
2356 2356 if not fcacheready[0]:
2357 2357 # Lazy initialization
2358 2358 fcacheready[0] = True
2359 2359 populate()
2360 2360 return scmutil.matchfiles(repo, fcache.get(rev, []))
2361 2361
2362 2362 return filematcher
2363 2363
2364 2364 def _makenofollowlogfilematcher(repo, pats, opts):
2365 2365 '''hook for extensions to override the filematcher for non-follow cases'''
2366 2366 return None
2367 2367
2368 2368 def _makelogrevset(repo, pats, opts, revs):
2369 2369 """Return (expr, filematcher) where expr is a revset string built
2370 2370 from log options and file patterns or None. If --stat or --patch
2371 2371 are not passed, filematcher is None. Otherwise it is a callable
2372 2372 taking a revision number and returning a match object filtering
2373 2373 the files to be detailed when displaying the revision.
2374 2374 """
2375 2375 opt2revset = {
2376 2376 'no_merges': ('not merge()', None),
2377 2377 'only_merges': ('merge()', None),
2378 2378 '_ancestors': ('ancestors(%(val)s)', None),
2379 2379 '_fancestors': ('_firstancestors(%(val)s)', None),
2380 2380 '_descendants': ('descendants(%(val)s)', None),
2381 2381 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2382 2382 '_matchfiles': ('_matchfiles(%(val)s)', None),
2383 2383 'date': ('date(%(val)r)', None),
2384 2384 'branch': ('branch(%(val)r)', ' or '),
2385 2385 '_patslog': ('filelog(%(val)r)', ' or '),
2386 2386 '_patsfollow': ('follow(%(val)r)', ' or '),
2387 2387 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2388 2388 'keyword': ('keyword(%(val)r)', ' or '),
2389 2389 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2390 2390 'user': ('user(%(val)r)', ' or '),
2391 2391 }
2392 2392
2393 2393 opts = dict(opts)
2394 2394 # follow or not follow?
2395 2395 follow = opts.get('follow') or opts.get('follow_first')
2396 2396 if opts.get('follow_first'):
2397 2397 followfirst = 1
2398 2398 else:
2399 2399 followfirst = 0
2400 2400 # --follow with FILE behavior depends on revs...
2401 2401 it = iter(revs)
2402 2402 startrev = next(it)
2403 2403 followdescendants = startrev < next(it, startrev)
2404 2404
2405 2405 # branch and only_branch are really aliases and must be handled at
2406 2406 # the same time
2407 2407 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2408 2408 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2409 2409 # pats/include/exclude are passed to match.match() directly in
2410 2410 # _matchfiles() revset but walkchangerevs() builds its matcher with
2411 2411 # scmutil.match(). The difference is input pats are globbed on
2412 2412 # platforms without shell expansion (windows).
2413 2413 wctx = repo[None]
2414 2414 match, pats = scmutil.matchandpats(wctx, pats, opts)
2415 2415 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2416 2416 opts.get('removed'))
2417 2417 if not slowpath:
2418 2418 for f in match.files():
2419 2419 if follow and f not in wctx:
2420 2420 # If the file exists, it may be a directory, so let it
2421 2421 # take the slow path.
2422 2422 if os.path.exists(repo.wjoin(f)):
2423 2423 slowpath = True
2424 2424 continue
2425 2425 else:
2426 2426 raise error.Abort(_('cannot follow file not in parent '
2427 2427 'revision: "%s"') % f)
2428 2428 filelog = repo.file(f)
2429 2429 if not filelog:
2430 2430 # A zero count may be a directory or deleted file, so
2431 2431 # try to find matching entries on the slow path.
2432 2432 if follow:
2433 2433 raise error.Abort(
2434 2434 _('cannot follow nonexistent file: "%s"') % f)
2435 2435 slowpath = True
2436 2436
2437 2437 # We decided to fall back to the slowpath because at least one
2438 2438 # of the paths was not a file. Check to see if at least one of them
2439 2439 # existed in history - in that case, we'll continue down the
2440 2440 # slowpath; otherwise, we can turn off the slowpath
2441 2441 if slowpath:
2442 2442 for path in match.files():
2443 2443 if path == '.' or path in repo.store:
2444 2444 break
2445 2445 else:
2446 2446 slowpath = False
2447 2447
2448 2448 fpats = ('_patsfollow', '_patsfollowfirst')
2449 2449 fnopats = (('_ancestors', '_fancestors'),
2450 2450 ('_descendants', '_fdescendants'))
2451 2451 if slowpath:
2452 2452 # See walkchangerevs() slow path.
2453 2453 #
2454 2454 # pats/include/exclude cannot be represented as separate
2455 2455 # revset expressions as their filtering logic applies at file
2456 2456 # level. For instance "-I a -X b" matches a revision touching
2457 2457 # "a" and "b" while "file(a) and not file(b)" does
2458 2458 # not. Besides, filesets are evaluated against the working
2459 2459 # directory.
2460 2460 matchargs = ['r:', 'd:relpath']
2461 2461 for p in pats:
2462 2462 matchargs.append('p:' + p)
2463 2463 for p in opts.get('include', []):
2464 2464 matchargs.append('i:' + p)
2465 2465 for p in opts.get('exclude', []):
2466 2466 matchargs.append('x:' + p)
2467 2467 matchargs = ','.join(('%r' % p) for p in matchargs)
2468 2468 opts['_matchfiles'] = matchargs
2469 2469 if follow:
2470 2470 opts[fnopats[0][followfirst]] = '.'
2471 2471 else:
2472 2472 if follow:
2473 2473 if pats:
2474 2474 # follow() revset interprets its file argument as a
2475 2475 # manifest entry, so use match.files(), not pats.
2476 2476 opts[fpats[followfirst]] = list(match.files())
2477 2477 else:
2478 2478 op = fnopats[followdescendants][followfirst]
2479 2479 opts[op] = 'rev(%d)' % startrev
2480 2480 else:
2481 2481 opts['_patslog'] = list(pats)
2482 2482
2483 2483 filematcher = None
2484 2484 if opts.get('patch') or opts.get('stat'):
2485 2485 # When following files, track renames via a special matcher.
2486 2486 # If we're forced to take the slowpath it means we're following
2487 2487 # at least one pattern/directory, so don't bother with rename tracking.
2488 2488 if follow and not match.always() and not slowpath:
2489 2489 # _makefollowlogfilematcher expects its files argument to be
2490 2490 # relative to the repo root, so use match.files(), not pats.
2491 2491 filematcher = _makefollowlogfilematcher(repo, match.files(),
2492 2492 followfirst)
2493 2493 else:
2494 2494 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2495 2495 if filematcher is None:
2496 2496 filematcher = lambda rev: match
2497 2497
2498 2498 expr = []
2499 2499 for op, val in sorted(opts.iteritems()):
2500 2500 if not val:
2501 2501 continue
2502 2502 if op not in opt2revset:
2503 2503 continue
2504 2504 revop, andor = opt2revset[op]
2505 2505 if '%(val)' not in revop:
2506 2506 expr.append(revop)
2507 2507 else:
2508 2508 if not isinstance(val, list):
2509 2509 e = revop % {'val': val}
2510 2510 else:
2511 2511 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2512 2512 expr.append(e)
2513 2513
2514 2514 if expr:
2515 2515 expr = '(' + ' and '.join(expr) + ')'
2516 2516 else:
2517 2517 expr = None
2518 2518 return expr, filematcher
2519 2519
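# Illustrative sketch of the kind of expression built above (helper and
# values are examples only; assumes a non-empty repo):
def _examplelogrevset(repo):
    revs = _logrevs(repo, {})
    expr, filematcher = _makelogrevset(repo, [], {'user': ['alice']}, revs)
    # expr is a revset string, roughly "((user('alice')))"; filematcher is
    # None because neither --patch nor --stat was requested.
    return expr, filematcher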
2520 2520 def _logrevs(repo, opts):
2521 2521 # Default --rev value depends on --follow but --follow behavior
2522 2522 # depends on revisions resolved from --rev...
2523 2523 follow = opts.get('follow') or opts.get('follow_first')
2524 2524 if opts.get('rev'):
2525 2525 revs = scmutil.revrange(repo, opts['rev'])
2526 2526 elif follow and repo.dirstate.p1() == nullid:
2527 2527 revs = smartset.baseset()
2528 2528 elif follow:
2529 2529 revs = repo.revs('reverse(:.)')
2530 2530 else:
2531 2531 revs = smartset.spanset(repo)
2532 2532 revs.reverse()
2533 2533 return revs
2534 2534
2535 2535 def getgraphlogrevs(repo, pats, opts):
2536 2536 """Return (revs, expr, filematcher) where revs is an iterable of
2537 2537 revision numbers, expr is a revset string built from log options
2538 2538 and file patterns or None, and used to filter 'revs'. If --stat or
2539 2539 --patch are not passed, filematcher is None. Otherwise it is a
2540 2540 callable taking a revision number and returning a match object
2541 2541 filtering the files to be detailed when displaying the revision.
2542 2542 """
2543 2543 limit = loglimit(opts)
2544 2544 revs = _logrevs(repo, opts)
2545 2545 if not revs:
2546 2546 return smartset.baseset(), None, None
2547 2547 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2548 2548 if opts.get('rev'):
2549 2549 # User-specified revs might be unsorted, but don't sort before
2550 2550 # _makelogrevset because it might depend on the order of revs
2551 2551 if not (revs.isdescending() or revs.istopo()):
2552 2552 revs.sort(reverse=True)
2553 2553 if expr:
2554 2554 matcher = revset.match(repo.ui, expr)
2555 2555 revs = matcher(repo, revs)
2556 2556 if limit is not None:
2557 2557 limitedrevs = []
2558 2558 for idx, rev in enumerate(revs):
2559 2559 if idx >= limit:
2560 2560 break
2561 2561 limitedrevs.append(rev)
2562 2562 revs = smartset.baseset(limitedrevs)
2563 2563
2564 2564 return revs, expr, filematcher
2565 2565
2566 2566 def getlogrevs(repo, pats, opts):
2567 2567 """Return (revs, expr, filematcher) where revs is an iterable of
2568 2568 revision numbers, expr is a revset string built from log options
2569 2569 and file patterns or None, and used to filter 'revs'. If --stat or
2570 2570 --patch are not passed, filematcher is None. Otherwise it is a
2571 2571 callable taking a revision number and returning a match object
2572 2572 filtering the files to be detailed when displaying the revision.
2573 2573 """
2574 2574 limit = loglimit(opts)
2575 2575 revs = _logrevs(repo, opts)
2576 2576 if not revs:
2577 2577 return smartset.baseset([]), None, None
2578 2578 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2579 2579 if expr:
2580 2580 matcher = revset.match(repo.ui, expr)
2581 2581 revs = matcher(repo, revs)
2582 2582 if limit is not None:
2583 2583 limitedrevs = []
2584 2584 for idx, r in enumerate(revs):
2585 2585 if limit <= idx:
2586 2586 break
2587 2587 limitedrevs.append(r)
2588 2588 revs = smartset.baseset(limitedrevs)
2589 2589
2590 2590 return revs, expr, filematcher
2591 2591
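# Typical use of getlogrevs() (illustrative helper, assumes a live ui/repo
# pair): resolve the revisions and the per-revision matcher for an
# "hg log"-style command limited to the last five changesets touching
# README.
def _examplegetlogrevs(ui, repo):
    revs, expr, filematcher = getlogrevs(repo, ['README'], {'limit': '5'})
    for rev in revs:
        ui.write('%d\n' % rev)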
2592 2592 def _parselinerangelogopt(repo, opts):
2593 2593 """Parse --line-range log option and return a list of tuples (filename,
2594 2594 (fromline, toline)).
2595 2595 """
2596 2596 linerangebyfname = []
2597 2597 for pat in opts.get('line_range', []):
2598 2598 try:
2599 2599 pat, linerange = pat.rsplit(',', 1)
2600 2600 except ValueError:
2601 2601 raise error.Abort(_('malformatted line-range pattern %s') % pat)
2602 2602 try:
2603 2603 fromline, toline = map(int, linerange.split(':'))
2604 2604 except ValueError:
2605 2605 raise error.Abort(_("invalid line range for %s") % pat)
2606 2606 msg = _("line range pattern '%s' must match exactly one file") % pat
2607 2607 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
2608 2608 linerangebyfname.append(
2609 2609 (fname, util.processlinerange(fromline, toline)))
2610 2610 return linerangebyfname
2611 2611
2612 2612 def getloglinerangerevs(repo, userrevs, opts):
2613 2613 """Return (revs, filematcher, hunksfilter).
2614 2614
2615 2615 "revs" are revisions obtained by processing "line-range" log options and
2616 2616 walking block ancestors of each specified file/line-range.
2617 2617
2618 2618 "filematcher(rev) -> match" is a factory function returning a match object
2619 2619 for a given revision for file patterns specified in --line-range option.
2620 2620 If neither --stat nor --patch options are passed, "filematcher" is None.
2621 2621
2622 2622 "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
2623 2623 returning a hunks filtering function.
2624 2624 If neither --stat nor --patch options are passed, "hunksfilter" is None.
2625 2625 """
2626 2626 wctx = repo[None]
2627 2627
2628 2628 # Two-levels map of "rev -> file ctx -> [line range]".
2629 2629 linerangesbyrev = {}
2630 2630 for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
2631 2631 if fname not in wctx:
2632 2632 raise error.Abort(_('cannot follow file not in parent '
2633 2633 'revision: "%s"') % fname)
2634 2634 fctx = wctx.filectx(fname)
2635 2635 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
2636 2636 rev = fctx.introrev()
2637 2637 if rev not in userrevs:
2638 2638 continue
2639 2639 linerangesbyrev.setdefault(
2640 2640 rev, {}).setdefault(
2641 2641 fctx.path(), []).append(linerange)
2642 2642
2643 2643 filematcher = None
2644 2644 hunksfilter = None
2645 2645 if opts.get('patch') or opts.get('stat'):
2646 2646
2647 2647 def nofilterhunksfn(fctx, hunks):
2648 2648 return hunks
2649 2649
2650 2650 def hunksfilter(rev):
2651 2651 fctxlineranges = linerangesbyrev.get(rev)
2652 2652 if fctxlineranges is None:
2653 2653 return nofilterhunksfn
2654 2654
2655 2655 def filterfn(fctx, hunks):
2656 2656 lineranges = fctxlineranges.get(fctx.path())
2657 2657 if lineranges is not None:
2658 2658 for hr, lines in hunks:
2659 2659 if hr is None: # binary
2660 2660 yield hr, lines
2661 2661 continue
2662 2662 if any(mdiff.hunkinrange(hr[2:], lr)
2663 2663 for lr in lineranges):
2664 2664 yield hr, lines
2665 2665 else:
2666 2666 for hunk in hunks:
2667 2667 yield hunk
2668 2668
2669 2669 return filterfn
2670 2670
2671 2671 def filematcher(rev):
2672 2672 files = list(linerangesbyrev.get(rev, []))
2673 2673 return scmutil.matchfiles(repo, files)
2674 2674
2675 2675 revs = sorted(linerangesbyrev, reverse=True)
2676 2676
2677 2677 return revs, filematcher, hunksfilter
2678 2678
2679 2679 def _graphnodeformatter(ui, displayer):
2680 2680 spec = ui.config('ui', 'graphnodetemplate')
2681 2681 if not spec:
2682 2682 return templatekw.showgraphnode # fast path for "{graphnode}"
2683 2683
2684 2684 spec = templater.unquotestring(spec)
2685 2685 templ = formatter.maketemplater(ui, spec)
2686 2686 cache = {}
2687 2687 if isinstance(displayer, changeset_templater):
2688 2688 cache = displayer.cache # reuse cache of slow templates
2689 2689 props = templatekw.keywords.copy()
2690 2690 props['templ'] = templ
2691 2691 props['cache'] = cache
2692 2692 def formatnode(repo, ctx):
2693 2693 props['ctx'] = ctx
2694 2694 props['repo'] = repo
2695 2695 props['ui'] = repo.ui
2696 2696 props['revcache'] = {}
2697 2697 return templ.render(props)
2698 2698 return formatnode
2699 2699
2700 2700 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2701 2701 filematcher=None, props=None):
2702 2702 props = props or {}
2703 2703 formatnode = _graphnodeformatter(ui, displayer)
2704 2704 state = graphmod.asciistate()
2705 2705 styles = state['styles']
2706 2706
2707 2707 # only set graph styling if HGPLAIN is not set.
2708 2708 if ui.plain('graph'):
2709 2709 # set all edge styles to |, the default pre-3.8 behaviour
2710 2710 styles.update(dict.fromkeys(styles, '|'))
2711 2711 else:
2712 2712 edgetypes = {
2713 2713 'parent': graphmod.PARENT,
2714 2714 'grandparent': graphmod.GRANDPARENT,
2715 2715 'missing': graphmod.MISSINGPARENT
2716 2716 }
2717 2717 for name, key in edgetypes.items():
2718 2718 # experimental config: experimental.graphstyle.*
2719 2719 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2720 2720 styles[key])
2721 2721 if not styles[key]:
2722 2722 styles[key] = None
2723 2723
2724 2724 # experimental config: experimental.graphshorten
2725 2725 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2726 2726
2727 2727 for rev, type, ctx, parents in dag:
2728 2728 char = formatnode(repo, ctx)
2729 2729 copies = None
2730 2730 if getrenamed and ctx.rev():
2731 2731 copies = []
2732 2732 for fn in ctx.files():
2733 2733 rename = getrenamed(fn, ctx.rev())
2734 2734 if rename:
2735 2735 copies.append((fn, rename[0]))
2736 2736 revmatchfn = None
2737 2737 if filematcher is not None:
2738 2738 revmatchfn = filematcher(ctx.rev())
2739 2739 edges = edgefn(type, char, state, rev, parents)
2740 2740 firstedge = next(edges)
2741 2741 width = firstedge[2]
2742 2742 displayer.show(ctx, copies=copies, matchfn=revmatchfn,
2743 2743 _graphwidth=width, **props)
2744 2744 lines = displayer.hunk.pop(rev).split('\n')
2745 2745 if not lines[-1]:
2746 2746 del lines[-1]
2747 2747 displayer.flush(ctx)
2748 2748 for type, char, width, coldata in itertools.chain([firstedge], edges):
2749 2749 graphmod.ascii(ui, state, type, char, lines, coldata)
2750 2750 lines = []
2751 2751 displayer.close()
2752 2752
2753 2753 def graphlog(ui, repo, pats, opts):
2754 2754 # Parameters are identical to log command ones
2755 2755 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2756 2756 revdag = graphmod.dagwalker(repo, revs)
2757 2757
2758 2758 getrenamed = None
2759 2759 if opts.get('copies'):
2760 2760 endrev = None
2761 2761 if opts.get('rev'):
2762 2762 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2763 2763 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2764 2764
2765 2765 ui.pager('log')
2766 2766 displayer = show_changeset(ui, repo, opts, buffered=True)
2767 2767 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2768 2768 filematcher)
2769 2769
2770 2770 def checkunsupportedgraphflags(pats, opts):
2771 2771 for op in ["newest_first"]:
2772 2772 if op in opts and opts[op]:
2773 2773 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2774 2774 % op.replace("_", "-"))
2775 2775
2776 2776 def graphrevs(repo, nodes, opts):
2777 2777 limit = loglimit(opts)
2778 2778 nodes.reverse()
2779 2779 if limit is not None:
2780 2780 nodes = nodes[:limit]
2781 2781 return graphmod.nodes(repo, nodes)
2782 2782
2783 2783 def add(ui, repo, match, prefix, explicitonly, **opts):
2784 2784 join = lambda f: os.path.join(prefix, f)
2785 2785 bad = []
2786 2786
2787 2787 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2788 2788 names = []
2789 2789 wctx = repo[None]
2790 2790 cca = None
2791 2791 abort, warn = scmutil.checkportabilityalert(ui)
2792 2792 if abort or warn:
2793 2793 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2794 2794
2795 2795 badmatch = matchmod.badmatch(match, badfn)
2796 2796 dirstate = repo.dirstate
2797 2797 # We don't want to just call wctx.walk here, since it would return a lot of
2798 2798 # clean files, which we aren't interested in, and doing so takes time.
2799 2799 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2800 2800 unknown=True, ignored=False, full=False)):
2801 2801 exact = match.exact(f)
2802 2802 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2803 2803 if cca:
2804 2804 cca(f)
2805 2805 names.append(f)
2806 2806 if ui.verbose or not exact:
2807 2807 ui.status(_('adding %s\n') % match.rel(f))
2808 2808
2809 2809 for subpath in sorted(wctx.substate):
2810 2810 sub = wctx.sub(subpath)
2811 2811 try:
2812 2812 submatch = matchmod.subdirmatcher(subpath, match)
2813 2813 if opts.get(r'subrepos'):
2814 2814 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2815 2815 else:
2816 2816 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2817 2817 except error.LookupError:
2818 2818 ui.status(_("skipping missing subrepository: %s\n")
2819 2819 % join(subpath))
2820 2820
2821 2821 if not opts.get(r'dry_run'):
2822 2822 rejected = wctx.add(names, prefix)
2823 2823 bad.extend(f for f in rejected if f in match.files())
2824 2824 return bad
2825 2825
2826 2826 def addwebdirpath(repo, serverpath, webconf):
2827 2827 webconf[serverpath] = repo.root
2828 2828 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2829 2829
2830 2830 for r in repo.revs('filelog("path:.hgsub")'):
2831 2831 ctx = repo[r]
2832 2832 for subpath in ctx.substate:
2833 2833 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2834 2834
2835 2835 def forget(ui, repo, match, prefix, explicitonly):
2836 2836 join = lambda f: os.path.join(prefix, f)
2837 2837 bad = []
2838 2838 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2839 2839 wctx = repo[None]
2840 2840 forgot = []
2841 2841
2842 2842 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2843 2843 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2844 2844 if explicitonly:
2845 2845 forget = [f for f in forget if match.exact(f)]
2846 2846
2847 2847 for subpath in sorted(wctx.substate):
2848 2848 sub = wctx.sub(subpath)
2849 2849 try:
2850 2850 submatch = matchmod.subdirmatcher(subpath, match)
2851 2851 subbad, subforgot = sub.forget(submatch, prefix)
2852 2852 bad.extend([subpath + '/' + f for f in subbad])
2853 2853 forgot.extend([subpath + '/' + f for f in subforgot])
2854 2854 except error.LookupError:
2855 2855 ui.status(_("skipping missing subrepository: %s\n")
2856 2856 % join(subpath))
2857 2857
2858 2858 if not explicitonly:
2859 2859 for f in match.files():
2860 2860 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2861 2861 if f not in forgot:
2862 2862 if repo.wvfs.exists(f):
2863 2863 # Don't complain if the exact case match wasn't given.
2864 2864 # But don't do this until after checking 'forgot', so
2865 2865 # that subrepo files aren't normalized, and this op is
2866 2866 # purely from data cached by the status walk above.
2867 2867 if repo.dirstate.normalize(f) in repo.dirstate:
2868 2868 continue
2869 2869 ui.warn(_('not removing %s: '
2870 2870 'file is already untracked\n')
2871 2871 % match.rel(f))
2872 2872 bad.append(f)
2873 2873
2874 2874 for f in forget:
2875 2875 if ui.verbose or not match.exact(f):
2876 2876 ui.status(_('removing %s\n') % match.rel(f))
2877 2877
2878 2878 rejected = wctx.forget(forget, prefix)
2879 2879 bad.extend(f for f in rejected if f in match.files())
2880 2880 forgot.extend(f for f in forget if f not in rejected)
2881 2881 return bad, forgot
2882 2882
2883 2883 def files(ui, ctx, m, fm, fmt, subrepos):
2884 2884 rev = ctx.rev()
2885 2885 ret = 1
2886 2886 ds = ctx.repo().dirstate
2887 2887
2888 2888 for f in ctx.matches(m):
2889 2889 if rev is None and ds[f] == 'r':
2890 2890 continue
2891 2891 fm.startitem()
2892 2892 if ui.verbose:
2893 2893 fc = ctx[f]
2894 2894 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2895 2895 fm.data(abspath=f)
2896 2896 fm.write('path', fmt, m.rel(f))
2897 2897 ret = 0
2898 2898
2899 2899 for subpath in sorted(ctx.substate):
2900 2900 submatch = matchmod.subdirmatcher(subpath, m)
2901 2901 if (subrepos or m.exact(subpath) or any(submatch.files())):
2902 2902 sub = ctx.sub(subpath)
2903 2903 try:
2904 2904 recurse = m.exact(subpath) or subrepos
2905 2905 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2906 2906 ret = 0
2907 2907 except error.LookupError:
2908 2908 ui.status(_("skipping missing subrepository: %s\n")
2909 2909 % m.abs(subpath))
2910 2910
2911 2911 return ret
2912 2912
2913 2913 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2914 2914 join = lambda f: os.path.join(prefix, f)
2915 2915 ret = 0
2916 2916 s = repo.status(match=m, clean=True)
2917 2917 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2918 2918
2919 2919 wctx = repo[None]
2920 2920
2921 2921 if warnings is None:
2922 2922 warnings = []
2923 2923 warn = True
2924 2924 else:
2925 2925 warn = False
2926 2926
2927 2927 subs = sorted(wctx.substate)
2928 2928 total = len(subs)
2929 2929 count = 0
2930 2930 for subpath in subs:
2931 2931 count += 1
2932 2932 submatch = matchmod.subdirmatcher(subpath, m)
2933 2933 if subrepos or m.exact(subpath) or any(submatch.files()):
2934 2934 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2935 2935 sub = wctx.sub(subpath)
2936 2936 try:
2937 2937 if sub.removefiles(submatch, prefix, after, force, subrepos,
2938 2938 warnings):
2939 2939 ret = 1
2940 2940 except error.LookupError:
2941 2941 warnings.append(_("skipping missing subrepository: %s\n")
2942 2942 % join(subpath))
2943 2943 ui.progress(_('searching'), None)
2944 2944
2945 2945 # warn about failure to delete explicit files/dirs
2946 2946 deleteddirs = util.dirs(deleted)
2947 2947 files = m.files()
2948 2948 total = len(files)
2949 2949 count = 0
2950 2950 for f in files:
2951 2951 def insubrepo():
2952 2952 for subpath in wctx.substate:
2953 2953 if f.startswith(subpath + '/'):
2954 2954 return True
2955 2955 return False
2956 2956
2957 2957 count += 1
2958 2958 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2959 2959 isdir = f in deleteddirs or wctx.hasdir(f)
2960 2960 if (f in repo.dirstate or isdir or f == '.'
2961 2961 or insubrepo() or f in subs):
2962 2962 continue
2963 2963
2964 2964 if repo.wvfs.exists(f):
2965 2965 if repo.wvfs.isdir(f):
2966 2966 warnings.append(_('not removing %s: no tracked files\n')
2967 2967 % m.rel(f))
2968 2968 else:
2969 2969 warnings.append(_('not removing %s: file is untracked\n')
2970 2970 % m.rel(f))
2971 2971 # missing files will generate a warning elsewhere
2972 2972 ret = 1
2973 2973 ui.progress(_('deleting'), None)
2974 2974
2975 2975 if force:
2976 2976 list = modified + deleted + clean + added
2977 2977 elif after:
2978 2978 list = deleted
2979 2979 remaining = modified + added + clean
2980 2980 total = len(remaining)
2981 2981 count = 0
2982 2982 for f in remaining:
2983 2983 count += 1
2984 2984 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2985 2985 if ui.verbose or (f in files):
2986 2986 warnings.append(_('not removing %s: file still exists\n')
2987 2987 % m.rel(f))
2988 2988 ret = 1
2989 2989 ui.progress(_('skipping'), None)
2990 2990 else:
2991 2991 list = deleted + clean
2992 2992 total = len(modified) + len(added)
2993 2993 count = 0
2994 2994 for f in modified:
2995 2995 count += 1
2996 2996 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2997 2997 warnings.append(_('not removing %s: file is modified (use -f'
2998 2998 ' to force removal)\n') % m.rel(f))
2999 2999 ret = 1
3000 3000 for f in added:
3001 3001 count += 1
3002 3002 ui.progress(_('skipping'), count, total=total, unit=_('files'))
3003 3003 warnings.append(_("not removing %s: file has been marked for add"
3004 3004 " (use 'hg forget' to undo add)\n") % m.rel(f))
3005 3005 ret = 1
3006 3006 ui.progress(_('skipping'), None)
3007 3007
3008 3008 list = sorted(list)
3009 3009 total = len(list)
3010 3010 count = 0
3011 3011 for f in list:
3012 3012 count += 1
3013 3013 if ui.verbose or not m.exact(f):
3014 3014 ui.progress(_('deleting'), count, total=total, unit=_('files'))
3015 3015 ui.status(_('removing %s\n') % m.rel(f))
3016 3016 ui.progress(_('deleting'), None)
3017 3017
3018 3018 with repo.wlock():
3019 3019 if not after:
3020 3020 for f in list:
3021 3021 if f in added:
3022 3022 continue # we never unlink added files on remove
3023 3023 repo.wvfs.unlinkpath(f, ignoremissing=True)
3024 3024 repo[None].forget(list)
3025 3025
3026 3026 if warn:
3027 3027 for warning in warnings:
3028 3028 ui.warn(warning)
3029 3029
3030 3030 return ret
3031 3031
3032 3032 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3033 3033 err = 1
3034 3034
3035 3035 def write(path):
3036 3036 filename = None
3037 3037 if fntemplate:
3038 3038 filename = makefilename(repo, fntemplate, ctx.node(),
3039 3039 pathname=os.path.join(prefix, path))
3040 3040 # attempt to create the directory if it does not already exist
3041 3041 try:
3042 3042 os.makedirs(os.path.dirname(filename))
3043 3043 except OSError:
3044 3044 pass
3045 3045 with formatter.maybereopen(basefm, filename, opts) as fm:
3046 3046 data = ctx[path].data()
3047 3047 if opts.get('decode'):
3048 3048 data = repo.wwritedata(path, data)
3049 3049 fm.startitem()
3050 3050 fm.write('data', '%s', data)
3051 3051 fm.data(abspath=path, path=matcher.rel(path))
3052 3052
3053 3053 # Automation often uses hg cat on single files, so special-case it
3054 3054 # for performance to avoid the cost of parsing the manifest.
3055 3055 if len(matcher.files()) == 1 and not matcher.anypats():
3056 3056 file = matcher.files()[0]
3057 3057 mfl = repo.manifestlog
3058 3058 mfnode = ctx.manifestnode()
3059 3059 try:
3060 3060 if mfnode and mfl[mfnode].find(file)[0]:
3061 3061 write(file)
3062 3062 return 0
3063 3063 except KeyError:
3064 3064 pass
3065 3065
3066 3066 for abs in ctx.walk(matcher):
3067 3067 write(abs)
3068 3068 err = 0
3069 3069
3070 3070 for subpath in sorted(ctx.substate):
3071 3071 sub = ctx.sub(subpath)
3072 3072 try:
3073 3073 submatch = matchmod.subdirmatcher(subpath, matcher)
3074 3074
3075 3075 if not sub.cat(submatch, basefm, fntemplate,
3076 3076 os.path.join(prefix, sub._path), **opts):
3077 3077 err = 0
3078 3078 except error.RepoLookupError:
3079 3079 ui.status(_("skipping missing subrepository: %s\n")
3080 3080 % os.path.join(prefix, subpath))
3081 3081
3082 3082 return err
3083 3083
3084 3084 def commit(ui, repo, commitfunc, pats, opts):
3085 3085 '''commit the specified files or all outstanding changes'''
3086 3086 date = opts.get('date')
3087 3087 if date:
3088 3088 opts['date'] = util.parsedate(date)
3089 3089 message = logmessage(ui, opts)
3090 3090 matcher = scmutil.match(repo[None], pats, opts)
3091 3091
3092 3092 dsguard = None
3093 3093 # extract addremove carefully -- this function can be called from a command
3094 3094 # that doesn't support addremove
3095 3095 if opts.get('addremove'):
3096 3096 dsguard = dirstateguard.dirstateguard(repo, 'commit')
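# the dirstate guard is only created when addremove may touch the
# dirstate; otherwise the no-op context manager below is used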
3097 3097 with dsguard or util.nullcontextmanager():
3098 3098 if dsguard:
3099 3099 if scmutil.addremove(repo, matcher, "", opts) != 0:
3100 3100 raise error.Abort(
3101 3101 _("failed to mark all new/missing files as added/removed"))
3102 3102
3103 3103 return commitfunc(ui, repo, message, matcher, opts)
3104 3104
3105 3105 def samefile(f, ctx1, ctx2):
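# Return True if `f` is identical (content and flags) in ctx1 and ctx2,
# treating a file that is absent from both manifests as identical too.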
3106 3106 if f in ctx1.manifest():
3107 3107 a = ctx1.filectx(f)
3108 3108 if f in ctx2.manifest():
3109 3109 b = ctx2.filectx(f)
3110 3110 return (not a.cmp(b)
3111 3111 and a.flags() == b.flags())
3112 3112 else:
3113 3113 return False
3114 3114 else:
3115 3115 return f not in ctx2.manifest()
3116 3116
3117 3117 def amend(ui, repo, old, extra, pats, opts):
3118 3118 # avoid cycle context -> subrepo -> cmdutil
3119 3119 from . import context
3120 3120
3121 3121 # amend will reuse the existing user if not specified, but the obsolete
3122 3122 # marker creation requires that the current user's name is specified.
3123 3123 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3124 3124 ui.username() # raise exception if username not set
3125 3125
3126 3126 ui.note(_('amending changeset %s\n') % old)
3127 3127 base = old.p1()
3128 3128
3129 3129 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3130 3130 # Participating changesets:
3131 3131 #
3132 3132 # wctx o - workingctx that contains changes from working copy
3133 3133 # | to go into amending commit
3134 3134 # |
3135 3135 # old o - changeset to amend
3136 3136 # |
3137 3137 # base o - first parent of the changeset to amend
3138 3138 wctx = repo[None]
3139 3139
3140 3140 # Copy to avoid mutating input
3141 3141 extra = extra.copy()
3142 3142 # Update extra dict from amended commit (e.g. to preserve graft
3143 3143 # source)
3144 3144 extra.update(old.extra())
3145 3145
3146 3146 # Also update it from the wctx
3147 3147 extra.update(wctx.extra())
3148 3148
3149 3149 user = opts.get('user') or old.user()
3150 3150 date = opts.get('date') or old.date()
3151 3151
3152 3152 # Parse the date to allow comparison between date and old.date()
3153 3153 date = util.parsedate(date)
3154 3154
3155 3155 if len(old.parents()) > 1:
3156 3156 # ctx.files() isn't reliable for merges, so fall back to the
3157 3157 # slower repo.status() method
3158 3158 files = set([fn for st in repo.status(base, old)[:3]
3159 3159 for fn in st])
3160 3160 else:
3161 3161 files = set(old.files())
3162 3162
3163 3163 # add/remove the files to the working copy if the "addremove" option
3164 3164 # was specified.
3165 3165 matcher = scmutil.match(wctx, pats, opts)
3166 3166 if (opts.get('addremove')
3167 3167 and scmutil.addremove(repo, matcher, "", opts)):
3168 3168 raise error.Abort(
3169 3169 _("failed to mark all new/missing files as added/removed"))
3170 3170
3171 3171 # Check subrepos. This depends on in-place wctx._status update in
3172 3172 # subrepo.precommit(). To minimize the risk of this hack, we do
3173 3173 # nothing if .hgsub does not exist.
3174 3174 if '.hgsub' in wctx or '.hgsub' in old:
3175 3175 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3176 3176 subs, commitsubs, newsubstate = subrepo.precommit(
3177 3177 ui, wctx, wctx._status, matcher)
3178 3178 # amend should abort if commitsubrepos is enabled
3179 3179 assert not commitsubs
3180 3180 if subs:
3181 3181 subrepo.writestate(repo, newsubstate)
3182 3182
3183 3183 filestoamend = set(f for f in wctx.files() if matcher(f))
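# only the working-copy files that also match the user's patterns are
# folded into the amended commit; everything else keeps the content it
# had in the old changeset (see filectxfn below)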
3184 3184
3185 3185 changes = (len(filestoamend) > 0)
3186 3186 if changes:
3187 3187 # Recompute copies (avoid recording a -> b -> a)
3188 3188 copied = copies.pathcopies(base, wctx, matcher)
3189 3189 if old.p2().node() != nullid:
3190 3190 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3191 3191
3192 3192 # Prune files which were reverted by the updates: if old
3193 3193 # introduced file X and the file was renamed in the working
3194 3194 # copy, then those two files are the same and
3195 3195 # we can discard X from our list of files. Likewise if X
3196 3196 # was removed, it's no longer relevant. If X is missing (aka
3197 3197 # deleted), old X must be preserved.
3198 3198 files.update(filestoamend)
3199 3199 files = [f for f in files if (not samefile(f, wctx, base)
3200 3200 or f in wctx.deleted())]
3201 3201
3202 3202 def filectxfn(repo, ctx_, path):
3203 3203 try:
3204 3204 # If the file being considered is not amongst the files
3205 3205 # to be amended, we should return the file context from the
3206 3206 # old changeset. This avoids issues when only some files in
3207 3207 # the working copy are being amended but there are also
3208 3208 # changes to other files from the old changeset.
3209 3209 if path not in filestoamend:
3210 3210 return old.filectx(path)
3211 3211
3212 3212 # Return None for removed files.
3213 3213 if path in wctx.removed():
3214 3214 return None
3215 3215
3216 3216 fctx = wctx[path]
3217 3217 flags = fctx.flags()
3218 3218 mctx = context.memfilectx(repo,
3219 3219 fctx.path(), fctx.data(),
3220 3220 islink='l' in flags,
3221 3221 isexec='x' in flags,
3222 3222 copied=copied.get(path))
3223 3223 return mctx
3224 3224 except KeyError:
3225 3225 return None
3226 3226 else:
3227 3227 ui.note(_('copying changeset %s to %s\n') % (old, base))
3228 3228
3229 3229 # Use version of files as in the old cset
3230 3230 def filectxfn(repo, ctx_, path):
3231 3231 try:
3232 3232 return old.filectx(path)
3233 3233 except KeyError:
3234 3234 return None
3235 3235
3236 3236 # See if we got a message from -m or -l; if not, open the editor with
3237 3237 # the message of the changeset to amend.
3238 3238 message = logmessage(ui, opts)
3239 3239
3240 3240 editform = mergeeditform(old, 'commit.amend')
3241 3241 editor = getcommiteditor(editform=editform,
3242 3242 **pycompat.strkwargs(opts))
3243 3243
3244 3244 if not message:
3245 3245 editor = getcommiteditor(edit=True, editform=editform)
3246 3246 message = old.description()
3247 3247
3248 3248 pureextra = extra.copy()
3249 3249 extra['amend_source'] = old.hex()
3250 3250
3251 3251 new = context.memctx(repo,
3252 3252 parents=[base.node(), old.p2().node()],
3253 3253 text=message,
3254 3254 files=files,
3255 3255 filectxfn=filectxfn,
3256 3256 user=user,
3257 3257 date=date,
3258 3258 extra=extra,
3259 3259 editor=editor)
3260 3260
3261 3261 newdesc = changelog.stripdesc(new.description())
3262 3262 if ((not changes)
3263 3263 and newdesc == old.description()
3264 3264 and user == old.user()
3265 3265 and date == old.date()
3266 3266 and pureextra == old.extra()):
3267 3267 # nothing changed. continuing here would create a new node
3268 3268 # anyway because of the amend_source noise.
3269 3269 #
3270 3270 # This is not what we expect from amend.
3271 3271 return old.node()
3272 3272
3273 3273 if opts.get('secret'):
3274 3274 commitphase = 'secret'
3275 3275 else:
3276 3276 commitphase = old.phase()
3277 3277 overrides = {('phases', 'new-commit'): commitphase}
3278 3278 with ui.configoverride(overrides, 'amend'):
3279 3279 newid = repo.commitctx(new)
3280 3280
3281 3281 # Reroute the working copy parent to the new changeset
3282 3282 repo.setparents(newid, nullid)
3283 3283 mapping = {old.node(): (newid,)}
3284 3284 obsmetadata = None
3285 3285 if opts.get('note'):
3286 3286 obsmetadata = {'note': opts['note']}
3287 3287 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3288 3288
3289 3289 # Fixing the dirstate because localrepo.commitctx does not update
3290 3290 # it. This is rather convenient because we did not need to update
3291 3291 # the dirstate for all the files in the new commit which commitctx
3292 3292 # could have done if it updated the dirstate. Now, we can
3293 3293 # selectively update the dirstate only for the amended files.
3294 3294 dirstate = repo.dirstate
3295 3295
3296 3296 # Update the state of the files which were added and
3297 3297 # modified in the amend to "normal" in the dirstate.
3298 3298 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3299 3299 for f in normalfiles:
3300 3300 dirstate.normal(f)
3301 3301
3302 3302 # Update the state of files which were removed in the amend
3303 3303 # to "removed" in the dirstate.
3304 3304 removedfiles = set(wctx.removed()) & filestoamend
3305 3305 for f in removedfiles:
3306 3306 dirstate.drop(f)
3307 3307
3308 3308 return newid
3309 3309
3310 3310 def commiteditor(repo, ctx, subs, editform=''):
3311 3311 if ctx.description():
3312 3312 return ctx.description()
3313 3313 return commitforceeditor(repo, ctx, subs, editform=editform,
3314 3314 unchangedmessagedetection=True)
3315 3315
3316 3316 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3317 3317 editform='', unchangedmessagedetection=False):
3318 3318 if not extramsg:
3319 3319 extramsg = _("Leave message empty to abort commit.")
3320 3320
3321 3321 forms = [e for e in editform.split('.') if e]
3322 3322 forms.insert(0, 'changeset')
3323 3323 templatetext = None
3324 3324 while forms:
3325 3325 ref = '.'.join(forms)
3326 3326 if repo.ui.config('committemplate', ref):
3327 3327 templatetext = committext = buildcommittemplate(
3328 3328 repo, ctx, subs, extramsg, ref)
3329 3329 break
3330 3330 forms.pop()
3331 3331 else:
3332 3332 committext = buildcommittext(repo, ctx, subs, extramsg)
3333 3333
3334 3334 # run editor in the repository root
3335 3335 olddir = pycompat.getcwd()
3336 3336 os.chdir(repo.root)
3337 3337
3338 3338 # make in-memory changes visible to external process
3339 3339 tr = repo.currenttransaction()
3340 3340 repo.dirstate.write(tr)
3341 3341 pending = tr and tr.writepending() and repo.root
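# `pending` is the repo root when transaction data has been written out
# for the external editor/hooks to inspect, and a false value otherwise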
3342 3342
3343 3343 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3344 3344 editform=editform, pending=pending,
3345 3345 repopath=repo.path, action='commit')
3346 3346 text = editortext
3347 3347
3348 3348 # strip away anything below this special string (used for editors that want
3349 3349 # to display the diff)
3350 3350 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3351 3351 if stripbelow:
3352 3352 text = text[:stripbelow.start()]
3353 3353
3354 3354 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3355 3355 os.chdir(olddir)
3356 3356
3357 3357 if finishdesc:
3358 3358 text = finishdesc(text)
3359 3359 if not text.strip():
3360 3360 raise error.Abort(_("empty commit message"))
3361 3361 if unchangedmessagedetection and editortext == templatetext:
3362 3362 raise error.Abort(_("commit message unchanged"))
3363 3363
3364 3364 return text
3365 3365
3366 3366 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3367 3367 ui = repo.ui
3368 3368 spec = formatter.templatespec(ref, None, None)
3369 3369 t = changeset_templater(ui, repo, spec, None, {}, False)
3370 3370 t.t.cache.update((k, templater.unquotestring(v))
3371 3371 for k, v in repo.ui.configitems('committemplate'))
3372 3372
3373 3373 if not extramsg:
3374 3374 extramsg = '' # ensure that extramsg is string
3375 3375
3376 3376 ui.pushbuffer()
3377 3377 t.show(ctx, extramsg=extramsg)
3378 3378 return ui.popbuffer()
3379 3379
3380 3380 def hgprefix(msg):
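# prefix each non-empty line of `msg` with "HG: " for the commit editor
# text (empty lines are dropped)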
3381 3381 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3382 3382
3383 3383 def buildcommittext(repo, ctx, subs, extramsg):
3384 3384 edittext = []
3385 3385 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3386 3386 if ctx.description():
3387 3387 edittext.append(ctx.description())
3388 3388 edittext.append("")
3389 3389 edittext.append("") # Empty line between message and comments.
3390 3390 edittext.append(hgprefix(_("Enter commit message."
3391 3391 " Lines beginning with 'HG:' are removed.")))
3392 3392 edittext.append(hgprefix(extramsg))
3393 3393 edittext.append("HG: --")
3394 3394 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3395 3395 if ctx.p2():
3396 3396 edittext.append(hgprefix(_("branch merge")))
3397 3397 if ctx.branch():
3398 3398 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3399 3399 if bookmarks.isactivewdirparent(repo):
3400 3400 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3401 3401 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3402 3402 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3403 3403 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3404 3404 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3405 3405 if not added and not modified and not removed:
3406 3406 edittext.append(hgprefix(_("no files changed")))
3407 3407 edittext.append("")
3408 3408
3409 3409 return "\n".join(edittext)
3410 3410
3411 3411 def commitstatus(repo, node, branch, bheads=None, opts=None):
3412 3412 if opts is None:
3413 3413 opts = {}
3414 3414 ctx = repo[node]
3415 3415 parents = ctx.parents()
3416 3416
3417 3417 if (not opts.get('amend') and bheads and node not in bheads and not
3418 3418 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3419 3419 repo.ui.status(_('created new head\n'))
3420 3420 # The message is not printed for initial roots. For the other
3421 3421 # changesets, it is printed in the following situations:
3422 3422 #
3423 3423 # Par column: for the 2 parents with ...
3424 3424 # N: null or no parent
3425 3425 # B: parent is on another named branch
3426 3426 # C: parent is a regular non head changeset
3427 3427 # H: parent was a branch head of the current branch
3428 3428 # Msg column: whether we print "created new head" message
3429 3429 # In the following, it is assumed that there already exists some
3430 3430 # initial branch heads of the current branch, otherwise nothing is
3431 3431 # printed anyway.
3432 3432 #
3433 3433 # Par Msg Comment
3434 3434 # N N y additional topo root
3435 3435 #
3436 3436 # B N y additional branch root
3437 3437 # C N y additional topo head
3438 3438 # H N n usual case
3439 3439 #
3440 3440 # B B y weird additional branch root
3441 3441 # C B y branch merge
3442 3442 # H B n merge with named branch
3443 3443 #
3444 3444 # C C y additional head from merge
3445 3445 # C H n merge with a head
3446 3446 #
3447 3447 # H H n head merge: head count decreases
3448 3448
3449 3449 if not opts.get('close_branch'):
3450 3450 for r in parents:
3451 3451 if r.closesbranch() and r.branch() == branch:
3452 3452 repo.ui.status(_('reopening closed branch head %d\n') % r)
3453 3453
3454 3454 if repo.ui.debugflag:
3455 3455 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3456 3456 elif repo.ui.verbose:
3457 3457 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3458 3458
3459 3459 def postcommitstatus(repo, pats, opts):
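# working-directory status, limited to the files matched by `pats`/`opts`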
3460 3460 return repo.status(match=scmutil.match(repo[None], pats, opts))
3461 3461
3462 3462 def revert(ui, repo, ctx, parents, *pats, **opts):
3463 3463 opts = pycompat.byteskwargs(opts)
3464 3464 parent, p2 = parents
3465 3465 node = ctx.node()
3466 3466
3467 3467 mf = ctx.manifest()
3468 3468 if node == p2:
3469 3469 parent = p2
3470 3470
3471 3471 # need all matching names in dirstate and manifest of target rev,
3472 3472 # so have to walk both. do not print errors if files exist in one
3473 3473 # but not other. in both cases, filesets should be evaluated against
3474 3474 # workingctx to get consistent result (issue4497). this means 'set:**'
3475 3475 # cannot be used to select missing files from target rev.
3476 3476
3477 3477 # `names` is a mapping for all elements in working copy and target revision
3478 3478 # The mapping is in the form:
3479 3479 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3480 3480 names = {}
3481 3481
3482 3482 with repo.wlock():
3483 3483 ## filling of the `names` mapping
3484 3484 # walk dirstate to fill `names`
3485 3485
3486 3486 interactive = opts.get('interactive', False)
3487 3487 wctx = repo[None]
3488 3488 m = scmutil.match(wctx, pats, opts)
3489 3489
3490 3490 # we'll need this later
3491 3491 targetsubs = sorted(s for s in wctx.substate if m(s))
3492 3492
3493 3493 if not m.always():
3494 3494 matcher = matchmod.badmatch(m, lambda x, y: False)
3495 3495 for abs in wctx.walk(matcher):
3496 3496 names[abs] = m.rel(abs), m.exact(abs)
3497 3497
3498 3498 # walk target manifest to fill `names`
3499 3499
3500 3500 def badfn(path, msg):
3501 3501 if path in names:
3502 3502 return
3503 3503 if path in ctx.substate:
3504 3504 return
3505 3505 path_ = path + '/'
3506 3506 for f in names:
3507 3507 if f.startswith(path_):
3508 3508 return
3509 3509 ui.warn("%s: %s\n" % (m.rel(path), msg))
3510 3510
3511 3511 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3512 3512 if abs not in names:
3513 3513 names[abs] = m.rel(abs), m.exact(abs)
3514 3514
3515 3515 # Find status of all files in `names`.
3516 3516 m = scmutil.matchfiles(repo, names)
3517 3517
3518 3518 changes = repo.status(node1=node, match=m,
3519 3519 unknown=True, ignored=True, clean=True)
3520 3520 else:
3521 3521 changes = repo.status(node1=node, match=m)
3522 3522 for kind in changes:
3523 3523 for abs in kind:
3524 3524 names[abs] = m.rel(abs), m.exact(abs)
3525 3525
3526 3526 m = scmutil.matchfiles(repo, names)
3527 3527
3528 3528 modified = set(changes.modified)
3529 3529 added = set(changes.added)
3530 3530 removed = set(changes.removed)
3531 3531 _deleted = set(changes.deleted)
3532 3532 unknown = set(changes.unknown)
3533 3533 unknown.update(changes.ignored)
3534 3534 clean = set(changes.clean)
3535 3535 modadded = set()
3536 3536
3537 3537 # We need to account for the state of the file in the dirstate,
3538 3538 # even when we revert against something other than the parent. This will
3539 3539 # slightly alter the behavior of revert (doing a backup or not, deleting
3540 3540 # or just forgetting, etc).
3541 3541 if parent == node:
3542 3542 dsmodified = modified
3543 3543 dsadded = added
3544 3544 dsremoved = removed
3545 3545 # store all local modifications, useful later for rename detection
3546 3546 localchanges = dsmodified | dsadded
3547 3547 modified, added, removed = set(), set(), set()
3548 3548 else:
3549 3549 changes = repo.status(node1=parent, match=m)
3550 3550 dsmodified = set(changes.modified)
3551 3551 dsadded = set(changes.added)
3552 3552 dsremoved = set(changes.removed)
3553 3553 # store all local modifications, useful later for rename detection
3554 3554 localchanges = dsmodified | dsadded
3555 3555
3556 3556 # only take into account removes between wc and target
3557 3557 clean |= dsremoved - removed
3558 3558 dsremoved &= removed
3559 3559 # distinguish between dirstate removes and the others
3560 3560 removed -= dsremoved
3561 3561
3562 3562 modadded = added & dsmodified
3563 3563 added -= modadded
3564 3564
3565 3565 # tell newly modified files apart.
3566 3566 dsmodified &= modified
3567 3567 dsmodified |= modified & dsadded # dirstate added may need backup
3568 3568 modified -= dsmodified
3569 3569
3570 3570 # We need to wait for some post-processing to update this set
3571 3571 # before making the distinction. The dirstate will be used for
3572 3572 # that purpose.
3573 3573 dsadded = added
3574 3574
3575 3575 # in case of merge, files that are actually added can be reported as
3576 3576 # modified, we need to post process the result
3577 3577 if p2 != nullid:
3578 3578 mergeadd = set(dsmodified)
3579 3579 for path in dsmodified:
3580 3580 if path in mf:
3581 3581 mergeadd.remove(path)
3582 3582 dsadded |= mergeadd
3583 3583 dsmodified -= mergeadd
3584 3584
3585 3585 # if f is a rename, update `names` to also revert the source
3586 3586 cwd = repo.getcwd()
3587 3587 for f in localchanges:
3588 3588 src = repo.dirstate.copied(f)
3589 3589 # XXX should we check for rename down to target node?
3590 3590 if src and src not in names and repo.dirstate[src] == 'r':
3591 3591 dsremoved.add(src)
3592 3592 names[src] = (repo.pathto(src, cwd), True)
3593 3593
3594 3594 # determine the exact nature of the deleted files
3595 3595 deladded = set(_deleted)
3596 3596 for path in _deleted:
3597 3597 if path in mf:
3598 3598 deladded.remove(path)
3599 3599 deleted = _deleted - deladded
3600 3600
3601 3601 # distinguish between files to forget and the others
3602 3602 added = set()
3603 3603 for abs in dsadded:
3604 3604 if repo.dirstate[abs] != 'a':
3605 3605 added.add(abs)
3606 3606 dsadded -= added
3607 3607
3608 3608 for abs in deladded:
3609 3609 if repo.dirstate[abs] == 'a':
3610 3610 dsadded.add(abs)
3611 3611 deladded -= dsadded
3612 3612
3613 3613 # For files marked as removed, we check if an unknown file is present at
3614 3614 # the same path. If such a file exists it may need to be backed up.
3615 3615 # Making the distinction at this stage helps keep the backup
3616 3616 # logic simpler.
3617 3617 removunk = set()
3618 3618 for abs in removed:
3619 3619 target = repo.wjoin(abs)
3620 3620 if os.path.lexists(target):
3621 3621 removunk.add(abs)
3622 3622 removed -= removunk
3623 3623
3624 3624 dsremovunk = set()
3625 3625 for abs in dsremoved:
3626 3626 target = repo.wjoin(abs)
3627 3627 if os.path.lexists(target):
3628 3628 dsremovunk.add(abs)
3629 3629 dsremoved -= dsremovunk
3630 3630
3631 3631 # actions to be actually performed by revert
3632 3632 # (<list of files>, <message>) tuple
3633 3633 actions = {'revert': ([], _('reverting %s\n')),
3634 3634 'add': ([], _('adding %s\n')),
3635 3635 'remove': ([], _('removing %s\n')),
3636 3636 'drop': ([], _('removing %s\n')),
3637 3637 'forget': ([], _('forgetting %s\n')),
3638 3638 'undelete': ([], _('undeleting %s\n')),
3639 3639 'noop': (None, _('no changes needed to %s\n')),
3640 3640 'unknown': (None, _('file not managed: %s\n')),
3641 3641 }
3642 3642
3643 3643 # "constant" that convey the backup strategy.
3644 3644 # All set to `discard` if `no-backup` is set do avoid checking
3645 3645 # no_backup lower in the code.
3646 3646 # These values are ordered for comparison purposes
3647 3647 backupinteractive = 3 # do backup if interactively modified
3648 3648 backup = 2 # unconditionally do backup
3649 3649 check = 1 # check if the existing file differs from target
3650 3650 discard = 0 # never do backup
3651 3651 if opts.get('no_backup'):
3652 3652 backupinteractive = backup = check = discard
3653 3653 if interactive:
3654 3654 dsmodifiedbackup = backupinteractive
3655 3655 else:
3656 3656 dsmodifiedbackup = backup
3657 3657 tobackup = set()
3658 3658
3659 3659 backupanddel = actions['remove']
3660 3660 if not opts.get('no_backup'):
3661 3661 backupanddel = actions['drop']
3662 3662
3663 3663 disptable = (
3664 3664 # dispatch table:
3665 3665 # file state
3666 3666 # action
3667 3667 # make backup
3668 3668
3669 3669 ## Sets that will result in changes to files on disk
3670 3670 # Modified compared to target, no local change
3671 3671 (modified, actions['revert'], discard),
3672 3672 # Modified compared to target, but local file is deleted
3673 3673 (deleted, actions['revert'], discard),
3674 3674 # Modified compared to target, local change
3675 3675 (dsmodified, actions['revert'], dsmodifiedbackup),
3676 3676 # Added since target
3677 3677 (added, actions['remove'], discard),
3678 3678 # Added in working directory
3679 3679 (dsadded, actions['forget'], discard),
3680 3680 # Added since target, have local modification
3681 3681 (modadded, backupanddel, backup),
3682 3682 # Added since target but file is missing in working directory
3683 3683 (deladded, actions['drop'], discard),
3684 3684 # Removed since target, before working copy parent
3685 3685 (removed, actions['add'], discard),
3686 3686 # Same as `removed` but an unknown file exists at the same path
3687 3687 (removunk, actions['add'], check),
3688 3688 # Removed since target, marked as such in working copy parent
3689 3689 (dsremoved, actions['undelete'], discard),
3690 3690 # Same as `dsremoved` but an unknown file exists at the same path
3691 3691 (dsremovunk, actions['undelete'], check),
3692 3692 ## the following sets do not result in any file changes
3693 3693 # File with no modification
3694 3694 (clean, actions['noop'], discard),
3695 3695 # Existing file, not tracked anywhere
3696 3696 (unknown, actions['unknown'], discard),
3697 3697 )
3698 3698
3699 3699 for abs, (rel, exact) in sorted(names.items()):
3700 3700 # target file to be touched on disk (relative to cwd)
3701 3701 target = repo.wjoin(abs)
3702 3702 # search the entry in the dispatch table.
3703 3703 # if the file is in any of these sets, it was touched in the working
3704 3704 # directory parent and we are sure it needs to be reverted.
3705 3705 for table, (xlist, msg), dobackup in disptable:
3706 3706 if abs not in table:
3707 3707 continue
3708 3708 if xlist is not None:
3709 3709 xlist.append(abs)
3710 3710 if dobackup:
3711 3711 # If in interactive mode, don't automatically create
3712 3712 # .orig files (issue4793)
3713 3713 if dobackup == backupinteractive:
3714 3714 tobackup.add(abs)
3715 3715 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3716 3716 bakname = scmutil.origpath(ui, repo, rel)
3717 3717 ui.note(_('saving current version of %s as %s\n') %
3718 3718 (rel, bakname))
3719 3719 if not opts.get('dry_run'):
3720 3720 if interactive:
3721 3721 util.copyfile(target, bakname)
3722 3722 else:
3723 3723 util.rename(target, bakname)
3724 3724 if ui.verbose or not exact:
3725 3725 if not isinstance(msg, bytes):
3726 3726 msg = msg(abs)
3727 3727 ui.status(msg % rel)
3728 3728 elif exact:
3729 3729 ui.warn(msg % rel)
3730 3730 break
3731 3731
3732 3732 if not opts.get('dry_run'):
3733 3733 needdata = ('revert', 'add', 'undelete')
3734 3734 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3735 3735 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3736 3736
3737 3737 if targetsubs:
3738 3738 # Revert the subrepos on the revert list
3739 3739 for sub in targetsubs:
3740 3740 try:
3741 3741 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3742 3742 **pycompat.strkwargs(opts))
3743 3743 except KeyError:
3744 3744 raise error.Abort("subrepository '%s' does not exist in %s!"
3745 3745 % (sub, short(ctx.node())))
3746 3746
3747 3747 def _revertprefetch(repo, ctx, *files):
3748 3748 """Let extension changing the storage layer prefetch content"""
3749 3749
3750 3750 def _performrevert(repo, parents, ctx, actions, interactive=False,
3751 3751 tobackup=None):
3752 3752 """function that actually perform all the actions computed for revert
3753 3753
3754 3754 This is an independent function to let extensions plug in and react to
3755 3755 the imminent revert.
3756 3756
3757 3757 Make sure you have the working directory locked when calling this function.
3758 3758 """
3759 3759 parent, p2 = parents
3760 3760 node = ctx.node()
3761 3761 excluded_files = []
3762 3762 matcher_opts = {"exclude": excluded_files}
3763 3763
3764 3764 def checkout(f):
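# write the content and flags of `f` from the target context into the
# working directory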
3765 3765 fc = ctx[f]
3766 3766 repo.wwrite(f, fc.data(), fc.flags())
3767 3767
3768 3768 def doremove(f):
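# unlink `f` from the working directory (ignoring a missing file) and
# mark it as removed in the dirstate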
3769 3769 try:
3770 3770 repo.wvfs.unlinkpath(f)
3771 3771 except OSError:
3772 3772 pass
3773 3773 repo.dirstate.remove(f)
3774 3774
3775 3775 audit_path = pathutil.pathauditor(repo.root, cached=True)
3776 3776 for f in actions['forget'][0]:
3777 3777 if interactive:
3778 3778 choice = repo.ui.promptchoice(
3779 3779 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3780 3780 if choice == 0:
3781 3781 repo.dirstate.drop(f)
3782 3782 else:
3783 3783 excluded_files.append(repo.wjoin(f))
3784 3784 else:
3785 3785 repo.dirstate.drop(f)
3786 3786 for f in actions['remove'][0]:
3787 3787 audit_path(f)
3788 3788 if interactive:
3789 3789 choice = repo.ui.promptchoice(
3790 3790 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3791 3791 if choice == 0:
3792 3792 doremove(f)
3793 3793 else:
3794 3794 excluded_files.append(repo.wjoin(f))
3795 3795 else:
3796 3796 doremove(f)
3797 3797 for f in actions['drop'][0]:
3798 3798 audit_path(f)
3799 3799 repo.dirstate.remove(f)
3800 3800
3801 3801 normal = None
3802 3802 if node == parent:
3803 3803 # We're reverting to our parent. If possible, we'd like status
3804 3804 # to report the file as clean. We have to use normallookup for
3805 3805 # merges to avoid losing information about merged/dirty files.
3806 3806 if p2 != nullid:
3807 3807 normal = repo.dirstate.normallookup
3808 3808 else:
3809 3809 normal = repo.dirstate.normal
3810 3810
3811 3811 newlyaddedandmodifiedfiles = set()
3812 3812 if interactive:
3813 3813 # Prompt the user for changes to revert
3814 3814 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3815 3815 m = scmutil.match(ctx, torevert, matcher_opts)
3816 3816 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3817 3817 diffopts.nodates = True
3818 3818 diffopts.git = True
3819 3819 operation = 'discard'
3820 3820 reversehunks = True
3821 3821 if node != parent:
3822 3822 operation = 'apply'
3823 3823 reversehunks = False
3824 3824 if reversehunks:
3825 3825 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3826 3826 else:
3827 3827 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3828 3828 originalchunks = patch.parsepatch(diff)
3829 3829
3830 3830 try:
3831 3831
3832 3832 chunks, opts = recordfilter(repo.ui, originalchunks,
3833 3833 operation=operation)
3834 3834 if reversehunks:
3835 3835 chunks = patch.reversehunks(chunks)
3836 3836
3837 3837 except error.PatchError as err:
3838 3838 raise error.Abort(_('error parsing patch: %s') % err)
3839 3839
3840 3840 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3841 3841 if tobackup is None:
3842 3842 tobackup = set()
3843 3843 # Apply changes
3844 3844 fp = stringio()
3845 3845 for c in chunks:
3846 3846 # Create a backup file only if this hunk should be backed up
3847 3847 if ishunk(c) and c.header.filename() in tobackup:
3848 3848 abs = c.header.filename()
3849 3849 target = repo.wjoin(abs)
3850 3850 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3851 3851 util.copyfile(target, bakname)
3852 3852 tobackup.remove(abs)
3853 3853 c.write(fp)
3854 3854 dopatch = fp.tell()
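# a non-zero offset means at least one selected hunk was written and
# there is something to apply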
3855 3855 fp.seek(0)
3856 3856 if dopatch:
3857 3857 try:
3858 3858 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3859 3859 except error.PatchError as err:
3860 3860 raise error.Abort(str(err))
3861 3861 del fp
3862 3862 else:
3863 3863 for f in actions['revert'][0]:
3864 3864 checkout(f)
3865 3865 if normal:
3866 3866 normal(f)
3867 3867
3868 3868 for f in actions['add'][0]:
3869 3869 # Don't checkout modified files; they are already created by the diff
3870 3870 if f not in newlyaddedandmodifiedfiles:
3871 3871 checkout(f)
3872 3872 repo.dirstate.add(f)
3873 3873
3874 3874 normal = repo.dirstate.normallookup
3875 3875 if node == parent and p2 == nullid:
3876 3876 normal = repo.dirstate.normal
3877 3877 for f in actions['undelete'][0]:
3878 3878 checkout(f)
3879 3879 normal(f)
3880 3880
3881 3881 copied = copies.pathcopies(repo[parent], ctx)
3882 3882
3883 3883 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3884 3884 if f in copied:
3885 3885 repo.dirstate.copy(copied[f], f)
3886 3886
3887 3887 class command(registrar.command):
3888 3888 """deprecated: used registrar.command instead"""
3889 3889 def _doregister(self, func, name, *args, **kwargs):
3890 3890 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3891 3891 return super(command, self)._doregister(func, name, *args, **kwargs)
3892 3892
3893 3893 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3894 3894 # commands.outgoing. "missing" is the "missing" attribute of the result of
3895 3895 # "findcommonoutgoing()"
3896 3896 outgoinghooks = util.hooks()
3897 3897
3898 3898 # a list of (ui, repo) functions called by commands.summary
3899 3899 summaryhooks = util.hooks()
3900 3900
3901 3901 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3902 3902 #
3903 3903 # functions should return tuple of booleans below, if 'changes' is None:
3904 3904 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3905 3905 #
3906 3906 # otherwise, 'changes' is a tuple of tuples below:
3907 3907 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3908 3908 # - (desturl, destbranch, destpeer, outgoing)
3909 3909 summaryremotehooks = util.hooks()
3910 3910
3911 3911 # A list of state files kept by multistep operations like graft.
3912 3912 # Since graft cannot be aborted, it is considered 'clearable' by update.
3913 3913 # note: bisect is intentionally excluded
3914 3914 # (state file, clearable, allowcommit, error, hint)
3915 3915 unfinishedstates = [
3916 3916 ('graftstate', True, False, _('graft in progress'),
3917 3917 _("use 'hg graft --continue' or 'hg update' to abort")),
3918 3918 ('updatestate', True, False, _('last update was interrupted'),
3919 3919 _("use 'hg update' to get a consistent checkout"))
3920 3920 ]
3921 3921
3922 3922 def checkunfinished(repo, commit=False):
3923 3923 '''Look for an unfinished multistep operation, like graft, and abort
3924 3924 if found. It's probably good to check this right before
3925 3925 bailifchanged().
3926 3926 '''
3927 3927 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3928 3928 if commit and allowcommit:
3929 3929 continue
3930 3930 if repo.vfs.exists(f):
3931 3931 raise error.Abort(msg, hint=hint)
3932 3932
3933 3933 def clearunfinished(repo):
3934 3934 '''Check for unfinished operations (as above), and clear the ones
3935 3935 that are clearable.
3936 3936 '''
3937 3937 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3938 3938 if not clearable and repo.vfs.exists(f):
3939 3939 raise error.Abort(msg, hint=hint)
3940 3940 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3941 3941 if clearable and repo.vfs.exists(f):
3942 3942 util.unlink(repo.vfs.join(f))
3943 3943
3944 3944 afterresolvedstates = [
3945 3945 ('graftstate',
3946 3946 _('hg graft --continue')),
3947 3947 ]
3948 3948
3949 3949 def howtocontinue(repo):
3950 3950 '''Check for an unfinished operation and return the command to finish
3951 3951 it.
3952 3952
3953 3953 afterresolvedstates tuples define a .hg/{file} and the corresponding
3954 3954 command needed to finish it.
3955 3955
3956 3956 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3957 3957 a boolean.
3958 3958 '''
3959 3959 contmsg = _("continue: %s")
3960 3960 for f, msg in afterresolvedstates:
3961 3961 if repo.vfs.exists(f):
3962 3962 return contmsg % msg, True
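# no unfinished-state file was found; if the working directory still has
# uncommitted changes, suggest a plain 'hg commit' instead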
3963 3963 if repo[None].dirty(missing=True, merge=False, branch=False):
3964 3964 return contmsg % _("hg commit"), False
3965 3965 return None, None
3966 3966
3967 3967 def checkafterresolved(repo):
3968 3968 '''Inform the user about the next action after completing hg resolve
3969 3969
3970 3970 If there's a matching afterresolvedstates, howtocontinue will yield
3971 3971 repo.ui.warn as the reporter.
3972 3972
3973 3973 Otherwise, it will yield repo.ui.note.
3974 3974 '''
3975 3975 msg, warning = howtocontinue(repo)
3976 3976 if msg is not None:
3977 3977 if warning:
3978 3978 repo.ui.warn("%s\n" % msg)
3979 3979 else:
3980 3980 repo.ui.note("%s\n" % msg)
3981 3981
3982 3982 def wrongtooltocontinue(repo, task):
3983 3983 '''Raise an abort suggesting how to properly continue if there is an
3984 3984 active task.
3985 3985
3986 3986 Uses howtocontinue() to find the active task.
3987 3987
3988 3988 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3989 3989 a hint.
3990 3990 '''
3991 3991 after = howtocontinue(repo)
3992 3992 hint = None
3993 3993 if after[1]:
3994 3994 hint = after[0]
3995 3995 raise error.Abort(_('no %s in progress') % task, hint=hint)