##// END OF EJS Templates
templater: register keywords to defaults table...
Yuya Nishihara -
r35499:817a3d20 default
parent child Browse files
Show More
@@ -1,3967 +1,3966
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 dagop,
30 30 dirstateguard,
31 31 encoding,
32 32 error,
33 33 formatter,
34 34 graphmod,
35 35 match as matchmod,
36 36 mdiff,
37 37 obsolete,
38 38 patch,
39 39 pathutil,
40 40 pycompat,
41 41 registrar,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 smartset,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
51 51 stringio = util.stringio
52 52
# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
165 165
def ishunk(x):
    """Return True if x is a record/crecord hunk instance."""
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)
169 169
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks introduce a new file and
    were added while recording (present in chunks but not originalchunks).
    """
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if (ishunk(chunk) and chunk.header.isnewfile()
            and chunk not in originalchunks):
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles
177 177
def parsealiases(cmd):
    """Split a command table key like "^log|history" into its alias names.

    The leading "^" (the "show in short help" marker) is dropped.
    """
    return cmd.lstrip("^").split("|")
180 180
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it later.
    """
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
193 193
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter originalhunks, via curses or the plain prompt.

    Returns whatever the chosen backend returns (selected chunks and any
    option updates).
    """
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)
206 206
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used for to build ui messages to indicate the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        # always restore the unwrapped ui.write, even if filtering aborted
        ui.write = oldwrite
    return newchunks, newopts
223 223
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Drive an interactive commit: filter hunks, apply them, commit.

    commitfunc is the non-interactive commit to delegate to; cmdsuggest is
    the command name to suggest when the ui is not interactive; backupall
    requests backing up every changed file rather than only the filtered
    ones; filterfn performs the actual hunk selection.
    """
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # header-less chunks have no files(); skip them
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # drop newly-added-and-modified files so the revert below
            # restores a clean baseline before the filtered patch applies
            for c in newlyaddedandmodifiedfiles:
                os.unlink(repo.wjoin(c))
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftover backups are harmless
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
406 406
class dirnode(object):
    """
    Represent a directory in user working copy with information required for
    the purpose of tersing its status.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct child of this directory

    subdirs is a dictionary of sub-directory name as the key and it's own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not direct child of this directory, we traverse to the
        directory of which this file is a direct child of and add the file
        there.
        """

        # the filename contains a path separator, it means it's not the direct
        # child of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exists
            if subdir not in self.subdirs:
                subdirpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, os.path.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to terse
        that status. -> yield (status, dirpath)

        2) Otherwise, we do following:

                a) Yield (status, filepath)  for all the files which are in this
                    directory (only the ones in this directory, not the subdirs)

                b) Recurse the function on all the subdirectories of this
                   directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath
506 506
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to `--terse`
    flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist
555 555
556 556 def _commentlines(raw):
557 557 '''Surround lineswith a comment char and a new line'''
558 558 lines = raw.splitlines()
559 559 commentedlines = ['# %s' % line for line in lines]
560 560 return '\n'.join(commentedlines) + '\n'
561 561
def _conflictsmsg(repo):
    """Return a commented message describing unresolved merge conflicts,
    or None when there is no active merge state."""
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            ['    %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved:  hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)
584 584
def _helpmessage(continuecmd, abortcmd):
    """Return a commented two-line continue/abort help message."""
    msg = _('To continue:    %s\n'
            'To abort:       %s') % (continuecmd, abortcmd)
    return _commentlines(msg)
589 589
def _rebasemsg():
    """Help message for an interrupted rebase."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592 592
def _histeditmsg():
    """Help message for an interrupted histedit."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595 595
def _unshelvemsg():
    """Help message for an interrupted unshelve."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598 598
def _updatecleanmsg(dest=None):
    """Return the 'hg update --clean' instruction (with data-loss warning)."""
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s    (%s)' % (dest or '.', warning)
602 602
def _graftmsg():
    """Help message for an interrupted graft."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
606 606
def _mergemsg():
    """Help message for an in-progress merge."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
610 610
def _bisectmsg():
    """Help message for an in-progress bisect."""
    msg = _('To mark the changeset good:    hg bisect --good\n'
            'To mark the changeset bad:     hg bisect --bad\n'
            'To abort:                      hg bisect --reset\n')
    return _commentlines(msg)
616 616
def fileexistspredicate(filename):
    """Return a predicate repo -> bool testing existence of filename in
    the repo's .hg directory (via repo.vfs)."""
    return lambda repo: repo.vfs.exists(filename)
619 619
def _mergepredicate(repo):
    """Return True when the working directory has two parents (a merge)."""
    return len(repo[None].parents()) > 1
622 622
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
636 636
def _getrepostate(repo):
    """Return the first matching (state, predicate, msgfn) from STATES,
    honoring the skip list; None when no unfinished state is detected."""
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
645 645
def morestatus(repo, fm):
    """Write unfinished-state information (state message, conflicts, help)
    to formatter fm when the repository is in an unfinished state."""
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n',  _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)
660 660
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # allow unambiguous prefix matches in non-strict mode
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
698 698
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match and UnknownCommand
    when none does.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
714 714
def findrepo(p):
    """Walk up from directory p looking for a '.hg' directory.

    Returns the repository root, or None when no ancestor contains '.hg'.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            # reached the filesystem root without finding a repo
            return None

    return p
722 722
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
740 740
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message
759 759
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
776 776
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
807 807
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        # no/empty/zero limit means unlimited
        limit = None
    return limit
821 821
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output filename pattern.

    Supported escapes: %% literal, %b repo basename, and (when the matching
    argument is provided) %H/%h/%R/%r node forms, %m sanitized desc,
    %N/%n total/sequence numbers, %s/%d/%p pathname forms.

    Raises Abort on an escape that has no registered expander.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        'm': lambda: re.sub(r'[^\w]', '_', desc or '')
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
867 867
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'
871 871
872 872 class _unclosablefile(object):
873 873 def __init__(self, fp):
874 874 self._fp = fp
875 875
876 876 def close(self):
877 877 pass
878 878
879 879 def __iter__(self):
880 880 return iter(self._fp)
881 881
882 882 def __getattr__(self, attr):
883 883 return getattr(self._fp, attr)
884 884
885 885 def __enter__(self):
886 886 return self
887 887
888 888 def __exit__(self, exc_type, exc_value, exc_tb):
889 889 pass
890 890
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the file named by expanding pat via makefilename.

    A stdio-like pat ('' or '-') yields an unclosable wrapper around the
    ui's stdout/stdin.  modemap, when given, maps filenames to open modes
    and is updated so a second write to the same file appends.
    """

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # subsequent writes to the same file should append
            modemap[fn] = 'ab'
    return open(fn, mode)
909 909
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening the revlog file directly from disk
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
954 954
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, when rename=True, move) working-directory files.

    ``pats`` holds the source patterns plus, as its last element, the
    destination.  Returns True if at least one copy failed, False
    otherwise (the value of ``errors != 0`` at the end).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) triples,
        # warning about (and skipping) unmanaged or removed files
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                # only complain for files the user named explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform a single copy/rename; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions (two sources mapping to the same target)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                # refuse to clobber; explain which flag would allow it
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after only records the operation; the target must exist
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # two-step rename so case-only renames work everywhere
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest
                    # when stripped at this length (higher = better guess)
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1200 1200
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' functions are run before the commit is made and are provided
# the following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided
# the following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1221 1221
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
        (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
        updatefunc(<repo>, <node>)

    Returns a (summary message, committed node or None, rejects flag)
    tuple.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract() found nothing to apply
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        # NOTE(review): prints 'message:\nNone\n' when the editor path is
        # taken — appears intentional for debugging
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            # --exact requires full metadata recorded in the patch header
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # normal path: patch the working directory, then commit
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    # --partial: keep going, but flag that hunks were rejected
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a fully-rejected patch still needs a (empty) commit
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: build the commit in memory without touching the
            # working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always remove the temp file patch.extract() wrote
        os.unlink(tmpname)
1392 1392
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1400 1400
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" via ``write``.

    ``write`` is a callable taking the text chunk plus optional keyword
    arguments (diff chunks pass ``label=``); the header lines are written
    unlabeled.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    # the default branch is implicit and never recorded in the header
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # let extensions append their own header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1433 1433
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                            the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # pick the write destination; when fntemplate is used, ``write`` is
    # rebound inside the loop for each per-revision output file
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        # '<unnamed>'/'<stdout>'-style pseudo names are not announced
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1492 1492
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False, hunksfilterfn=None):
    '''show diff or diffstat.

    Output goes to ``fp`` when given, otherwise to ``ui.write``.  With
    ``stat=True`` a diffstat summary is rendered instead of the diff
    itself.  When ``listsubrepos`` is set, matching subrepos are diffed
    recursively after the main repo.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs zero context and standard a/ b/ prefixes
        diffopts = diffopts.copy(context=0, noprefix=False)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
                            prefix=prefix, relroot=relroot,
                            hunksfilterfn=hunksfilterfn)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, opts=diffopts, prefix=prefix,
                                         relroot=relroot,
                                         hunksfilterfn=hunksfilterfn):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1551 1551
def _changesetlabels(ctx):
    """Compute the space-joined ui label string for a changeset line."""
    labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append('changeset.obsolete')
    if ctx.isunstable():
        labels.append('changeset.unstable')
        # one extra label per recorded instability kind
        labels.extend('instability.%s' % i for i in ctx.instabilities())
    return ' '.join(labels)
1561 1561
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev output buffers used by show()/flush() when buffered
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        # emit buffered header/hunk for ctx; returns 1 if a hunk was
        # written, 0 otherwise
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # avoid repeating an identical header for consecutive revs
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
             **props):
        # public entry point: render ctx, either directly or into the
        # per-rev buffer consumed later by flush()
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, hunksfilterfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, hunksfilterfn, props)

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            # quiet mode: just the changeset id
            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
                          label='log.node')
            return

        columns = self._columns
        self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(columns['branch'] % branch, label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            mrev = self.repo.manifestlog._revlog.rev(mnode)
            self.ui.write(columns['manifest']
                          % scmutil.formatrevnode(self.ui, mrev, mnode),
                          label='ui.debug log.manifest')
        self.ui.write(columns['user'] % ctx.user(), label='log.user')
        self.ui.write(columns['date'] % util.datestr(ctx.date()),
                      label='log.date')

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(columns['instability'] % ', '.join(instabilities),
                          label='log.instability')

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            # debug: show modified/added/removed file lists separately
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip(['files', 'files+', 'files-'], files):
                if value:
                    self.ui.write(columns[key] % " ".join(value),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(columns['files'] % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(columns['copies'] % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(columns['extra'] % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                self.ui.write(columns['summary'] % description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

    def _showobsfate(self, ctx):
        # render the successors ("obsfate") of an obsolete changeset
        obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(self._columns['obsolete'] % obsfateline,
                              label='log.obsfate')

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def showpatch(self, ctx, matchfn, hunksfilterfn=None):
        # emit diffstat and/or diff against p1, depending on diffopts
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True,
                               hunksfilterfn=hunksfilterfn)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False,
                               hunksfilterfn=hunksfilterfn)
            self.ui.write("\n")
1730 1730
class jsonchangeset(changeset_printer):
    '''format changeset information as a JSON array of objects.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening "[" has been emitted yet
        self._first = True

    def close(self):
        # emit "[]" for an empty result set, otherwise close the array
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working-directory context has no rev/node
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n "rev": %s') % jrev)
            self.ui.write((',\n "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n "rev": %s') % jrev)
        self.ui.write((',\n "node": %s') % jnode)
        self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n "manifest": %s') % jmanifestnode)

            self.ui.write((',\n "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status relative to the first parent: (modified, added, removed)
            files = ctx.p1().status(ctx)
            self.ui.write((',\n "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture diffstat output into a buffer so it can be escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1829 1829
class changeset_templater(changeset_printer):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changeset_templater for common cases. See functions such as:
    makelogtemplater, show_changeset, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        tres = formatter.templateresources(ui, repo)
        # template keywords are registered as defaults of the templater
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         defaults=templatekw.keywords,
                                         resources=tres,
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = tres['cache']  # shared with _graphnodeformatter()

        self._tref = tmplspec.ref
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props['ctx'] = ctx
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1920 1921
def logtemplatespec(tmpl, mapfile):
    """Build a templatespec for 'hg log'; a mapfile takes precedence."""
    if mapfile:
        return formatter.templatespec('changeset', tmpl, mapfile)
    else:
        return formatter.templatespec('', tmpl, None)
1926 1927
def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        else:
            style = util.expandpath(ui.config('ui', 'style'))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            # bare name: look it up in the templates directory first
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    if not tmpl:
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1954 1955
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'
    byte-string."""
    spec = logtemplatespec(tmpl, None)
    return changeset_templater(ui, repo, spec, buffered=buffered)
1960 1961
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    match = None
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)

    # 'json' is a reserved template name handled by a dedicated printer
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if not spec.ref and not spec.tmpl and not spec.mapfile:
        return changeset_printer(ui, repo, match, opts, buffered)

    return changeset_templater(ui, repo, spec, match, opts, buffered)
1986 1987
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    meta = marker.metadata().copy()
    # date is already displayed above; drop it from the metadata dict
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
2007 2008
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        # record revisions whose commit date matches the date spec
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
2028 2029
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes, doubling each time until sizelimit is reached.

    Once windowsize reaches sizelimit it is yielded forever; the
    generator never terminates on its own.
    """
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2
2034 2035
class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone."""
2037 2038
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for each file to inspect,
        # including rename sources discovered while following copies
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2134 2135
class _followfilter(object):
    """Incrementally decide whether revisions belong to a --follow walk.

    match() must be fed revisions monotonically moving away from the
    first revision seen (startrev): either all descendants (forward)
    or all ancestors (backward).
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # when True, only follow first parents (--follow-first)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2172 2173
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        # lazily list files of this rev that match
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2309 2310
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    # one-element list used as a mutable flag for lazy initialization
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
2337 2338
2338 2339 def _makenofollowlogfilematcher(repo, pats, opts):
2339 2340 '''hook for extensions to override the filematcher for non-follow cases'''
2340 2341 return None
2341 2342
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # maps a log option to (revset template, join operator for list values)
    opt2revset = {
        'no_merges':        ('not merge()', None),
        'only_merges':      ('merge()', None),
        '_ancestors':       ('ancestors(%(val)s)', None),
        '_fancestors':      ('_firstancestors(%(val)s)', None),
        '_descendants':     ('descendants(%(val)s)', None),
        '_fdescendants':    ('_firstdescendants(%(val)s)', None),
        '_matchfiles':      ('_matchfiles(%(val)s)', None),
        'date':             ('date(%(val)r)', None),
        'branch':           ('branch(%(val)r)', ' or '),
        '_patslog':         ('filelog(%(val)r)', ' or '),
        '_patsfollow':      ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword':          ('keyword(%(val)r)', ' or '),
        'prune':            ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user':             ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2492 2493
def _logrevs(repo, opts):
    """Return the (reversed) default revision range for 'hg log'."""
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # --follow from an empty working directory: nothing to show
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
2507 2508
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2538 2539
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # honor --limit by truncating the (lazy) revset
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2564 2565
def _parselinerangelogopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).
    """
    linerangebyfname = []
    for pat in opts.get('line_range', []):
        try:
            # the pattern is "FILE,FROMLINE:TOLINE"; split on the last comma
            # so commas in the file name are preserved
            pat, linerange = pat.rsplit(',', 1)
        except ValueError:
            raise error.Abort(_('malformatted line-range pattern %s') % pat)
        try:
            fromline, toline = map(int, linerange.split(':'))
        except ValueError:
            raise error.Abort(_("invalid line range for %s") % pat)
        msg = _("line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        linerangebyfname.append(
            (fname, util.processlinerange(fromline, toline)))
    return linerangebyfname
2584 2585
def getloglinerangerevs(repo, userrevs, opts):
    """Return (revs, filematcher, hunksfilter).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "filematcher(rev) -> match" is a factory function returning a match object
    for a given revision for file patterns specified in --line-range option.
    If neither --stat nor --patch options are passed, "filematcher" is None.

    "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
    returning a hunks filtering function.
    If neither --stat nor --patch options are passed, "filterhunks" is None.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(_('cannot follow file not in parent '
                                'revision: "%s"') % fname)
        fctx = wctx.filectx(fname)
        # walk the "block ancestors" of the requested range; each ancestor
        # contributes its own (possibly shifted) line range
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            # restrict to revisions the caller asked for
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(
                rev, {}).setdefault(
                    fctx.path(), []).append(linerange)

    filematcher = None
    hunksfilter = None
    if opts.get('patch') or opts.get('stat'):

        # identity filter used for revisions with no recorded line ranges
        def nofilterhunksfn(fctx, hunks):
            return hunks

        def hunksfilter(rev):
            fctxlineranges = linerangesbyrev.get(rev)
            if fctxlineranges is None:
                return nofilterhunksfn

            def filterfn(fctx, hunks):
                # keep only hunks intersecting one of the requested ranges
                lineranges = fctxlineranges.get(fctx.path())
                if lineranges is not None:
                    for hr, lines in hunks:
                        if hr is None: # binary
                            yield hr, lines
                            continue
                        if any(mdiff.hunkinrange(hr[2:], lr)
                               for lr in lineranges):
                            yield hr, lines
                else:
                    # file not tracked by --line-range: pass hunks through
                    for hunk in hunks:
                        yield hunk

            return filterfn

        def filematcher(rev):
            # match exactly the files with a line range recorded at this rev
            files = list(linerangesbyrev.get(rev, []))
            return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    return revs, filematcher, hunksfilter
2651 2652
def _graphnodeformatter(ui, displayer):
    """Return a callable mapping (repo, ctx) to the graph node symbol.

    When ui.graphnodetemplate is unset, the built-in {graphnode} keyword
    implementation is returned directly as a fast path. Otherwise the
    configured template is compiled once and rendered per revision.

    This block previously contained interleaved old/new lines from a diff
    (duplicate, conflicting statements); it is reconstructed here as the
    post-change version: keywords are passed as template defaults and the
    props dict is rebuilt fresh for each rendered node.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    tres = formatter.templateresources(ui)
    if isinstance(displayer, changeset_templater):
        tres['cache'] = displayer.cache  # reuse cache of slow templates
    templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords,
                                    resources=tres)
    def formatnode(repo, ctx):
        # revcache must be a fresh dict per revision so per-rev memoized
        # template data does not leak between nodes
        props = {'ctx': ctx, 'repo': repo, 'revcache': {}}
        return templ.render(props)
    return formatnode
2669 2668
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None, props=None):
    """Render revisions from 'dag' as an ASCII graph via 'displayer'.

    'edgefn' computes the graph edges for each node, 'getrenamed' (optional)
    reports copies/renames per file, 'filematcher' (optional) restricts the
    files detailed for each revision, and 'props' are extra template
    properties forwarded to the displayer.
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # ctx.rev() is falsy for rev 0 and None (wdir); renames are only
        # looked up for other revisions
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        edges = edgefn(type, char, state, rev, parents)
        # the first edge carries the graph width used when laying out text
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(ctx, copies=copies, matchfn=revmatchfn,
                       _graphwidth=width, **pycompat.strkwargs(props))
        # the displayer is buffered; pull the rendered text back out so it
        # can be merged with the graph columns
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
        lines = []
    displayer.close()
2722 2721
def graphlog(ui, repo, pats, opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to those of the log command.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    renamedfn = None
    if opts.get('copies'):
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        else:
            endrev = None
        renamedfn = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, renamedfn,
                 filematcher)
2739 2738
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph was supplied."""
    for badopt in ("newest_first",):
        if opts.get(badopt):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % badopt.replace("_", "-"))
2745 2744
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, apply any --limit, and return a graph DAG."""
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is not None:
        nodes = nodes[:maxcount]
    return graphmod.nodes(repo, nodes)
2752 2751
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matching 'match' for addition to the dirstate.

    'prefix' is the path from the repo root to the current (sub)repo, used
    for messages and subrepo recursion. When 'explicitonly' is true, only
    exactly-named files are added. Returns the list of files that could
    not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # collect names rejected by the matcher while still delegating to the
    # matcher's own bad-file handler
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for case-folding collisions with tracked files
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # recurse into subrepositories; explicitonly is propagated unless
    # --subrepos was requested
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2795 2794
def addwebdirpath(repo, serverpath, webconf):
    """Register repo.root in 'webconf' under 'serverpath', then recurse into
    every subrepository ever recorded in .hgsub."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug('adding %s = %s\n' % (serverpath, root))

    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2804 2803
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matching 'match' without deleting them.

    Returns (bad, forgot): files that could not be forgotten and files
    that were. 'prefix' locates this (sub)repo for messages and subrepo
    recursion; 'explicitonly' limits the operation to exactly-named files.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # report explicitly requested files that are not tracked at all
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2852 2851
def files(ui, ctx, m, fm, fmt, subrepos):
    """Emit the files of 'ctx' matching 'm' through formatter 'fm'.

    Returns 0 when at least one file was listed (here or in a subrepo),
    1 otherwise.
    """
    rev = ctx.rev()
    dirstate = ctx.repo().dirstate
    ret = 1

    for path in ctx.matches(m):
        # in the working copy, skip files marked for removal
        if rev is None and dirstate[path] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[path]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=path)
        fm.write('path', fmt, m.rel(path))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if not (subrepos or m.exact(subpath) or any(submatch.files())):
            continue
        sub = ctx.sub(subpath)
        try:
            recurse = m.exact(subpath) or subrepos
            if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                ret = 0
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % m.abs(subpath))

    return ret
2882 2881
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Schedule files matching 'm' for removal from tracking (and disk).

    'after' records already-deleted files instead of deleting; 'force'
    removes even modified/added files; 'subrepos' recurses into all
    subrepositories. 'warnings' is an optional shared list used during
    recursion; when None, warnings are printed by this top-level call.
    Returns 1 if any file could not be removed, else 0.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        # only the outermost call prints the accumulated warnings
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                        % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only record files already deleted from disk
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
3001 3000
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write the contents of files matching 'matcher' at revision 'ctx'.

    Output goes through formatter 'basefm', or to per-file destinations when
    'fntemplate' is set. Recurses into subrepositories. Returns 0 when at
    least one file was written, else 1.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # fall through to the full walk below
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                           % os.path.join(prefix, subpath))

    return err
3055 3054
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes

    Parses --date and the commit message from opts, builds a matcher from
    pats, optionally performs --addremove under a dirstate guard, and
    delegates the actual commit to commitfunc. Returns commitfunc's result.
    '''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    # the guard (when present) rolls back dirstate changes if the commit
    # fails; otherwise a null context manager keeps the with-block shape
    with dsguard or util.nullcontextmanager():
        if dsguard:
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))

        return commitfunc(ui, repo, message, matcher, opts)
3076 3075
def samefile(f, ctx1, ctx2):
    """Return True when file 'f' is identical (content and flags) in both
    contexts, or absent from both of them."""
    if f not in ctx1.manifest():
        # absent from ctx1: files are "the same" only if also absent in ctx2
        return f not in ctx2.manifest()
    a = ctx1.filectx(f)
    if f not in ctx2.manifest():
        return False
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
3088 3087
def amend(ui, repo, old, extra, pats, opts):
    """Replace changeset 'old' with a new commit folding in working-copy
    changes that match 'pats'/'opts'.

    Returns the node of the new changeset, or old.node() unchanged when
    the amend would be a no-op. Takes wlock, lock and a transaction.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = util.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
            subs, commitsubs, newsubstate = subrepo.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepo.writestate(repo, newsubstate)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # NOTE(review): 'old.p2' is a bound method and is therefore
            # always truthy; this branch always runs. Probably meant to
            # test whether a second parent exists (e.g. old.p2().rev()
            # != nullrev) -- confirm before changing.
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # and modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
3281 3280
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, launching the editor only when empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
3287 3286
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor pre-filled for committing 'ctx' and return
    the resulting message.

    The editor text comes from the most specific matching [committemplate]
    entry for 'editform' (falling back to a built-in layout). Raises
    error.Abort when the final message is empty or, with
    'unchangedmessagedetection', when the template text was left untouched.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try 'changeset.<editform parts>' config keys from most to least
    # specific, e.g. changeset.commit.amend -> changeset.commit -> changeset
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
3337 3336
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the [committemplate] template named 'ref' for 'ctx'."""
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    displayer = changeset_templater(ui, repo, spec, None, {}, False)
    overrides = ((k, templater.unquotestring(v))
                 for k, v in ui.configitems('committemplate'))
    displayer.t.cache.update(overrides)

    extramsg = extramsg or '' # ensure that extramsg is string

    ui.pushbuffer()
    displayer.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3351 3350
def hgprefix(msg):
    """Prefix each non-empty line of 'msg' with "HG: "."""
    nonempty = (line for line in msg.split("\n") if line)
    return "\n".join("HG: %s" % line for line in nonempty)
3354 3353
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default text shown in the editor when committing 'ctx'."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
3382 3381
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print user feedback after a commit of 'node' on 'branch'.

    Emits 'created new head' when the commit introduced an additional
    branch head, notes reopened closed heads, and in verbose/debug mode
    prints the committed changeset id.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for files matched by pats/opts."""
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
3433 3432
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files matched by pats/opts to their state in ctx.

    parents is the (p1, p2) pair of the working directory. Every matched
    file is classified (modified/added/removed/... relative both to ctx
    and to the dirstate parent), a dispatch table maps each class to an
    action and a backup strategy, and the actual filesystem work is then
    delegated to _performrevert().
    """
    opts = pycompat.byteskwargs(opts)
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            # suppress walk errors here; badfn below reports them once
            # against the target manifest walk instead
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # only warn for paths found in neither walk (and that are
                # not subrepos or directory prefixes of known files)
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present
        # at the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set, to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that result in a file change on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, bytes):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats,
                                         **pycompat.strkwargs(opts))
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3718 3717
3719 3718 def _revertprefetch(repo, ctx, *files):
3720 3719 """Let extension changing the storage layer prefetch content"""
3721 3720
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """Actually perform all the actions computed for revert.

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user chose to skip in interactive mode; fed back into the
    # matcher so the diff below excludes them
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write the target revision's content of f into the working dir
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        # reverting to the parent means applying the reverse of the local
        # diff; reverting to another revision applies the forward diff
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3858 3857
class command(registrar.command):
    """deprecated: use registrar.command instead"""
    def _doregister(self, func, name, *args, **kwargs):
        # mark the function so extensions.py can emit a deprecation
        # warning when this legacy registration path is used
        func._deprecatedregistrar = True  # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)
3864 3863
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3893 3892
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        # states flagged allowcommit do not block a commit
        if commit and allowcommit:
            continue
        if repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3904 3903
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: refuse to do anything while a non-clearable state exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: delete every clearable state file that is present
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3915 3914
# (state file, command to continue the interrupted operation) pairs
# consulted by howtocontinue() to hint the user after 'hg resolve'
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3920 3919
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _("continue: %s")
    # an interrupted multistep operation takes precedence
    for statefile, continuecmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % continuecmd, True
    # otherwise, a dirty working directory suggests a plain commit
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _("hg commit"), False
    return None, None
3938 3937
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3953 3952
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only warning-level continuations are worth hinting about
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,838 +1,838
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import re
12 12 import tempfile
13 13
14 14 from .i18n import _
15 15 from .node import nullid, short
16 16
17 17 from . import (
18 18 encoding,
19 19 error,
20 20 formatter,
21 21 match,
22 22 pycompat,
23 23 registrar,
24 24 scmutil,
25 25 simplemerge,
26 26 tagmerge,
27 27 templatekw,
28 28 templater,
29 29 util,
30 30 )
31 31
32 32 def _toolstr(ui, tool, part, *args):
33 33 return ui.config("merge-tools", tool + "." + part, *args)
34 34
35 35 def _toolbool(ui, tool, part,*args):
36 36 return ui.configbool("merge-tools", tool + "." + part, *args)
37 37
38 38 def _toollist(ui, tool, part):
39 39 return ui.configlist("merge-tools", tool + "." + part)
40 40
# internal merge tool name -> implementation; consulted by _findtool()
# and _picktool() below
internals = {}
# Merge tools to document.
internalsdoc = {}

# decorator used to register the @internaltool functions below
internaltool = registrar.internalmerge()

# internal tool merge types
nomerge = internaltool.nomerge
mergeonly = internaltool.mergeonly # just the full merge, no premerge
fullmerge = internaltool.fullmerge # both premerge and merge

# prompt shown for a change/delete conflict where the local side changed
# ('$$'-separated choices drive ui.promptchoice)
_localchangedotherdeletedmsg = _(
    "local%(l)s changed %(fd)s which other%(o)s deleted\n"
    "use (c)hanged version, (d)elete, or leave (u)nresolved?"
    "$$ &Changed $$ &Delete $$ &Unresolved")

# prompt shown for a change/delete conflict where the local side deleted
_otherchangedlocaldeletedmsg = _(
    "other%(o)s changed %(fd)s which local%(l)s deleted\n"
    "use (c)hanged version, leave (d)eleted, or "
    "leave (u)nresolved?"
    "$$ &Changed $$ &Deleted $$ &Unresolved")
62 62
class absentfilectx(object):
    """A stand-in filectx for a file that a context ostensibly contains
    but actually does not.

    This is here because it's very specific to the filemerge code for now --
    other code is likely going to break with the values this returns."""

    # tell callers that cmp() below has non-standard semantics
    _customcmp = True

    def __init__(self, ctx, f):
        self._ctx = ctx
        self._f = f

    def path(self):
        return self._f

    def size(self):
        return None

    def data(self):
        return None

    def filenode(self):
        return nullid

    def cmp(self, fctx):
        """compare with other file context

        returns True if different from fctx.
        """
        # equal only to another absent file at the same path in the same
        # context
        # NOTE(review): self.ctx() is not defined on this class (only
        # changectx() is) -- comparing two absentfilectx objects would
        # raise AttributeError; confirm against callers.
        return not (fctx.isabsent() and
                    fctx.ctx() == self.ctx() and
                    fctx.path() == self.path())

    def flags(self):
        return ''

    def changectx(self):
        return self._ctx

    def isbinary(self):
        return False

    def isabsent(self):
        return True
106 106
def _findtool(ui, tool):
    """Resolve *tool*: internal tools resolve to their own name, anything
    else is looked up as an external executable (or None if not found)."""
    return tool if tool in internals else findexternaltool(ui, tool)
111 111
def findexternaltool(ui, tool):
    """Locate the executable for an external merge *tool*.

    The Windows registry keys configured as ``regkey``/``regkeyalt`` (with
    ``regname``/``regappend``) are consulted first; otherwise the
    ``executable`` knob (defaulting to the tool name) is resolved via the
    search path. Returns the path or None."""
    for keyname in ("regkey", "regkeyalt"):
        regkey = _toolstr(ui, tool, keyname)
        if not regkey:
            continue
        candidate = util.lookupreg(regkey, _toolstr(ui, tool, "regname"))
        if candidate:
            candidate = util.findexe(
                candidate + _toolstr(ui, tool, "regappend", ""))
            if candidate:
                return candidate
    exe = _toolstr(ui, tool, "executable", tool)
    return util.findexe(util.expandpath(exe))
124 124
def _picktool(repo, ui, path, binary, symlink, changedelete):
    """Pick a merge tool for *path* and return a (tool, toolpath) pair.

    binary/symlink/changedelete describe the conflict; tools that cannot
    handle it are filtered out. Precedence: ui.forcemerge, $HGMERGE,
    [merge-patterns], [merge-tools] ordered by priority, then the
    internal ':prompt'/':merge' fallbacks.
    """
    def supportscd(tool):
        # only internal nomerge tools can resolve change/delete conflicts
        return tool in internals and internals[tool].mergetype == nomerge

    def check(tool, pat, symlink, binary, changedelete):
        # can this tool be found and does it handle this kind of conflict?
        tmsg = tool
        if pat:
            tmsg = _("%s (for pattern %s)") % (tool, pat)
        if not _findtool(ui, tool):
            if pat: # explicitly requested tool deserves a warning
                ui.warn(_("couldn't find merge tool %s\n") % tmsg)
            else: # configured but non-existing tools are more silent
                ui.note(_("couldn't find merge tool %s\n") % tmsg)
        elif symlink and not _toolbool(ui, tool, "symlink"):
            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
        elif binary and not _toolbool(ui, tool, "binary"):
            ui.warn(_("tool %s can't handle binary\n") % tmsg)
        elif changedelete and not supportscd(tool):
            # the nomerge tools are the only tools that support change/delete
            # conflicts
            pass
        elif not util.gui() and _toolbool(ui, tool, "gui"):
            ui.warn(_("tool %s requires a GUI\n") % tmsg)
        else:
            return True
        return False

    # internal config: ui.forcemerge
    # forcemerge comes from command line arguments, highest priority
    force = ui.config('ui', 'forcemerge')
    if force:
        toolpath = _findtool(ui, force)
        if changedelete and not supportscd(toolpath):
            return ":prompt", None
        else:
            if toolpath:
                return (force, util.shellquote(toolpath))
            else:
                # mimic HGMERGE if given tool not found
                return (force, force)

    # HGMERGE takes next precedence
    hgmerge = encoding.environ.get("HGMERGE")
    if hgmerge:
        if changedelete and not supportscd(hgmerge):
            return ":prompt", None
        else:
            return (hgmerge, hgmerge)

    # then patterns
    for pat, tool in ui.configitems("merge-patterns"):
        mf = match.match(repo.root, '', [pat])
        if mf(path) and check(tool, pat, symlink, False, changedelete):
            toolpath = _findtool(ui, tool)
            return (tool, util.shellquote(toolpath))

    # then merge tools
    tools = {}
    disabled = set()
    for k, v in ui.configitems("merge-tools"):
        t = k.split('.')[0]
        if t not in tools:
            tools[t] = int(_toolstr(ui, t, "priority"))
        if _toolbool(ui, t, "disabled"):
            disabled.add(t)
    names = tools.keys()
    tools = sorted([(-p, tool) for tool, p in tools.items()
                    if tool not in disabled])
    uimerge = ui.config("ui", "merge")
    if uimerge:
        # external tools defined in uimerge won't be able to handle
        # change/delete conflicts
        if uimerge not in names and not changedelete:
            return (uimerge, uimerge)
        tools.insert(0, (None, uimerge)) # highest priority
    tools.append((None, "hgmerge")) # the old default, if found
    for p, t in tools:
        if check(t, None, symlink, binary, changedelete):
            toolpath = _findtool(ui, t)
            return (t, util.shellquote(toolpath))

    # internal merge or prompt as last resort
    if symlink or binary or changedelete:
        if not changedelete and len(tools):
            # any tool is rejected by capability for symlink or binary
            ui.warn(_("no tool found to merge %s\n") % path)
        return ":prompt", None
    return ":merge", None
213 213
214 214 def _eoltype(data):
215 215 "Guess the EOL type of a file"
216 216 if '\0' in data: # binary
217 217 return None
218 218 if '\r\n' in data: # Windows
219 219 return '\r\n'
220 220 if '\r' in data: # Old Mac
221 221 return '\r'
222 222 if '\n' in data: # UNIX
223 223 return '\n'
224 224 return None # unknown
225 225
def _matcheol(file, back):
    "Convert EOL markers in a file to match origfile"
    tostyle = _eoltype(back.data()) # No repo.wread filters?
    if not tostyle:
        return
    data = util.readfile(file)
    fromstyle = _eoltype(data)
    if not fromstyle:
        return
    newdata = data.replace(fromstyle, tostyle)
    if newdata != data:
        util.writefile(file, newdata)
236 236
@internaltool('prompt', nomerge)
def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Asks the user which of the local `p1()` or the other `p2()` version to
    keep as the merged version."""
    ui = repo.ui
    fd = fcd.path()

    # Avoid prompting during an in-memory merge since it doesn't support merge
    # conflicts.
    if fcd.changectx().isinmemory():
        raise error.InMemoryMergeConflictsError('in-memory merge does not '
                                                'support file conflicts')

    prompts = partextras(labels)
    prompts['fd'] = fd
    try:
        # change/delete conflicts get a dedicated prompt with the deleted
        # side named explicitly; note the answer order differs between the
        # two variants so that 'changed' is always the first choice
        if fco.isabsent():
            index = ui.promptchoice(
                _localchangedotherdeletedmsg % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]
        elif fcd.isabsent():
            index = ui.promptchoice(
                _otherchangedlocaldeletedmsg % prompts, 2)
            choice = ['other', 'local', 'unresolved'][index]
        else:
            index = ui.promptchoice(
                _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
                  " for %(fd)s?"
                  "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]

        # delegate to the corresponding internal nomerge tool
        if choice == 'other':
            return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'local':
            return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'unresolved':
            return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                          labels)
    except error.ResponseExpected:
        # no answer available (e.g. non-interactive ui): leave unresolved
        ui.write("\n")
        return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                      labels)
281 281
@internaltool('local', nomerge)
def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the local `p1()` version of files as the merged version."""
    # nothing to write: the working copy already holds the local version;
    # report a deletion when the local side is absent
    deleted = fcd.isabsent()
    return 0, deleted
286 286
@internaltool('other', nomerge)
def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the other `p2()` version of files as the merged version."""
    if not fco.isabsent():
        # materialize the other side's content and flags
        _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
        return 0, False
    # local changed, remote deleted -- 'deleted' picked
    _underlyingfctxifabsent(fcd).remove()
    return 0, True
298 298
@internaltool('fail', nomerge)
def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """
    Rather than attempting to merge files that were modified on both
    branches, it marks them as unresolved. The resolve command must be
    used to resolve these conflicts."""
    # for change/delete conflicts write out the changed version, then fail
    localabsent = fcd.isabsent()
    if localabsent:
        _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
    return 1, False
309 309
310 310 def _underlyingfctxifabsent(filectx):
311 311 """Sometimes when resolving, our fcd is actually an absentfilectx, but
312 312 we want to write to it (to do the resolve). This helper returns the
313 313 underyling workingfilectx in that case.
314 314 """
315 315 if filectx.isabsent():
316 316 return filectx.changectx()[filectx.path()]
317 317 else:
318 318 return filectx
319 319
320 320 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
321 321 tool, toolpath, binary, symlink = toolconf
322 322 if symlink or fcd.isabsent() or fco.isabsent():
323 323 return 1
324 324 unused, unused, unused, back = files
325 325
326 326 ui = repo.ui
327 327
328 328 validkeep = ['keep', 'keep-merge3']
329 329
330 330 # do we attempt to simplemerge first?
331 331 try:
332 332 premerge = _toolbool(ui, tool, "premerge", not binary)
333 333 except error.ConfigError:
334 334 premerge = _toolstr(ui, tool, "premerge", "").lower()
335 335 if premerge not in validkeep:
336 336 _valid = ', '.join(["'" + v + "'" for v in validkeep])
337 337 raise error.ConfigError(_("%s.premerge not valid "
338 338 "('%s' is neither boolean nor %s)") %
339 339 (tool, premerge, _valid))
340 340
341 341 if premerge:
342 342 if premerge == 'keep-merge3':
343 343 if not labels:
344 344 labels = _defaultconflictlabels
345 345 if len(labels) < 3:
346 346 labels.append('base')
347 347 r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
348 348 if not r:
349 349 ui.debug(" premerge successful\n")
350 350 return 0
351 351 if premerge not in validkeep:
352 352 # restore from backup and try again
353 353 _restorebackup(fcd, back)
354 354 return 1 # continue merging
355 355
356 356 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
357 357 tool, toolpath, binary, symlink = toolconf
358 358 if symlink:
359 359 repo.ui.warn(_('warning: internal %s cannot merge symlinks '
360 360 'for %s\n') % (tool, fcd.path()))
361 361 return False
362 362 if fcd.isabsent() or fco.isabsent():
363 363 repo.ui.warn(_('warning: internal %s cannot merge change/delete '
364 364 'conflict for %s\n') % (tool, fcd.path()))
365 365 return False
366 366 return True
367 367
368 368 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
369 369 """
370 370 Uses the internal non-interactive simple merge algorithm for merging
371 371 files. It will fail if there are any conflicts and leave markers in
372 372 the partially merged file. Markers will have two sections, one for each side
373 373 of merge, unless mode equals 'union' which suppresses the markers."""
374 374 ui = repo.ui
375 375
376 376 r = simplemerge.simplemerge(ui, fcd, fca, fco, label=labels, mode=mode)
377 377 return True, r, False
378 378
379 379 @internaltool('union', fullmerge,
380 380 _("warning: conflicts while merging %s! "
381 381 "(edit, then use 'hg resolve --mark')\n"),
382 382 precheck=_mergecheck)
383 383 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
384 384 """
385 385 Uses the internal non-interactive simple merge algorithm for merging
386 386 files. It will use both left and right sides for conflict regions.
387 387 No markers are inserted."""
388 388 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
389 389 files, labels, 'union')
390 390
391 391 @internaltool('merge', fullmerge,
392 392 _("warning: conflicts while merging %s! "
393 393 "(edit, then use 'hg resolve --mark')\n"),
394 394 precheck=_mergecheck)
395 395 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
396 396 """
397 397 Uses the internal non-interactive simple merge algorithm for merging
398 398 files. It will fail if there are any conflicts and leave markers in
399 399 the partially merged file. Markers will have two sections, one for each side
400 400 of merge."""
401 401 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
402 402 files, labels, 'merge')
403 403
404 404 @internaltool('merge3', fullmerge,
405 405 _("warning: conflicts while merging %s! "
406 406 "(edit, then use 'hg resolve --mark')\n"),
407 407 precheck=_mergecheck)
408 408 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
409 409 """
410 410 Uses the internal non-interactive simple merge algorithm for merging
411 411 files. It will fail if there are any conflicts and leave markers in
412 412 the partially merged file. Marker will have three sections, one from each
413 413 side of the merge and one for the base content."""
414 414 if not labels:
415 415 labels = _defaultconflictlabels
416 416 if len(labels) < 3:
417 417 labels.append('base')
418 418 return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
419 419
420 420 def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
421 421 labels=None, localorother=None):
422 422 """
423 423 Generic driver for _imergelocal and _imergeother
424 424 """
425 425 assert localorother is not None
426 426 tool, toolpath, binary, symlink = toolconf
427 427 r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
428 428 localorother=localorother)
429 429 return True, r
430 430
431 431 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
432 432 def _imergelocal(*args, **kwargs):
433 433 """
434 434 Like :merge, but resolve all conflicts non-interactively in favor
435 435 of the local `p1()` changes."""
436 436 success, status = _imergeauto(localorother='local', *args, **kwargs)
437 437 return success, status, False
438 438
439 439 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
440 440 def _imergeother(*args, **kwargs):
441 441 """
442 442 Like :merge, but resolve all conflicts non-interactively in favor
443 443 of the other `p2()` changes."""
444 444 success, status = _imergeauto(localorother='other', *args, **kwargs)
445 445 return success, status, False
446 446
447 447 @internaltool('tagmerge', mergeonly,
448 448 _("automatic tag merging of %s failed! "
449 449 "(use 'hg resolve --tool :merge' or another merge "
450 450 "tool of your choice)\n"))
451 451 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
452 452 """
453 453 Uses the internal tag merge algorithm (experimental).
454 454 """
455 455 success, status = tagmerge.merge(repo, fcd, fco, fca)
456 456 return success, status, False
457 457
458 458 @internaltool('dump', fullmerge)
459 459 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
460 460 """
461 461 Creates three versions of the files to merge, containing the
462 462 contents of local, other and base. These files can then be used to
463 463 perform a merge manually. If the file to be merged is named
464 464 ``a.txt``, these files will accordingly be named ``a.txt.local``,
465 465 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
466 466 same directory as ``a.txt``.
467 467
468 468 This implies premerge. Therefore, files aren't dumped, if premerge
469 469 runs successfully. Use :forcedump to forcibly write files out.
470 470 """
471 471 a = _workingpath(repo, fcd)
472 472 fd = fcd.path()
473 473
474 474 from . import context
475 475 if isinstance(fcd, context.overlayworkingfilectx):
476 476 raise error.InMemoryMergeConflictsError('in-memory merge does not '
477 477 'support the :dump tool.')
478 478
479 479 util.writefile(a + ".local", fcd.decodeddata())
480 480 repo.wwrite(fd + ".other", fco.data(), fco.flags())
481 481 repo.wwrite(fd + ".base", fca.data(), fca.flags())
482 482 return False, 1, False
483 483
484 484 @internaltool('forcedump', mergeonly)
485 485 def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
486 486 labels=None):
487 487 """
488 488 Creates three versions of the files as same as :dump, but omits premerge.
489 489 """
490 490 return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
491 491 labels=labels)
492 492
493 493 def _xmergeimm(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
494 494 # In-memory merge simply raises an exception on all external merge tools,
495 495 # for now.
496 496 #
497 497 # It would be possible to run most tools with temporary files, but this
498 498 # raises the question of what to do if the user only partially resolves the
499 499 # file -- we can't leave a merge state. (Copy to somewhere in the .hg/
500 500 # directory and tell the user how to get it is my best idea, but it's
501 501 # clunky.)
502 502 raise error.InMemoryMergeConflictsError('in-memory merge does not support '
503 503 'external merge tools')
504 504
505 505 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
506 506 tool, toolpath, binary, symlink = toolconf
507 507 if fcd.isabsent() or fco.isabsent():
508 508 repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
509 509 'for %s\n') % (tool, fcd.path()))
510 510 return False, 1, None
511 511 unused, unused, unused, back = files
512 512 a = _workingpath(repo, fcd)
513 513 b, c = _maketempfiles(repo, fco, fca)
514 514 try:
515 515 out = ""
516 516 env = {'HG_FILE': fcd.path(),
517 517 'HG_MY_NODE': short(mynode),
518 518 'HG_OTHER_NODE': str(fco.changectx()),
519 519 'HG_BASE_NODE': str(fca.changectx()),
520 520 'HG_MY_ISLINK': 'l' in fcd.flags(),
521 521 'HG_OTHER_ISLINK': 'l' in fco.flags(),
522 522 'HG_BASE_ISLINK': 'l' in fca.flags(),
523 523 }
524 524 ui = repo.ui
525 525
526 526 args = _toolstr(ui, tool, "args")
527 527 if "$output" in args:
528 528 # read input from backup, write to original
529 529 out = a
530 530 a = repo.wvfs.join(back.path())
531 531 replace = {'local': a, 'base': b, 'other': c, 'output': out}
532 532 args = util.interpolate(r'\$', replace, args,
533 533 lambda s: util.shellquote(util.localpath(s)))
534 534 cmd = toolpath + ' ' + args
535 535 if _toolbool(ui, tool, "gui"):
536 536 repo.ui.status(_('running merge tool %s for file %s\n') %
537 537 (tool, fcd.path()))
538 538 repo.ui.debug('launching merge tool: %s\n' % cmd)
539 539 r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
540 540 repo.ui.debug('merge tool returned: %d\n' % r)
541 541 return True, r, False
542 542 finally:
543 543 util.unlink(b)
544 544 util.unlink(c)
545 545
546 546 def _formatconflictmarker(ctx, template, label, pad):
547 547 """Applies the given template to the ctx, prefixed by the label.
548 548
549 549 Pad is the minimum width of the label prefix, so that multiple markers
550 550 can have aligned templated parts.
551 551 """
552 552 if ctx.node() is None:
553 553 ctx = ctx.p1()
554 554
555 props = templatekw.keywords.copy()
556 props['ctx'] = ctx
555 props = {'ctx': ctx}
557 556 templateresult = template.render(props)
558 557
559 558 label = ('%s:' % label).ljust(pad + 1)
560 559 mark = '%s %s' % (label, templateresult)
561 560
562 561 if mark:
563 562 mark = mark.splitlines()[0] # split for safety
564 563
565 564 # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
566 565 return util.ellipsis(mark, 80 - 8)
567 566
568 567 _defaultconflictlabels = ['local', 'other']
569 568
570 569 def _formatlabels(repo, fcd, fco, fca, labels):
571 570 """Formats the given labels using the conflict marker template.
572 571
573 572 Returns a list of formatted labels.
574 573 """
575 574 cd = fcd.changectx()
576 575 co = fco.changectx()
577 576 ca = fca.changectx()
578 577
579 578 ui = repo.ui
580 579 template = ui.config('ui', 'mergemarkertemplate')
581 580 template = templater.unquotestring(template)
582 581 tres = formatter.templateresources(ui, repo)
583 tmpl = formatter.maketemplater(ui, template, resources=tres)
582 tmpl = formatter.maketemplater(ui, template, defaults=templatekw.keywords,
583 resources=tres)
584 584
585 585 pad = max(len(l) for l in labels)
586 586
587 587 newlabels = [_formatconflictmarker(cd, tmpl, labels[0], pad),
588 588 _formatconflictmarker(co, tmpl, labels[1], pad)]
589 589 if len(labels) > 2:
590 590 newlabels.append(_formatconflictmarker(ca, tmpl, labels[2], pad))
591 591 return newlabels
592 592
593 593 def partextras(labels):
594 594 """Return a dictionary of extra labels for use in prompts to the user
595 595
596 596 Intended use is in strings of the form "(l)ocal%(l)s".
597 597 """
598 598 if labels is None:
599 599 return {
600 600 "l": "",
601 601 "o": "",
602 602 }
603 603
604 604 return {
605 605 "l": " [%s]" % labels[0],
606 606 "o": " [%s]" % labels[1],
607 607 }
608 608
609 609 def _restorebackup(fcd, back):
610 610 # TODO: Add a workingfilectx.write(otherfilectx) path so we can use
611 611 # util.copy here instead.
612 612 fcd.write(back.data(), fcd.flags())
613 613
614 614 def _makebackup(repo, ui, wctx, fcd, premerge):
615 615 """Makes and returns a filectx-like object for ``fcd``'s backup file.
616 616
617 617 In addition to preserving the user's pre-existing modifications to `fcd`
618 618 (if any), the backup is used to undo certain premerges, confirm whether a
619 619 merge changed anything, and determine what line endings the new file should
620 620 have.
621 621 """
622 622 if fcd.isabsent():
623 623 return None
624 624 # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset ->
625 625 # merge -> filemerge). (I suspect the fileset import is the weakest link)
626 626 from . import context
627 627 a = _workingpath(repo, fcd)
628 628 back = scmutil.origpath(ui, repo, a)
629 629 inworkingdir = (back.startswith(repo.wvfs.base) and not
630 630 back.startswith(repo.vfs.base))
631 631
632 632 if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir:
633 633 # If the backup file is to be in the working directory, and we're
634 634 # merging in-memory, we must redirect the backup to the memory context
635 635 # so we don't disturb the working directory.
636 636 relpath = back[len(repo.wvfs.base) + 1:]
637 637 wctx[relpath].write(fcd.data(), fcd.flags())
638 638 return wctx[relpath]
639 639 else:
640 640 # Otherwise, write to wherever the user specified the backups should go.
641 641 #
642 642 # A arbitraryfilectx is returned, so we can run the same functions on
643 643 # the backup context regardless of where it lives.
644 644 if premerge:
645 645 util.copyfile(a, back)
646 646 return context.arbitraryfilectx(back, repo=repo)
647 647
648 648 def _maketempfiles(repo, fco, fca):
649 649 """Writes out `fco` and `fca` as temporary files, so an external merge
650 650 tool may use them.
651 651 """
652 652 def temp(prefix, ctx):
653 653 fullbase, ext = os.path.splitext(ctx.path())
654 654 pre = "%s~%s." % (os.path.basename(fullbase), prefix)
655 655 (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
656 656 data = repo.wwritedata(ctx.path(), ctx.data())
657 657 f = os.fdopen(fd, pycompat.sysstr("wb"))
658 658 f.write(data)
659 659 f.close()
660 660 return name
661 661
662 662 b = temp("base", fca)
663 663 c = temp("other", fco)
664 664
665 665 return b, c
666 666
667 667 def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
668 668 """perform a 3-way merge in the working directory
669 669
670 670 premerge = whether this is a premerge
671 671 mynode = parent node before merge
672 672 orig = original local filename before merge
673 673 fco = other file context
674 674 fca = ancestor file context
675 675 fcd = local file context for current/destination file
676 676
677 677 Returns whether the merge is complete, the return value of the merge, and
678 678 a boolean indicating whether the file was deleted from disk."""
679 679
680 680 if not fco.cmp(fcd): # files identical?
681 681 return True, None, False
682 682
683 683 ui = repo.ui
684 684 fd = fcd.path()
685 685 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
686 686 symlink = 'l' in fcd.flags() + fco.flags()
687 687 changedelete = fcd.isabsent() or fco.isabsent()
688 688 tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
689 689 if tool in internals and tool.startswith('internal:'):
690 690 # normalize to new-style names (':merge' etc)
691 691 tool = tool[len('internal'):]
692 692 ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
693 693 % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
694 694 pycompat.bytestr(changedelete)))
695 695
696 696 if tool in internals:
697 697 func = internals[tool]
698 698 mergetype = func.mergetype
699 699 onfailure = func.onfailure
700 700 precheck = func.precheck
701 701 else:
702 702 if wctx.isinmemory():
703 703 func = _xmergeimm
704 704 else:
705 705 func = _xmerge
706 706 mergetype = fullmerge
707 707 onfailure = _("merging %s failed!\n")
708 708 precheck = None
709 709
710 710 toolconf = tool, toolpath, binary, symlink
711 711
712 712 if mergetype == nomerge:
713 713 r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
714 714 return True, r, deleted
715 715
716 716 if premerge:
717 717 if orig != fco.path():
718 718 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
719 719 else:
720 720 ui.status(_("merging %s\n") % fd)
721 721
722 722 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
723 723
724 724 if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
725 725 toolconf):
726 726 if onfailure:
727 727 if wctx.isinmemory():
728 728 raise error.InMemoryMergeConflictsError('in-memory merge does '
729 729 'not support merge '
730 730 'conflicts')
731 731 ui.warn(onfailure % fd)
732 732 return True, 1, False
733 733
734 734 back = _makebackup(repo, ui, wctx, fcd, premerge)
735 735 files = (None, None, None, back)
736 736 r = 1
737 737 try:
738 738 markerstyle = ui.config('ui', 'mergemarkers')
739 739 if not labels:
740 740 labels = _defaultconflictlabels
741 741 if markerstyle != 'basic':
742 742 labels = _formatlabels(repo, fcd, fco, fca, labels)
743 743
744 744 if premerge and mergetype == fullmerge:
745 745 r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
746 746 # complete if premerge successful (r is 0)
747 747 return not r, r, False
748 748
749 749 needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
750 750 toolconf, files, labels=labels)
751 751
752 752 if needcheck:
753 753 r = _check(repo, r, ui, tool, fcd, files)
754 754
755 755 if r:
756 756 if onfailure:
757 757 if wctx.isinmemory():
758 758 raise error.InMemoryMergeConflictsError('in-memory merge '
759 759 'does not support '
760 760 'merge conflicts')
761 761 ui.warn(onfailure % fd)
762 762 _onfilemergefailure(ui)
763 763
764 764 return True, r, deleted
765 765 finally:
766 766 if not r and back is not None:
767 767 back.remove()
768 768
769 769 def _haltmerge():
770 770 msg = _('merge halted after failed merge (see hg resolve)')
771 771 raise error.InterventionRequired(msg)
772 772
773 773 def _onfilemergefailure(ui):
774 774 action = ui.config('merge', 'on-failure')
775 775 if action == 'prompt':
776 776 msg = _('continue merge operation (yn)?' '$$ &Yes $$ &No')
777 777 if ui.promptchoice(msg, 0) == 1:
778 778 _haltmerge()
779 779 if action == 'halt':
780 780 _haltmerge()
781 781 # default action is 'continue', in which case we neither prompt nor halt
782 782
783 783 def _check(repo, r, ui, tool, fcd, files):
784 784 fd = fcd.path()
785 785 unused, unused, unused, back = files
786 786
787 787 if not r and (_toolbool(ui, tool, "checkconflicts") or
788 788 'conflicts' in _toollist(ui, tool, "check")):
789 789 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
790 790 re.MULTILINE):
791 791 r = 1
792 792
793 793 checked = False
794 794 if 'prompt' in _toollist(ui, tool, "check"):
795 795 checked = True
796 796 if ui.promptchoice(_("was merge of '%s' successful (yn)?"
797 797 "$$ &Yes $$ &No") % fd, 1):
798 798 r = 1
799 799
800 800 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
801 801 'changed' in
802 802 _toollist(ui, tool, "check")):
803 803 if back is not None and not fcd.cmp(back):
804 804 if ui.promptchoice(_(" output file %s appears unchanged\n"
805 805 "was merge successful (yn)?"
806 806 "$$ &Yes $$ &No") % fd, 1):
807 807 r = 1
808 808
809 809 if back is not None and _toolbool(ui, tool, "fixeol"):
810 810 _matcheol(_workingpath(repo, fcd), back)
811 811
812 812 return r
813 813
814 814 def _workingpath(repo, ctx):
815 815 return repo.wjoin(ctx.path())
816 816
817 817 def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
818 818 return _filemerge(True, repo, wctx, mynode, orig, fcd, fco, fca,
819 819 labels=labels)
820 820
821 821 def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
822 822 return _filemerge(False, repo, wctx, mynode, orig, fcd, fco, fca,
823 823 labels=labels)
824 824
825 825 def loadinternalmerge(ui, extname, registrarobj):
826 826 """Load internal merge tool from specified registrarobj
827 827 """
828 828 for name, func in registrarobj._table.iteritems():
829 829 fullname = ':' + name
830 830 internals[fullname] = func
831 831 internals['internal:' + name] = func
832 832 internalsdoc[fullname] = func
833 833
834 834 # load built-in merge tools explicitly to setup internalsdoc
835 835 loadinternalmerge(None, None, internaltool)
836 836
837 837 # tell hggettext to extract docstrings from these functions:
838 838 i18nfunctions = internals.values()
@@ -1,542 +1,544
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally which isn't
22 22 supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.verbose = verbose
49 49 ... ui.pushbuffer()
50 50 ... try:
51 51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 52 ... pycompat.byteskwargs(opts)))
53 53 ... finally:
54 54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55 55
56 56 Basic example:
57 57
58 58 >>> def files(ui, fm):
59 59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 60 ... for f in files:
61 61 ... fm.startitem()
62 62 ... fm.write(b'path', b'%s', f[0])
63 63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 65 ... fm.data(size=f[1])
66 66 ... fm.plain(b'\\n')
67 67 ... fm.end()
68 68 >>> show(files)
69 69 foo
70 70 bar
71 71 >>> show(files, verbose=True)
72 72 foo 1970-01-01 00:00:00
73 73 bar 1970-01-01 00:00:01
74 74 >>> show(files, template=b'json')
75 75 [
76 76 {
77 77 "date": [0, 0],
78 78 "path": "foo",
79 79 "size": 123
80 80 },
81 81 {
82 82 "date": [1, 0],
83 83 "path": "bar",
84 84 "size": 456
85 85 }
86 86 ]
87 87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 88 path: foo
89 89 date: 1970-01-01T00:00:00+00:00
90 90 path: bar
91 91 date: 1970-01-01T00:00:01+00:00
92 92
93 93 Nested example:
94 94
95 95 >>> def subrepos(ui, fm):
96 96 ... fm.startitem()
97 97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 98 ... files(ui, fm.nested(b'files'))
99 99 ... fm.end()
100 100 >>> show(subrepos)
101 101 [baz]
102 102 foo
103 103 bar
104 104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 105 baz: foo, bar
106 106 """
107 107
108 108 from __future__ import absolute_import, print_function
109 109
110 110 import collections
111 111 import contextlib
112 112 import itertools
113 113 import os
114 114
115 115 from .i18n import _
116 116 from .node import (
117 117 hex,
118 118 short,
119 119 )
120 120
121 121 from . import (
122 122 error,
123 123 pycompat,
124 124 templatefilters,
125 125 templatekw,
126 126 templater,
127 127 util,
128 128 )
129 129
130 130 pickle = util.pickle
131 131
132 132 class _nullconverter(object):
133 133 '''convert non-primitive data types to be processed by formatter'''
134 134
135 135 # set to True if context object should be stored as item
136 136 storecontext = False
137 137
138 138 @staticmethod
139 139 def formatdate(date, fmt):
140 140 '''convert date tuple to appropriate format'''
141 141 return date
142 142 @staticmethod
143 143 def formatdict(data, key, value, fmt, sep):
144 144 '''convert dict or key-value pairs to appropriate dict format'''
145 145 # use plain dict instead of util.sortdict so that data can be
146 146 # serialized as a builtin dict in pickle output
147 147 return dict(data)
148 148 @staticmethod
149 149 def formatlist(data, name, fmt, sep):
150 150 '''convert iterable to appropriate list format'''
151 151 return list(data)
152 152
153 153 class baseformatter(object):
154 154 def __init__(self, ui, topic, opts, converter):
155 155 self._ui = ui
156 156 self._topic = topic
157 157 self._style = opts.get("style")
158 158 self._template = opts.get("template")
159 159 self._converter = converter
160 160 self._item = None
161 161 # function to convert node to string suitable for this output
162 162 self.hexfunc = hex
163 163 def __enter__(self):
164 164 return self
165 165 def __exit__(self, exctype, excvalue, traceback):
166 166 if exctype is None:
167 167 self.end()
168 168 def _showitem(self):
169 169 '''show a formatted item once all data is collected'''
170 170 def startitem(self):
171 171 '''begin an item in the format list'''
172 172 if self._item is not None:
173 173 self._showitem()
174 174 self._item = {}
175 175 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
176 176 '''convert date tuple to appropriate format'''
177 177 return self._converter.formatdate(date, fmt)
178 178 def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
179 179 '''convert dict or key-value pairs to appropriate dict format'''
180 180 return self._converter.formatdict(data, key, value, fmt, sep)
181 181 def formatlist(self, data, name, fmt='%s', sep=' '):
182 182 '''convert iterable to appropriate list format'''
183 183 # name is mandatory argument for now, but it could be optional if
184 184 # we have default template keyword, e.g. {item}
185 185 return self._converter.formatlist(data, name, fmt, sep)
186 186 def context(self, **ctxs):
187 187 '''insert context objects to be used to render template keywords'''
188 188 ctxs = pycompat.byteskwargs(ctxs)
189 189 assert all(k == 'ctx' for k in ctxs)
190 190 if self._converter.storecontext:
191 191 self._item.update(ctxs)
192 192 def data(self, **data):
193 193 '''insert data into item that's not shown in default output'''
194 194 data = pycompat.byteskwargs(data)
195 195 self._item.update(data)
196 196 def write(self, fields, deftext, *fielddata, **opts):
197 197 '''do default text output while assigning data to item'''
198 198 fieldkeys = fields.split()
199 199 assert len(fieldkeys) == len(fielddata)
200 200 self._item.update(zip(fieldkeys, fielddata))
201 201 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
202 202 '''do conditional write (primarily for plain formatter)'''
203 203 fieldkeys = fields.split()
204 204 assert len(fieldkeys) == len(fielddata)
205 205 self._item.update(zip(fieldkeys, fielddata))
206 206 def plain(self, text, **opts):
207 207 '''show raw text for non-templated mode'''
208 208 def isplain(self):
209 209 '''check for plain formatter usage'''
210 210 return False
211 211 def nested(self, field):
212 212 '''sub formatter to store nested data in the specified field'''
213 213 self._item[field] = data = []
214 214 return _nestedformatter(self._ui, self._converter, data)
215 215 def end(self):
216 216 '''end output for the formatter'''
217 217 if self._item is not None:
218 218 self._showitem()
219 219
220 220 def nullformatter(ui, topic):
221 221 '''formatter that prints nothing'''
222 222 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
223 223
224 224 class _nestedformatter(baseformatter):
225 225 '''build sub items and store them in the parent formatter'''
226 226 def __init__(self, ui, converter, data):
227 227 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
228 228 self._data = data
229 229 def _showitem(self):
230 230 self._data.append(self._item)
231 231
232 232 def _iteritems(data):
233 233 '''iterate key-value pairs in stable order'''
234 234 if isinstance(data, dict):
235 235 return sorted(data.iteritems())
236 236 return data
237 237
238 238 class _plainconverter(object):
239 239 '''convert non-primitive data types to text'''
240 240
241 241 storecontext = False
242 242
243 243 @staticmethod
244 244 def formatdate(date, fmt):
245 245 '''stringify date tuple in the given format'''
246 246 return util.datestr(date, fmt)
247 247 @staticmethod
248 248 def formatdict(data, key, value, fmt, sep):
249 249 '''stringify key-value pairs separated by sep'''
250 250 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
251 251 @staticmethod
252 252 def formatlist(data, name, fmt, sep):
253 253 '''stringify iterable separated by sep'''
254 254 return sep.join(fmt % e for e in data)
255 255
256 256 class plainformatter(baseformatter):
257 257 '''the default text output scheme'''
258 258 def __init__(self, ui, out, topic, opts):
259 259 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
260 260 if ui.debugflag:
261 261 self.hexfunc = hex
262 262 else:
263 263 self.hexfunc = short
264 264 if ui is out:
265 265 self._write = ui.write
266 266 else:
267 267 self._write = lambda s, **opts: out.write(s)
268 268 def startitem(self):
269 269 pass
270 270 def data(self, **data):
271 271 pass
272 272 def write(self, fields, deftext, *fielddata, **opts):
273 273 self._write(deftext % fielddata, **opts)
274 274 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
275 275 '''do conditional write'''
276 276 if cond:
277 277 self._write(deftext % fielddata, **opts)
278 278 def plain(self, text, **opts):
279 279 self._write(text, **opts)
280 280 def isplain(self):
281 281 return True
282 282 def nested(self, field):
283 283 # nested data will be directly written to ui
284 284 return self
285 285 def end(self):
286 286 pass
287 287
288 288 class debugformatter(baseformatter):
289 289 def __init__(self, ui, out, topic, opts):
290 290 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
291 291 self._out = out
292 292 self._out.write("%s = [\n" % self._topic)
293 293 def _showitem(self):
294 294 self._out.write(" " + repr(self._item) + ",\n")
295 295 def end(self):
296 296 baseformatter.end(self)
297 297 self._out.write("]\n")
298 298
299 299 class pickleformatter(baseformatter):
300 300 def __init__(self, ui, out, topic, opts):
301 301 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
302 302 self._out = out
303 303 self._data = []
304 304 def _showitem(self):
305 305 self._data.append(self._item)
306 306 def end(self):
307 307 baseformatter.end(self)
308 308 self._out.write(pickle.dumps(self._data))
309 309
310 310 class jsonformatter(baseformatter):
311 311 def __init__(self, ui, out, topic, opts):
312 312 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
313 313 self._out = out
314 314 self._out.write("[")
315 315 self._first = True
316 316 def _showitem(self):
317 317 if self._first:
318 318 self._first = False
319 319 else:
320 320 self._out.write(",")
321 321
322 322 self._out.write("\n {\n")
323 323 first = True
324 324 for k, v in sorted(self._item.items()):
325 325 if first:
326 326 first = False
327 327 else:
328 328 self._out.write(",\n")
329 329 u = templatefilters.json(v, paranoid=False)
330 330 self._out.write(' "%s": %s' % (k, u))
331 331 self._out.write("\n }")
332 332 def end(self):
333 333 baseformatter.end(self)
334 334 self._out.write("\n]\n")
335 335
336 336 class _templateconverter(object):
337 337 '''convert non-primitive data types to be processed by templater'''
338 338
339 339 storecontext = True
340 340
341 341 @staticmethod
342 342 def formatdate(date, fmt):
343 343 '''return date tuple'''
344 344 return date
345 345 @staticmethod
346 346 def formatdict(data, key, value, fmt, sep):
347 347 '''build object that can be evaluated as either plain string or dict'''
348 348 data = util.sortdict(_iteritems(data))
349 349 def f():
350 350 yield _plainconverter.formatdict(data, key, value, fmt, sep)
351 351 return templatekw.hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
352 352 @staticmethod
353 353 def formatlist(data, name, fmt, sep):
354 354 '''build object that can be evaluated as either plain string or list'''
355 355 data = list(data)
356 356 def f():
357 357 yield _plainconverter.formatlist(data, name, fmt, sep)
358 358 return templatekw.hybridlist(data, name=name, fmt=fmt, gen=f)
359 359
360 360 class templateformatter(baseformatter):
361 361 def __init__(self, ui, out, topic, opts):
362 362 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
363 363 self._out = out
364 364 spec = lookuptemplate(ui, topic, opts.get('template', ''))
365 365 self._tref = spec.ref
366 self._t = loadtemplater(ui, spec, resources=templateresources(ui),
366 self._t = loadtemplater(ui, spec, defaults=templatekw.keywords,
367 resources=templateresources(ui),
367 368 cache=templatekw.defaulttempl)
368 369 self._parts = templatepartsmap(spec, self._t,
369 370 ['docheader', 'docfooter', 'separator'])
370 371 self._counter = itertools.count()
371 372 self._renderitem('docheader', {})
372 373
373 374 def _showitem(self):
374 375 item = self._item.copy()
375 376 item['index'] = index = next(self._counter)
376 377 if index > 0:
377 378 self._renderitem('separator', {})
378 379 self._renderitem(self._tref, item)
379 380
380 381 def _renderitem(self, part, item):
381 382 if part not in self._parts:
382 383 return
383 384 ref = self._parts[part]
384 385
385 386 # TODO: add support for filectx. probably each template keyword or
386 387 # function will have to declare dependent resources. e.g.
387 388 # @templatekeyword(..., requires=('ctx',))
388 389 props = {}
389 if 'ctx' in item:
390 props.update(templatekw.keywords)
391 390 # explicitly-defined fields precede templatekw
392 391 props.update(item)
393 392 if 'ctx' in item:
394 393 # but template resources must be always available
395 394 props['repo'] = props['ctx'].repo()
396 395 props['revcache'] = {}
397 396 props = pycompat.strkwargs(props)
398 397 g = self._t(ref, **props)
399 398 self._out.write(templater.stringify(g))
400 399
401 400 def end(self):
402 401 baseformatter.end(self)
403 402 self._renderitem('docfooter', {})
404 403
405 404 templatespec = collections.namedtuple(r'templatespec',
406 405 r'ref tmpl mapfile')
407 406
408 407 def lookuptemplate(ui, topic, tmpl):
409 408 """Find the template matching the given -T/--template spec 'tmpl'
410 409
411 410 'tmpl' can be any of the following:
412 411
413 412 - a literal template (e.g. '{rev}')
414 413 - a map-file name or path (e.g. 'changelog')
415 414 - a reference to [templates] in config file
416 415 - a path to raw template file
417 416
418 417 A map file defines a stand-alone template environment. If a map file
419 418 selected, all templates defined in the file will be loaded, and the
420 419 template matching the given topic will be rendered. Aliases won't be
421 420 loaded from user config, but from the map file.
422 421
423 422 If no map file selected, all templates in [templates] section will be
424 423 available as well as aliases in [templatealias].
425 424 """
426 425
427 426 # looks like a literal template?
428 427 if '{' in tmpl:
429 428 return templatespec('', tmpl, None)
430 429
431 430 # perhaps a stock style?
432 431 if not os.path.split(tmpl)[0]:
433 432 mapname = (templater.templatepath('map-cmdline.' + tmpl)
434 433 or templater.templatepath(tmpl))
435 434 if mapname and os.path.isfile(mapname):
436 435 return templatespec(topic, None, mapname)
437 436
438 437 # perhaps it's a reference to [templates]
439 438 if ui.config('templates', tmpl):
440 439 return templatespec(tmpl, None, None)
441 440
442 441 if tmpl == 'list':
443 442 ui.write(_("available styles: %s\n") % templater.stylelist())
444 443 raise error.Abort(_("specify a template"))
445 444
446 445 # perhaps it's a path to a map or a template
447 446 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
448 447 # is it a mapfile for a style?
449 448 if os.path.basename(tmpl).startswith("map-"):
450 449 return templatespec(topic, None, os.path.realpath(tmpl))
451 450 with util.posixfile(tmpl, 'rb') as f:
452 451 tmpl = f.read()
453 452 return templatespec('', tmpl, None)
454 453
455 454 # constant string?
456 455 return templatespec('', tmpl, None)
457 456
458 457 def templatepartsmap(spec, t, partnames):
459 458 """Create a mapping of {part: ref}"""
460 459 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
461 460 if spec.mapfile:
462 461 partsmap.update((p, p) for p in partnames if p in t)
463 462 elif spec.ref:
464 463 for part in partnames:
465 464 ref = '%s:%s' % (spec.ref, part) # select config sub-section
466 465 if ref in t:
467 466 partsmap[part] = ref
468 467 return partsmap
469 468
470 def loadtemplater(ui, spec, resources=None, cache=None):
469 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
471 470 """Create a templater from either a literal template or loading from
472 471 a map file"""
473 472 assert not (spec.tmpl and spec.mapfile)
474 473 if spec.mapfile:
475 474 frommapfile = templater.templater.frommapfile
476 return frommapfile(spec.mapfile, resources=resources, cache=cache)
477 return maketemplater(ui, spec.tmpl, resources=resources, cache=cache)
475 return frommapfile(spec.mapfile, defaults=defaults, resources=resources,
476 cache=cache)
477 return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources,
478 cache=cache)
478 479
479 def maketemplater(ui, tmpl, resources=None, cache=None):
480 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
480 481 """Create a templater from a string template 'tmpl'"""
481 482 aliases = ui.configitems('templatealias')
482 t = templater.templater(resources=resources, cache=cache, aliases=aliases)
483 t = templater.templater(defaults=defaults, resources=resources,
484 cache=cache, aliases=aliases)
483 485 t.cache.update((k, templater.unquotestring(v))
484 486 for k, v in ui.configitems('templates'))
485 487 if tmpl:
486 488 t.cache[''] = tmpl
487 489 return t
488 490
489 491 def templateresources(ui, repo=None):
490 492 """Create a dict of template resources designed for the default templatekw
491 493 and function"""
492 494 return {
493 495 'cache': {}, # for templatekw/funcs to store reusable data
494 496 'ctx': None,
495 497 'repo': repo,
496 498 'revcache': None, # per-ctx cache; set later
497 499 'ui': ui,
498 500 }
499 501
500 502 def formatter(ui, out, topic, opts):
501 503 template = opts.get("template", "")
502 504 if template == "json":
503 505 return jsonformatter(ui, out, topic, opts)
504 506 elif template == "pickle":
505 507 return pickleformatter(ui, out, topic, opts)
506 508 elif template == "debug":
507 509 return debugformatter(ui, out, topic, opts)
508 510 elif template != "":
509 511 return templateformatter(ui, out, topic, opts)
510 512 # developer config: ui.formatdebug
511 513 elif ui.configbool('ui', 'formatdebug'):
512 514 return debugformatter(ui, out, topic, opts)
513 515 # deprecated config: ui.formatjson
514 516 elif ui.configbool('ui', 'formatjson'):
515 517 return jsonformatter(ui, out, topic, opts)
516 518 return plainformatter(ui, out, topic, opts)
517 519
518 520 @contextlib.contextmanager
519 521 def openformatter(ui, filename, topic, opts):
520 522 """Create a formatter that writes outputs to the specified file
521 523
522 524 Must be invoked using the 'with' statement.
523 525 """
524 526 with util.posixfile(filename, 'wb') as out:
525 527 with formatter(ui, out, topic, opts) as fm:
526 528 yield fm
527 529
528 530 @contextlib.contextmanager
529 531 def _neverending(fm):
530 532 yield fm
531 533
532 534 def maybereopen(fm, filename, opts):
533 535 """Create a formatter backed by file if filename specified, else return
534 536 the given formatter
535 537
536 538 Must be invoked using the 'with' statement. This will never call fm.end()
537 539 of the given formatter.
538 540 """
539 541 if filename:
540 542 return openformatter(fm._ui, filename, fm._topic, opts)
541 543 else:
542 544 return _neverending(fm)
@@ -1,57 +1,60
1 1
2 2 $ cat > engine.py << EOF
3 3 >
4 4 > from mercurial import templater
5 5 >
6 6 > class mytemplater(object):
7 7 > def __init__(self, loader, filters, defaults, resources, aliases):
8 8 > self.loader = loader
9 > self._defaults = defaults
9 10 > self._resources = resources
10 11 >
11 12 > def process(self, t, map):
12 13 > tmpl = self.loader(t)
13 > for k, v in map.iteritems():
14 > props = self._defaults.copy()
15 > props.update(map)
16 > for k, v in props.iteritems():
14 17 > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache', 'troubles'):
15 18 > continue
16 19 > if hasattr(v, '__call__'):
17 20 > props = self._resources.copy()
18 21 > props.update(map)
19 22 > v = v(**props)
20 23 > v = templater.stringify(v)
21 24 > tmpl = tmpl.replace('{{%s}}' % k, v)
22 25 > yield tmpl
23 26 >
24 27 > templater.engines['my'] = mytemplater
25 28 > EOF
26 29 $ hg init test
27 30 $ echo '[extensions]' > test/.hg/hgrc
28 31 $ echo "engine = `pwd`/engine.py" >> test/.hg/hgrc
29 32 $ cd test
30 33 $ cat > mymap << EOF
31 34 > changeset = my:changeset.txt
32 35 > EOF
33 36 $ cat > changeset.txt << EOF
34 37 > {{rev}} {{node}} {{author}}
35 38 > EOF
36 39 $ hg ci -Ama
37 40 adding changeset.txt
38 41 adding mymap
39 42 $ hg log --style=./mymap
40 43 0 97e5f848f0936960273bbf75be6388cd0350a32b test
41 44
42 45 $ cat > changeset.txt << EOF
43 46 > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}}
44 47 > EOF
45 48 $ hg ci -Ama
46 49 $ hg log --style=./mymap
47 50 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000
48 51 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000
49 52
50 53 invalid engine type:
51 54
52 55 $ echo 'changeset = unknown:changeset.txt' > unknownenginemap
53 56 $ hg log --style=./unknownenginemap
54 57 abort: invalid template engine: unknown
55 58 [255]
56 59
57 60 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now