templater: switch 'revcache' based on new mapping items...
Yuya Nishihara
r37121:be3f33f5 default
@@ -1,3213 +1,3213
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13 import tempfile
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 hex,
18 18 nullid,
19 19 nullrev,
20 20 short,
21 21 )
22 22
23 23 from . import (
24 24 bookmarks,
25 25 changelog,
26 26 copies,
27 27 crecord as crecordmod,
28 28 dirstateguard,
29 29 encoding,
30 30 error,
31 31 formatter,
32 32 logcmdutil,
33 33 match as matchmod,
34 34 merge as mergemod,
35 35 mergeutil,
36 36 obsolete,
37 37 patch,
38 38 pathutil,
39 39 pycompat,
40 40 registrar,
41 41 revlog,
42 42 rewriteutil,
43 43 scmutil,
44 44 smartset,
45 45 subrepoutil,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
51 51
52 52 from .utils import (
53 53 dateutil,
54 54 stringutil,
55 55 )
56 56
57 57 stringio = util.stringio
58 58
59 59 # templates of common command options
60 60
61 61 dryrunopts = [
62 62 ('n', 'dry-run', None,
63 63 _('do not perform actions, just print output')),
64 64 ]
65 65
66 66 remoteopts = [
67 67 ('e', 'ssh', '',
68 68 _('specify ssh command to use'), _('CMD')),
69 69 ('', 'remotecmd', '',
70 70 _('specify hg command to run on the remote side'), _('CMD')),
71 71 ('', 'insecure', None,
72 72 _('do not verify server certificate (ignoring web.cacerts config)')),
73 73 ]
74 74
75 75 walkopts = [
76 76 ('I', 'include', [],
77 77 _('include names matching the given patterns'), _('PATTERN')),
78 78 ('X', 'exclude', [],
79 79 _('exclude names matching the given patterns'), _('PATTERN')),
80 80 ]
81 81
82 82 commitopts = [
83 83 ('m', 'message', '',
84 84 _('use text as commit message'), _('TEXT')),
85 85 ('l', 'logfile', '',
86 86 _('read commit message from file'), _('FILE')),
87 87 ]
88 88
89 89 commitopts2 = [
90 90 ('d', 'date', '',
91 91 _('record the specified date as commit date'), _('DATE')),
92 92 ('u', 'user', '',
93 93 _('record the specified user as committer'), _('USER')),
94 94 ]
95 95
96 96 # hidden for now
97 97 formatteropts = [
98 98 ('T', 'template', '',
99 99 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
100 100 ]
101 101
102 102 templateopts = [
103 103 ('', 'style', '',
104 104 _('display using template map file (DEPRECATED)'), _('STYLE')),
105 105 ('T', 'template', '',
106 106 _('display with template'), _('TEMPLATE')),
107 107 ]
108 108
109 109 logopts = [
110 110 ('p', 'patch', None, _('show patch')),
111 111 ('g', 'git', None, _('use git extended diff format')),
112 112 ('l', 'limit', '',
113 113 _('limit number of changes displayed'), _('NUM')),
114 114 ('M', 'no-merges', None, _('do not show merges')),
115 115 ('', 'stat', None, _('output diffstat-style summary of changes')),
116 116 ('G', 'graph', None, _("show the revision DAG")),
117 117 ] + templateopts
118 118
119 119 diffopts = [
120 120 ('a', 'text', None, _('treat all files as text')),
121 121 ('g', 'git', None, _('use git extended diff format')),
122 122 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
123 123 ('', 'nodates', None, _('omit dates from diff headers'))
124 124 ]
125 125
126 126 diffwsopts = [
127 127 ('w', 'ignore-all-space', None,
128 128 _('ignore white space when comparing lines')),
129 129 ('b', 'ignore-space-change', None,
130 130 _('ignore changes in the amount of white space')),
131 131 ('B', 'ignore-blank-lines', None,
132 132 _('ignore changes whose lines are all blank')),
133 133 ('Z', 'ignore-space-at-eol', None,
134 134 _('ignore changes in whitespace at EOL')),
135 135 ]
136 136
137 137 diffopts2 = [
138 138 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
139 139 ('p', 'show-function', None, _('show which function each change is in')),
140 140 ('', 'reverse', None, _('produce a diff that undoes the changes')),
141 141 ] + diffwsopts + [
142 142 ('U', 'unified', '',
143 143 _('number of lines of context to show'), _('NUM')),
144 144 ('', 'stat', None, _('output diffstat-style summary of changes')),
145 145 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
146 146 ]
147 147
148 148 mergetoolopts = [
149 149 ('t', 'tool', '', _('specify merge tool')),
150 150 ]
151 151
152 152 similarityopts = [
153 153 ('s', 'similarity', '',
154 154 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
155 155 ]
156 156
157 157 subrepoopts = [
158 158 ('S', 'subrepos', None,
159 159 _('recurse into subrepositories'))
160 160 ]
161 161
162 162 debugrevlogopts = [
163 163 ('c', 'changelog', False, _('open changelog')),
164 164 ('m', 'manifest', False, _('open manifest')),
165 165 ('', 'dir', '', _('open directory manifest')),
166 166 ]
167 167
168 168 # special string such that everything below this line will be ignored in the
169 169 # editor text
170 170 _linebelow = "^HG: ------------------------ >8 ------------------------$"
171 171
172 172 def ishunk(x):
173 173 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
174 174 return isinstance(x, hunkclasses)
175 175
176 176 def newandmodified(chunks, originalchunks):
177 177 newlyaddedandmodifiedfiles = set()
178 178 for chunk in chunks:
179 179 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
180 180 originalchunks:
181 181 newlyaddedandmodifiedfiles.add(chunk.header.filename())
182 182 return newlyaddedandmodifiedfiles
183 183
184 184 def parsealiases(cmd):
185 185 return cmd.lstrip("^").split("|")
186 186
187 187 def setupwrapcolorwrite(ui):
188 188 # wrap ui.write so diff output can be labeled/colorized
189 189 def wrapwrite(orig, *args, **kw):
190 190 label = kw.pop(r'label', '')
191 191 for chunk, l in patch.difflabel(lambda: args):
192 192 orig(chunk, label=label + l)
193 193
194 194 oldwrite = ui.write
195 195 def wrap(*args, **kwargs):
196 196 return wrapwrite(oldwrite, *args, **kwargs)
197 197 setattr(ui, 'write', wrap)
198 198 return oldwrite
199 199
200 200 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
201 201 if usecurses:
202 202 if testfile:
203 203 recordfn = crecordmod.testdecorator(testfile,
204 204 crecordmod.testchunkselector)
205 205 else:
206 206 recordfn = crecordmod.chunkselector
207 207
208 208 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
209 209
210 210 else:
211 211 return patch.filterpatch(ui, originalhunks, operation)
212 212
213 213 def recordfilter(ui, originalhunks, operation=None):
214 214 """ Prompts the user to filter the originalhunks and return a list of
215 215 selected hunks.
216 216 *operation* is used for to build ui messages to indicate the user what
217 217 kind of filtering they are doing: reverting, committing, shelving, etc.
218 218 (see patch.filterpatch).
219 219 """
220 220 usecurses = crecordmod.checkcurses(ui)
221 221 testfile = ui.config('experimental', 'crecordtest')
222 222 oldwrite = setupwrapcolorwrite(ui)
223 223 try:
224 224 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
225 225 testfile, operation)
226 226 finally:
227 227 ui.write = oldwrite
228 228 return newchunks, newopts
229 229
230 230 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
231 231 filterfn, *pats, **opts):
232 232 opts = pycompat.byteskwargs(opts)
233 233 if not ui.interactive():
234 234 if cmdsuggest:
235 235 msg = _('running non-interactively, use %s instead') % cmdsuggest
236 236 else:
237 237 msg = _('running non-interactively')
238 238 raise error.Abort(msg)
239 239
240 240 # make sure username is set before going interactive
241 241 if not opts.get('user'):
242 242 ui.username() # raise exception, username not provided
243 243
244 244 def recordfunc(ui, repo, message, match, opts):
245 245 """This is the generic record driver.
246 246
247 247 Its job is to interactively filter local changes, and
248 248 accordingly prepare working directory into a state in which the
249 249 job can be delegated to a non-interactive commit command such as
250 250 'commit' or 'qrefresh'.
251 251
252 252 After the actual job is done by non-interactive command, the
253 253 working directory is restored to its original state.
254 254
255 255 In the end we'll record interesting changes, and everything else
256 256 will be left in place, so the user can continue working.
257 257 """
258 258
259 259 checkunfinished(repo, commit=True)
260 260 wctx = repo[None]
261 261 merge = len(wctx.parents()) > 1
262 262 if merge:
263 263 raise error.Abort(_('cannot partially commit a merge '
264 264 '(use "hg commit" instead)'))
265 265
266 266 def fail(f, msg):
267 267 raise error.Abort('%s: %s' % (f, msg))
268 268
269 269 force = opts.get('force')
270 270 if not force:
271 271 vdirs = []
272 272 match.explicitdir = vdirs.append
273 273 match.bad = fail
274 274
275 275 status = repo.status(match=match)
276 276 if not force:
277 277 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
278 278 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
279 279 diffopts.nodates = True
280 280 diffopts.git = True
281 281 diffopts.showfunc = True
282 282 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
283 283 originalchunks = patch.parsepatch(originaldiff)
284 284
285 285 # 1. filter patch, since we are intending to apply a subset of it
286 286 try:
287 287 chunks, newopts = filterfn(ui, originalchunks)
288 288 except error.PatchError as err:
289 289 raise error.Abort(_('error parsing patch: %s') % err)
290 290 opts.update(newopts)
291 291
292 292 # We need to keep a backup of files that have been newly added and
293 293 # modified during the recording process because there is a previous
294 294 # version without the edit in the workdir
295 295 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
296 296 contenders = set()
297 297 for h in chunks:
298 298 try:
299 299 contenders.update(set(h.files()))
300 300 except AttributeError:
301 301 pass
302 302
303 303 changed = status.modified + status.added + status.removed
304 304 newfiles = [f for f in changed if f in contenders]
305 305 if not newfiles:
306 306 ui.status(_('no changes to record\n'))
307 307 return 0
308 308
309 309 modified = set(status.modified)
310 310
311 311 # 2. backup changed files, so we can restore them in the end
312 312
313 313 if backupall:
314 314 tobackup = changed
315 315 else:
316 316 tobackup = [f for f in newfiles if f in modified or f in \
317 317 newlyaddedandmodifiedfiles]
318 318 backups = {}
319 319 if tobackup:
320 320 backupdir = repo.vfs.join('record-backups')
321 321 try:
322 322 os.mkdir(backupdir)
323 323 except OSError as err:
324 324 if err.errno != errno.EEXIST:
325 325 raise
326 326 try:
327 327 # backup continues
328 328 for f in tobackup:
329 329 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
330 330 dir=backupdir)
331 331 os.close(fd)
332 332 ui.debug('backup %r as %r\n' % (f, tmpname))
333 333 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
334 334 backups[f] = tmpname
335 335
336 336 fp = stringio()
337 337 for c in chunks:
338 338 fname = c.filename()
339 339 if fname in backups:
340 340 c.write(fp)
341 341 dopatch = fp.tell()
342 342 fp.seek(0)
343 343
344 344 # 2.5 optionally review / modify patch in text editor
345 345 if opts.get('review', False):
346 346 patchtext = (crecordmod.diffhelptext
347 347 + crecordmod.patchhelptext
348 348 + fp.read())
349 349 reviewedpatch = ui.edit(patchtext, "",
350 350 action="diff",
351 351 repopath=repo.path)
352 352 fp.truncate(0)
353 353 fp.write(reviewedpatch)
354 354 fp.seek(0)
355 355
356 356 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
357 357 # 3a. apply filtered patch to clean repo (clean)
358 358 if backups:
359 359 # Equivalent to hg.revert
360 360 m = scmutil.matchfiles(repo, backups.keys())
361 361 mergemod.update(repo, repo.dirstate.p1(),
362 362 False, True, matcher=m)
363 363
364 364 # 3b. (apply)
365 365 if dopatch:
366 366 try:
367 367 ui.debug('applying patch\n')
368 368 ui.debug(fp.getvalue())
369 369 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
370 370 except error.PatchError as err:
371 371 raise error.Abort(pycompat.bytestr(err))
372 372 del fp
373 373
374 374 # 4. We prepared working directory according to filtered
375 375 # patch. Now is the time to delegate the job to
376 376 # commit/qrefresh or the like!
377 377
378 378 # Make all of the pathnames absolute.
379 379 newfiles = [repo.wjoin(nf) for nf in newfiles]
380 380 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
381 381 finally:
382 382 # 5. finally restore backed-up files
383 383 try:
384 384 dirstate = repo.dirstate
385 385 for realname, tmpname in backups.iteritems():
386 386 ui.debug('restoring %r to %r\n' % (tmpname, realname))
387 387
388 388 if dirstate[realname] == 'n':
389 389 # without normallookup, restoring timestamp
390 390 # may cause partially committed files
391 391 # to be treated as unmodified
392 392 dirstate.normallookup(realname)
393 393
394 394 # copystat=True here and above are a hack to trick any
395 395 # editors that have f open into thinking we haven't modified them.
396 396 #
397 397 # Also note that this is racy, as an editor could notice the
398 398 # file's mtime before we've finished writing it.
399 399 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
400 400 os.unlink(tmpname)
401 401 if tobackup:
402 402 os.rmdir(backupdir)
403 403 except OSError:
404 404 pass
405 405
406 406 def recordinwlock(ui, repo, message, match, opts):
407 407 with repo.wlock():
408 408 return recordfunc(ui, repo, message, match, opts)
409 409
410 410 return commit(ui, repo, recordinwlock, pats, opts)
411 411
412 412 class dirnode(object):
413 413 """
414 414 Represent a directory in the user's working copy with the information
415 415 required for tersing its status.
416 416
417 417 path is the path to the directory
418 418
419 419 statuses is a set of statuses of all files in this directory (this includes
420 420 all the files in all the subdirectories too)
421 421
422 422 files is a list of files which are direct child of this directory
423 423
424 424 subdirs is a dictionary with the sub-directory name as the key and its own
425 425 dirnode object as the value
426 426 """
427 427
428 428 def __init__(self, dirpath):
429 429 self.path = dirpath
430 430 self.statuses = set([])
431 431 self.files = []
432 432 self.subdirs = {}
433 433
434 434 def _addfileindir(self, filename, status):
435 435 """Add a file in this directory as a direct child."""
436 436 self.files.append((filename, status))
437 437
438 438 def addfile(self, filename, status):
439 439 """
440 440 Add a file to this directory or to its direct parent directory.
441 441
442 442 If the file is not a direct child of this directory, we traverse to the
443 443 directory of which this file is a direct child and add the file
444 444 there.
445 445 """
446 446
447 447 # if the filename contains a path separator, it is not a direct
448 448 # child of this directory
449 449 if '/' in filename:
450 450 subdir, filep = filename.split('/', 1)
451 451
452 452 # does the dirnode object for subdir exist?
453 453 if subdir not in self.subdirs:
454 454 subdirpath = os.path.join(self.path, subdir)
455 455 self.subdirs[subdir] = dirnode(subdirpath)
456 456
457 457 # try adding the file in subdir
458 458 self.subdirs[subdir].addfile(filep, status)
459 459
460 460 else:
461 461 self._addfileindir(filename, status)
462 462
463 463 if status not in self.statuses:
464 464 self.statuses.add(status)
465 465
466 466 def iterfilepaths(self):
467 467 """Yield (status, path) for files directly under this directory."""
468 468 for f, st in self.files:
469 469 yield st, os.path.join(self.path, f)
470 470
471 471 def tersewalk(self, terseargs):
472 472 """
473 473 Yield (status, path) obtained by processing the status of this
474 474 dirnode.
475 475
476 476 terseargs is the string of arguments passed by the user with the `--terse`
477 477 flag.
478 478
479 479 Following are the cases which can happen:
480 480
481 481 1) All the files in the directory (including all the files in its
482 482 subdirectories) share the same status and the user has asked us to terse
483 483 that status. -> yield (status, dirpath)
484 484
485 485 2) Otherwise, we do the following:
486 486
487 487 a) Yield (status, filepath) for all the files which are in this
488 488 directory (only the ones in this directory, not the subdirs)
489 489
490 490 b) Recurse the function on all the subdirectories of this
491 491 directory
492 492 """
493 493
494 494 if len(self.statuses) == 1:
495 495 onlyst = self.statuses.pop()
496 496
497 497 # Making sure we terse only when the status abbreviation is
498 498 # passed as terse argument
499 499 if onlyst in terseargs:
500 500 yield onlyst, self.path + pycompat.ossep
501 501 return
502 502
503 503 # add the files to status list
504 504 for st, fpath in self.iterfilepaths():
505 505 yield st, fpath
506 506
507 507 # recurse on the subdirs
508 508 for dirobj in self.subdirs.values():
509 509 for st, fpath in dirobj.tersewalk(terseargs):
510 510 yield st, fpath
511 511
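# Editor's sketch (not part of this changeset): building the dirnode tree
# described above for a made-up working copy.  The paths and statuses are
# purely illustrative.
def _dirnodeexample():
    root = dirnode('')
    root.addfile('lib/a.py', 'm')
    root.addfile('lib/b.py', 'm')
    root.addfile('README', 'a')
    # every file under lib/ is modified, so lib/ itself can be tersed
    return (sorted(root.subdirs['lib'].statuses),
            list(root.subdirs['lib'].tersewalk('m')))
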
512 512 def tersedir(statuslist, terseargs):
513 513 """
514 514 Terse the status if all the files in a directory share the same status.
515 515
516 516 statuslist is a scmutil.status() object which contains a list of files for
517 517 each status.
518 518 terseargs is the string passed by the user as the argument to the `--terse`
519 519 flag.
520 520
521 521 The function makes a tree of objects of dirnode class, and at each node it
522 522 stores the information required to know whether we can terse a certain
523 523 directory or not.
524 524 """
525 525 # the order matters here as that is used to produce final list
526 526 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
527 527
528 528 # checking the argument validity
529 529 for s in pycompat.bytestr(terseargs):
530 530 if s not in allst:
531 531 raise error.Abort(_("'%s' not recognized") % s)
532 532
533 533 # creating a dirnode object for the root of the repo
534 534 rootobj = dirnode('')
535 535 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
536 536 'ignored', 'removed')
537 537
538 538 tersedict = {}
539 539 for attrname in pstatus:
540 540 statuschar = attrname[0:1]
541 541 for f in getattr(statuslist, attrname):
542 542 rootobj.addfile(f, statuschar)
543 543 tersedict[statuschar] = []
544 544
545 545 # we won't be tersing the root dir, so add files in it
546 546 for st, fpath in rootobj.iterfilepaths():
547 547 tersedict[st].append(fpath)
548 548
549 549 # process each sub-directory and build tersedict
550 550 for subdir in rootobj.subdirs.values():
551 551 for st, f in subdir.tersewalk(terseargs):
552 552 tersedict[st].append(f)
553 553
554 554 tersedlist = []
555 555 for st in allst:
556 556 tersedict[st].sort()
557 557 tersedlist.append(tersedict[st])
558 558
559 559 return tersedlist
560 560
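# Editor's sketch (not part of this changeset): feeding tersedir() a minimal
# stand-in for the scmutil.status() object it expects; the file names are
# made up.
def _tersedirexample():
    class fakestatus(object):
        modified = ['lib/a.py', 'lib/b.py']
        added = removed = deleted = clean = unknown = ignored = []
    # with --terse m, the two modified files collapse into a single 'lib/'
    # entry in the 'modified' bucket of the returned list
    return tersedir(fakestatus(), 'm')
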
561 561 def _commentlines(raw):
562 562 '''Surround lines with a comment char and a new line'''
563 563 lines = raw.splitlines()
564 564 commentedlines = ['# %s' % line for line in lines]
565 565 return '\n'.join(commentedlines) + '\n'
566 566
567 567 def _conflictsmsg(repo):
568 568 mergestate = mergemod.mergestate.read(repo)
569 569 if not mergestate.active():
570 570 return
571 571
572 572 m = scmutil.match(repo[None])
573 573 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
574 574 if unresolvedlist:
575 575 mergeliststr = '\n'.join(
576 576 [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
577 577 for path in unresolvedlist])
578 578 msg = _('''Unresolved merge conflicts:
579 579
580 580 %s
581 581
582 582 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
583 583 else:
584 584 msg = _('No unresolved merge conflicts.')
585 585
586 586 return _commentlines(msg)
587 587
588 588 def _helpmessage(continuecmd, abortcmd):
589 589 msg = _('To continue: %s\n'
590 590 'To abort: %s') % (continuecmd, abortcmd)
591 591 return _commentlines(msg)
592 592
593 593 def _rebasemsg():
594 594 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
595 595
596 596 def _histeditmsg():
597 597 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
598 598
599 599 def _unshelvemsg():
600 600 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
601 601
602 602 def _updatecleanmsg(dest=None):
603 603 warning = _('warning: this will discard uncommitted changes')
604 604 return 'hg update --clean %s (%s)' % (dest or '.', warning)
605 605
606 606 def _graftmsg():
607 607 # tweakdefaults requires `update` to have a rev hence the `.`
608 608 return _helpmessage('hg graft --continue', _updatecleanmsg())
609 609
610 610 def _mergemsg():
611 611 # tweakdefaults requires `update` to have a rev hence the `.`
612 612 return _helpmessage('hg commit', _updatecleanmsg())
613 613
614 614 def _bisectmsg():
615 615 msg = _('To mark the changeset good: hg bisect --good\n'
616 616 'To mark the changeset bad: hg bisect --bad\n'
617 617 'To abort: hg bisect --reset\n')
618 618 return _commentlines(msg)
619 619
620 620 def fileexistspredicate(filename):
621 621 return lambda repo: repo.vfs.exists(filename)
622 622
623 623 def _mergepredicate(repo):
624 624 return len(repo[None].parents()) > 1
625 625
626 626 STATES = (
627 627 # (state, predicate to detect states, helpful message function)
628 628 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
629 629 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
630 630 ('graft', fileexistspredicate('graftstate'), _graftmsg),
631 631 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
632 632 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
633 633 # The merge state is part of a list that will be iterated over.
634 634 # It needs to be last because some of the other unfinished states may also
635 635 # be in a merge or update state (e.g. rebase, histedit, graft, etc).
636 636 # We want those to have priority.
637 637 ('merge', _mergepredicate, _mergemsg),
638 638 )
639 639
640 640 def _getrepostate(repo):
641 641 # experimental config: commands.status.skipstates
642 642 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
643 643 for state, statedetectionpredicate, msgfn in STATES:
644 644 if state in skip:
645 645 continue
646 646 if statedetectionpredicate(repo):
647 647 return (state, statedetectionpredicate, msgfn)
648 648
649 649 def morestatus(repo, fm):
650 650 statetuple = _getrepostate(repo)
651 651 label = 'status.morestatus'
652 652 if statetuple:
653 653 fm.startitem()
654 654 state, statedetectionpredicate, helpfulmsg = statetuple
655 655 statemsg = _('The repository is in an unfinished *%s* state.') % state
656 656 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
657 657 conmsg = _conflictsmsg(repo)
658 658 if conmsg:
659 659 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
660 660 if helpfulmsg:
661 661 helpmsg = helpfulmsg()
662 662 fm.write('helpmsg', '%s\n', helpmsg, label=label)
663 663
664 664 def findpossible(cmd, table, strict=False):
665 665 """
666 666 Return cmd -> (aliases, command table entry)
667 667 for each matching command.
668 668 Return debug commands (or their aliases) only if no normal command matches.
669 669 """
670 670 choice = {}
671 671 debugchoice = {}
672 672
673 673 if cmd in table:
674 674 # short-circuit exact matches, "log" alias beats "^log|history"
675 675 keys = [cmd]
676 676 else:
677 677 keys = table.keys()
678 678
679 679 allcmds = []
680 680 for e in keys:
681 681 aliases = parsealiases(e)
682 682 allcmds.extend(aliases)
683 683 found = None
684 684 if cmd in aliases:
685 685 found = cmd
686 686 elif not strict:
687 687 for a in aliases:
688 688 if a.startswith(cmd):
689 689 found = a
690 690 break
691 691 if found is not None:
692 692 if aliases[0].startswith("debug") or found.startswith("debug"):
693 693 debugchoice[found] = (aliases, table[e])
694 694 else:
695 695 choice[found] = (aliases, table[e])
696 696
697 697 if not choice and debugchoice:
698 698 choice = debugchoice
699 699
700 700 return choice, allcmds
701 701
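# Editor's sketch (not part of this changeset): resolving an abbreviation
# against a toy command table; the entries below are made up.
def _findpossibleexample():
    table = {'^commit|ci': (None, []), 'config|showconfig': (None, [])}
    choice, allcmds = findpossible('co', table)
    # 'co' is a prefix of both 'commit' and 'config', so both are offered and
    # the caller (findcmd) would raise AmbiguousCommand
    return sorted(choice), sorted(allcmds)
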
702 702 def findcmd(cmd, table, strict=True):
703 703 """Return (aliases, command table entry) for command string."""
704 704 choice, allcmds = findpossible(cmd, table, strict)
705 705
706 706 if cmd in choice:
707 707 return choice[cmd]
708 708
709 709 if len(choice) > 1:
710 710 clist = sorted(choice)
711 711 raise error.AmbiguousCommand(cmd, clist)
712 712
713 713 if choice:
714 714 return list(choice.values())[0]
715 715
716 716 raise error.UnknownCommand(cmd, allcmds)
717 717
718 718 def changebranch(ui, repo, revs, label):
719 719 """ Change the branch name of given revs to label """
720 720
721 721 with repo.wlock(), repo.lock(), repo.transaction('branches'):
722 722 # abort in case of uncommitted merge or dirty wdir
723 723 bailifchanged(repo)
724 724 revs = scmutil.revrange(repo, revs)
725 725 if not revs:
726 726 raise error.Abort("empty revision set")
727 727 roots = repo.revs('roots(%ld)', revs)
728 728 if len(roots) > 1:
729 729 raise error.Abort(_("cannot change branch of non-linear revisions"))
730 730 rewriteutil.precheck(repo, revs, 'change branch of')
731 731
732 732 root = repo[roots.first()]
733 733 if not root.p1().branch() == label and label in repo.branchmap():
734 734 raise error.Abort(_("a branch of the same name already exists"))
735 735
736 736 if repo.revs('merge() and %ld', revs):
737 737 raise error.Abort(_("cannot change branch of a merge commit"))
738 738 if repo.revs('obsolete() and %ld', revs):
739 739 raise error.Abort(_("cannot change branch of an obsolete changeset"))
740 740
741 741 # make sure only topological heads
742 742 if repo.revs('heads(%ld) - head()', revs):
743 743 raise error.Abort(_("cannot change branch in middle of a stack"))
744 744
745 745 replacements = {}
746 746 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
747 747 # mercurial.subrepo -> mercurial.cmdutil
748 748 from . import context
749 749 for rev in revs:
750 750 ctx = repo[rev]
751 751 oldbranch = ctx.branch()
752 752 # check if ctx has same branch
753 753 if oldbranch == label:
754 754 continue
755 755
756 756 def filectxfn(repo, newctx, path):
757 757 try:
758 758 return ctx[path]
759 759 except error.ManifestLookupError:
760 760 return None
761 761
762 762 ui.debug("changing branch of '%s' from '%s' to '%s'\n"
763 763 % (hex(ctx.node()), oldbranch, label))
764 764 extra = ctx.extra()
765 765 extra['branch_change'] = hex(ctx.node())
766 766 # While changing the branch of a set of linear commits, make sure that
767 767 # we base our commits on the new parent rather than the old parent,
768 768 # which was obsoleted while changing the branch
769 769 p1 = ctx.p1().node()
770 770 p2 = ctx.p2().node()
771 771 if p1 in replacements:
772 772 p1 = replacements[p1][0]
773 773 if p2 in replacements:
774 774 p2 = replacements[p2][0]
775 775
776 776 mc = context.memctx(repo, (p1, p2),
777 777 ctx.description(),
778 778 ctx.files(),
779 779 filectxfn,
780 780 user=ctx.user(),
781 781 date=ctx.date(),
782 782 extra=extra,
783 783 branch=label)
784 784
785 785 commitphase = ctx.phase()
786 786 overrides = {('phases', 'new-commit'): commitphase}
787 787 with repo.ui.configoverride(overrides, 'branch-change'):
788 788 newnode = repo.commitctx(mc)
789 789
790 790 replacements[ctx.node()] = (newnode,)
791 791 ui.debug('new node id is %s\n' % hex(newnode))
792 792
793 793 # create obsmarkers and move bookmarks
794 794 scmutil.cleanupnodes(repo, replacements, 'branch-change')
795 795
796 796 # move the working copy too
797 797 wctx = repo[None]
798 798 # in-progress merge is a bit too complex for now.
799 799 if len(wctx.parents()) == 1:
800 800 newid = replacements.get(wctx.p1().node())
801 801 if newid is not None:
802 802 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
803 803 # mercurial.cmdutil
804 804 from . import hg
805 805 hg.update(repo, newid[0], quietempty=True)
806 806
807 807 ui.status(_("changed branch on %d changesets\n") % len(replacements))
808 808
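# Editor's sketch (not part of this changeset): how a command could drive
# changebranch(); the revset and branch name are made up.
def _changebranchexample(ui, repo):
    # rewrite the working directory parent onto the 'stable' branch name
    changebranch(ui, repo, ['.'], 'stable')
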
809 809 def findrepo(p):
810 810 while not os.path.isdir(os.path.join(p, ".hg")):
811 811 oldp, p = p, os.path.dirname(p)
812 812 if p == oldp:
813 813 return None
814 814
815 815 return p
816 816
817 817 def bailifchanged(repo, merge=True, hint=None):
818 818 """ enforce the precondition that working directory must be clean.
819 819
820 820 'merge' can be set to false if a pending uncommitted merge should be
821 821 ignored (such as when 'update --check' runs).
822 822
823 823 'hint' is the usual hint given to Abort exception.
824 824 """
825 825
826 826 if merge and repo.dirstate.p2() != nullid:
827 827 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
828 828 modified, added, removed, deleted = repo.status()[:4]
829 829 if modified or added or removed or deleted:
830 830 raise error.Abort(_('uncommitted changes'), hint=hint)
831 831 ctx = repo[None]
832 832 for s in sorted(ctx.substate):
833 833 ctx.sub(s).bailifchanged(hint=hint)
834 834
835 835 def logmessage(ui, opts):
836 836 """ get the log message according to -m and -l option """
837 837 message = opts.get('message')
838 838 logfile = opts.get('logfile')
839 839
840 840 if message and logfile:
841 841 raise error.Abort(_('options --message and --logfile are mutually '
842 842 'exclusive'))
843 843 if not message and logfile:
844 844 try:
845 845 if isstdiofilename(logfile):
846 846 message = ui.fin.read()
847 847 else:
848 848 message = '\n'.join(util.readfile(logfile).splitlines())
849 849 except IOError as inst:
850 850 raise error.Abort(_("can't read commit message '%s': %s") %
851 851 (logfile, encoding.strtolocal(inst.strerror)))
852 852 return message
853 853
854 854 def mergeeditform(ctxorbool, baseformname):
855 855 """return appropriate editform name (referencing a committemplate)
856 856
857 857 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
858 858 a merge is being committed.
859 859
860 860 This returns baseformname with '.merge' appended if it is a merge,
861 861 otherwise '.normal' is appended.
862 862 """
863 863 if isinstance(ctxorbool, bool):
864 864 if ctxorbool:
865 865 return baseformname + ".merge"
866 866 elif 1 < len(ctxorbool.parents()):
867 867 return baseformname + ".merge"
868 868
869 869 return baseformname + ".normal"
870 870
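# Editor's sketch (not part of this changeset): the editform names produced
# for the two cases described above.
def _mergeeditformexample(repo):
    merging = len(repo[None].parents()) > 1
    # e.g. 'import.normal.merge' while a merge is in progress,
    # 'import.normal.normal' otherwise
    return mergeeditform(merging, 'import.normal')
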
871 871 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
872 872 editform='', **opts):
873 873 """get appropriate commit message editor according to '--edit' option
874 874
875 875 'finishdesc' is a function to be called with edited commit message
876 876 (= 'description' of the new changeset) just after editing, but
877 877 before checking empty-ness. It should return actual text to be
878 878 stored into history. This allows to change description before
879 879 storing.
880 880
881 881 'extramsg' is an extra message to be shown in the editor instead of the
882 882 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
883 883 are added automatically.
884 884
885 885 'editform' is a dot-separated list of names, to distinguish
886 886 the purpose of commit text editing.
887 887
888 888 'getcommiteditor' returns 'commitforceeditor' regardless of
889 889 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
890 890 they are specific for usage in MQ.
891 891 """
892 892 if edit or finishdesc or extramsg:
893 893 return lambda r, c, s: commitforceeditor(r, c, s,
894 894 finishdesc=finishdesc,
895 895 extramsg=extramsg,
896 896 editform=editform)
897 897 elif editform:
898 898 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
899 899 else:
900 900 return commiteditor
901 901
902 902 def rendertemplate(ctx, tmpl, props=None):
903 903 """Expand a literal template 'tmpl' byte-string against one changeset
904 904
905 905 Each props item must be a stringify-able value or a callable returning
906 906 such a value, i.e. no bare list or dict should be passed.
907 907 """
908 908 repo = ctx.repo()
909 909 tres = formatter.templateresources(repo.ui, repo)
910 910 t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
911 911 resources=tres)
912 mapping = {'ctx': ctx, 'revcache': {}}
912 mapping = {'ctx': ctx}
913 913 if props:
914 914 mapping.update(props)
915 915 return t.renderdefault(mapping)
916 916
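# Editor's sketch (not part of this changeset): how callers typically use
# rendertemplate().  'repo' is assumed to be an open localrepository and
# 'myprop' is a made-up extra property passed through 'props'.
def _rendertemplateexample(repo):
    ctx = repo['.']
    return rendertemplate(ctx, '{rev}:{node|short} {myprop}\n',
                          props={'myprop': 'example'})
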
917 917 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
918 918 r"""Convert old-style filename format string to template string
919 919
920 920 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
921 921 'foo-{reporoot|basename}-{seqno}.patch'
922 922 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
923 923 '{rev}{tags % "{tag}"}{node}'
924 924
925 925 '\' in outermost strings has to be escaped because it is a directory
926 926 separator on Windows:
927 927
928 928 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
929 929 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
930 930 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
931 931 '\\\\\\\\foo\\\\bar.patch'
932 932 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
933 933 '\\\\{tags % "{tag}"}'
934 934
935 935 but inner strings follow the template rules (i.e. '\' is taken as an
936 936 escape character):
937 937
938 938 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
939 939 '{"c:\\tmp"}'
940 940 """
941 941 expander = {
942 942 b'H': b'{node}',
943 943 b'R': b'{rev}',
944 944 b'h': b'{node|short}',
945 945 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
946 946 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
947 947 b'%': b'%',
948 948 b'b': b'{reporoot|basename}',
949 949 }
950 950 if total is not None:
951 951 expander[b'N'] = b'{total}'
952 952 if seqno is not None:
953 953 expander[b'n'] = b'{seqno}'
954 954 if total is not None and seqno is not None:
955 955 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
956 956 if pathname is not None:
957 957 expander[b's'] = b'{pathname|basename}'
958 958 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
959 959 expander[b'p'] = b'{pathname}'
960 960
961 961 newname = []
962 962 for typ, start, end in templater.scantemplate(pat, raw=True):
963 963 if typ != b'string':
964 964 newname.append(pat[start:end])
965 965 continue
966 966 i = start
967 967 while i < end:
968 968 n = pat.find(b'%', i, end)
969 969 if n < 0:
970 970 newname.append(stringutil.escapestr(pat[i:end]))
971 971 break
972 972 newname.append(stringutil.escapestr(pat[i:n]))
973 973 if n + 2 > end:
974 974 raise error.Abort(_("incomplete format spec in output "
975 975 "filename"))
976 976 c = pat[n + 1:n + 2]
977 977 i = n + 2
978 978 try:
979 979 newname.append(expander[c])
980 980 except KeyError:
981 981 raise error.Abort(_("invalid format spec '%%%s' in output "
982 982 "filename") % c)
983 983 return ''.join(newname)
984 984
985 985 def makefilename(ctx, pat, **props):
986 986 if not pat:
987 987 return pat
988 988 tmpl = _buildfntemplate(pat, **props)
989 989 # BUG: alias expansion shouldn't be made against template fragments
990 990 # rewritten from %-format strings, but we have no easy way to partially
991 991 # disable the expansion.
992 992 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
993 993
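# Editor's sketch (not part of this changeset): makefilename() runs the
# pattern through _buildfntemplate(), so '%h' and '%n' below expand via the
# template engine.  'ctx' is assumed to be a changectx.
def _makefilenameexample(ctx):
    # equivalent to rendering 'export-{node|short}-{seqno}.patch'
    return makefilename(ctx, 'export-%h-%n.patch', seqno=1)
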
994 994 def isstdiofilename(pat):
995 995 """True if the given pat looks like a filename denoting stdin/stdout"""
996 996 return not pat or pat == '-'
997 997
998 998 class _unclosablefile(object):
999 999 def __init__(self, fp):
1000 1000 self._fp = fp
1001 1001
1002 1002 def close(self):
1003 1003 pass
1004 1004
1005 1005 def __iter__(self):
1006 1006 return iter(self._fp)
1007 1007
1008 1008 def __getattr__(self, attr):
1009 1009 return getattr(self._fp, attr)
1010 1010
1011 1011 def __enter__(self):
1012 1012 return self
1013 1013
1014 1014 def __exit__(self, exc_type, exc_value, exc_tb):
1015 1015 pass
1016 1016
1017 1017 def makefileobj(ctx, pat, mode='wb', modemap=None, **props):
1018 1018 writable = mode not in ('r', 'rb')
1019 1019
1020 1020 if isstdiofilename(pat):
1021 1021 repo = ctx.repo()
1022 1022 if writable:
1023 1023 fp = repo.ui.fout
1024 1024 else:
1025 1025 fp = repo.ui.fin
1026 1026 return _unclosablefile(fp)
1027 1027 fn = makefilename(ctx, pat, **props)
1028 1028 if modemap is not None:
1029 1029 mode = modemap.get(fn, mode)
1030 1030 if mode == 'wb':
1031 1031 modemap[fn] = 'ab'
1032 1032 return open(fn, mode)
1033 1033
1034 1034 def openrevlog(repo, cmd, file_, opts):
1035 1035 """opens the changelog, manifest, a filelog or a given revlog"""
1036 1036 cl = opts['changelog']
1037 1037 mf = opts['manifest']
1038 1038 dir = opts['dir']
1039 1039 msg = None
1040 1040 if cl and mf:
1041 1041 msg = _('cannot specify --changelog and --manifest at the same time')
1042 1042 elif cl and dir:
1043 1043 msg = _('cannot specify --changelog and --dir at the same time')
1044 1044 elif cl or mf or dir:
1045 1045 if file_:
1046 1046 msg = _('cannot specify filename with --changelog or --manifest')
1047 1047 elif not repo:
1048 1048 msg = _('cannot specify --changelog or --manifest or --dir '
1049 1049 'without a repository')
1050 1050 if msg:
1051 1051 raise error.Abort(msg)
1052 1052
1053 1053 r = None
1054 1054 if repo:
1055 1055 if cl:
1056 1056 r = repo.unfiltered().changelog
1057 1057 elif dir:
1058 1058 if 'treemanifest' not in repo.requirements:
1059 1059 raise error.Abort(_("--dir can only be used on repos with "
1060 1060 "treemanifest enabled"))
1061 1061 dirlog = repo.manifestlog._revlog.dirlog(dir)
1062 1062 if len(dirlog):
1063 1063 r = dirlog
1064 1064 elif mf:
1065 1065 r = repo.manifestlog._revlog
1066 1066 elif file_:
1067 1067 filelog = repo.file(file_)
1068 1068 if len(filelog):
1069 1069 r = filelog
1070 1070 if not r:
1071 1071 if not file_:
1072 1072 raise error.CommandError(cmd, _('invalid arguments'))
1073 1073 if not os.path.isfile(file_):
1074 1074 raise error.Abort(_("revlog '%s' not found") % file_)
1075 1075 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
1076 1076 file_[:-2] + ".i")
1077 1077 return r
1078 1078
1079 1079 def copy(ui, repo, pats, opts, rename=False):
1080 1080 # called with the repo lock held
1081 1081 #
1082 1082 # hgsep => pathname that uses "/" to separate directories
1083 1083 # ossep => pathname that uses os.sep to separate directories
1084 1084 cwd = repo.getcwd()
1085 1085 targets = {}
1086 1086 after = opts.get("after")
1087 1087 dryrun = opts.get("dry_run")
1088 1088 wctx = repo[None]
1089 1089
1090 1090 def walkpat(pat):
1091 1091 srcs = []
1092 1092 if after:
1093 1093 badstates = '?'
1094 1094 else:
1095 1095 badstates = '?r'
1096 1096 m = scmutil.match(wctx, [pat], opts, globbed=True)
1097 1097 for abs in wctx.walk(m):
1098 1098 state = repo.dirstate[abs]
1099 1099 rel = m.rel(abs)
1100 1100 exact = m.exact(abs)
1101 1101 if state in badstates:
1102 1102 if exact and state == '?':
1103 1103 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1104 1104 if exact and state == 'r':
1105 1105 ui.warn(_('%s: not copying - file has been marked for'
1106 1106 ' remove\n') % rel)
1107 1107 continue
1108 1108 # abs: hgsep
1109 1109 # rel: ossep
1110 1110 srcs.append((abs, rel, exact))
1111 1111 return srcs
1112 1112
1113 1113 # abssrc: hgsep
1114 1114 # relsrc: ossep
1115 1115 # otarget: ossep
1116 1116 def copyfile(abssrc, relsrc, otarget, exact):
1117 1117 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1118 1118 if '/' in abstarget:
1119 1119 # We cannot normalize abstarget itself, as this would prevent
1120 1120 # case-only renames, like a => A.
1121 1121 abspath, absname = abstarget.rsplit('/', 1)
1122 1122 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1123 1123 reltarget = repo.pathto(abstarget, cwd)
1124 1124 target = repo.wjoin(abstarget)
1125 1125 src = repo.wjoin(abssrc)
1126 1126 state = repo.dirstate[abstarget]
1127 1127
1128 1128 scmutil.checkportable(ui, abstarget)
1129 1129
1130 1130 # check for collisions
1131 1131 prevsrc = targets.get(abstarget)
1132 1132 if prevsrc is not None:
1133 1133 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1134 1134 (reltarget, repo.pathto(abssrc, cwd),
1135 1135 repo.pathto(prevsrc, cwd)))
1136 1136 return
1137 1137
1138 1138 # check for overwrites
1139 1139 exists = os.path.lexists(target)
1140 1140 samefile = False
1141 1141 if exists and abssrc != abstarget:
1142 1142 if (repo.dirstate.normalize(abssrc) ==
1143 1143 repo.dirstate.normalize(abstarget)):
1144 1144 if not rename:
1145 1145 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1146 1146 return
1147 1147 exists = False
1148 1148 samefile = True
1149 1149
1150 1150 if not after and exists or after and state in 'mn':
1151 1151 if not opts['force']:
1152 1152 if state in 'mn':
1153 1153 msg = _('%s: not overwriting - file already committed\n')
1154 1154 if after:
1155 1155 flags = '--after --force'
1156 1156 else:
1157 1157 flags = '--force'
1158 1158 if rename:
1159 1159 hint = _('(hg rename %s to replace the file by '
1160 1160 'recording a rename)\n') % flags
1161 1161 else:
1162 1162 hint = _('(hg copy %s to replace the file by '
1163 1163 'recording a copy)\n') % flags
1164 1164 else:
1165 1165 msg = _('%s: not overwriting - file exists\n')
1166 1166 if rename:
1167 1167 hint = _('(hg rename --after to record the rename)\n')
1168 1168 else:
1169 1169 hint = _('(hg copy --after to record the copy)\n')
1170 1170 ui.warn(msg % reltarget)
1171 1171 ui.warn(hint)
1172 1172 return
1173 1173
1174 1174 if after:
1175 1175 if not exists:
1176 1176 if rename:
1177 1177 ui.warn(_('%s: not recording move - %s does not exist\n') %
1178 1178 (relsrc, reltarget))
1179 1179 else:
1180 1180 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1181 1181 (relsrc, reltarget))
1182 1182 return
1183 1183 elif not dryrun:
1184 1184 try:
1185 1185 if exists:
1186 1186 os.unlink(target)
1187 1187 targetdir = os.path.dirname(target) or '.'
1188 1188 if not os.path.isdir(targetdir):
1189 1189 os.makedirs(targetdir)
1190 1190 if samefile:
1191 1191 tmp = target + "~hgrename"
1192 1192 os.rename(src, tmp)
1193 1193 os.rename(tmp, target)
1194 1194 else:
1195 1195 # Preserve stat info on renames, not on copies; this matches
1196 1196 # Linux CLI behavior.
1197 1197 util.copyfile(src, target, copystat=rename)
1198 1198 srcexists = True
1199 1199 except IOError as inst:
1200 1200 if inst.errno == errno.ENOENT:
1201 1201 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1202 1202 srcexists = False
1203 1203 else:
1204 1204 ui.warn(_('%s: cannot copy - %s\n') %
1205 1205 (relsrc, encoding.strtolocal(inst.strerror)))
1206 1206 return True # report a failure
1207 1207
1208 1208 if ui.verbose or not exact:
1209 1209 if rename:
1210 1210 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1211 1211 else:
1212 1212 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1213 1213
1214 1214 targets[abstarget] = abssrc
1215 1215
1216 1216 # fix up dirstate
1217 1217 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1218 1218 dryrun=dryrun, cwd=cwd)
1219 1219 if rename and not dryrun:
1220 1220 if not after and srcexists and not samefile:
1221 1221 repo.wvfs.unlinkpath(abssrc)
1222 1222 wctx.forget([abssrc])
1223 1223
1224 1224 # pat: ossep
1225 1225 # dest ossep
1226 1226 # srcs: list of (hgsep, hgsep, ossep, bool)
1227 1227 # return: function that takes hgsep and returns ossep
1228 1228 def targetpathfn(pat, dest, srcs):
1229 1229 if os.path.isdir(pat):
1230 1230 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1231 1231 abspfx = util.localpath(abspfx)
1232 1232 if destdirexists:
1233 1233 striplen = len(os.path.split(abspfx)[0])
1234 1234 else:
1235 1235 striplen = len(abspfx)
1236 1236 if striplen:
1237 1237 striplen += len(pycompat.ossep)
1238 1238 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1239 1239 elif destdirexists:
1240 1240 res = lambda p: os.path.join(dest,
1241 1241 os.path.basename(util.localpath(p)))
1242 1242 else:
1243 1243 res = lambda p: dest
1244 1244 return res
1245 1245
1246 1246 # pat: ossep
1247 1247 # dest ossep
1248 1248 # srcs: list of (hgsep, hgsep, ossep, bool)
1249 1249 # return: function that takes hgsep and returns ossep
1250 1250 def targetpathafterfn(pat, dest, srcs):
1251 1251 if matchmod.patkind(pat):
1252 1252 # a mercurial pattern
1253 1253 res = lambda p: os.path.join(dest,
1254 1254 os.path.basename(util.localpath(p)))
1255 1255 else:
1256 1256 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1257 1257 if len(abspfx) < len(srcs[0][0]):
1258 1258 # A directory. Either the target path contains the last
1259 1259 # component of the source path or it does not.
1260 1260 def evalpath(striplen):
1261 1261 score = 0
1262 1262 for s in srcs:
1263 1263 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1264 1264 if os.path.lexists(t):
1265 1265 score += 1
1266 1266 return score
1267 1267
1268 1268 abspfx = util.localpath(abspfx)
1269 1269 striplen = len(abspfx)
1270 1270 if striplen:
1271 1271 striplen += len(pycompat.ossep)
1272 1272 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1273 1273 score = evalpath(striplen)
1274 1274 striplen1 = len(os.path.split(abspfx)[0])
1275 1275 if striplen1:
1276 1276 striplen1 += len(pycompat.ossep)
1277 1277 if evalpath(striplen1) > score:
1278 1278 striplen = striplen1
1279 1279 res = lambda p: os.path.join(dest,
1280 1280 util.localpath(p)[striplen:])
1281 1281 else:
1282 1282 # a file
1283 1283 if destdirexists:
1284 1284 res = lambda p: os.path.join(dest,
1285 1285 os.path.basename(util.localpath(p)))
1286 1286 else:
1287 1287 res = lambda p: dest
1288 1288 return res
1289 1289
1290 1290 pats = scmutil.expandpats(pats)
1291 1291 if not pats:
1292 1292 raise error.Abort(_('no source or destination specified'))
1293 1293 if len(pats) == 1:
1294 1294 raise error.Abort(_('no destination specified'))
1295 1295 dest = pats.pop()
1296 1296 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1297 1297 if not destdirexists:
1298 1298 if len(pats) > 1 or matchmod.patkind(pats[0]):
1299 1299 raise error.Abort(_('with multiple sources, destination must be an '
1300 1300 'existing directory'))
1301 1301 if util.endswithsep(dest):
1302 1302 raise error.Abort(_('destination %s is not a directory') % dest)
1303 1303
1304 1304 tfn = targetpathfn
1305 1305 if after:
1306 1306 tfn = targetpathafterfn
1307 1307 copylist = []
1308 1308 for pat in pats:
1309 1309 srcs = walkpat(pat)
1310 1310 if not srcs:
1311 1311 continue
1312 1312 copylist.append((tfn(pat, dest, srcs), srcs))
1313 1313 if not copylist:
1314 1314 raise error.Abort(_('no files to copy'))
1315 1315
1316 1316 errors = 0
1317 1317 for targetpath, srcs in copylist:
1318 1318 for abssrc, relsrc, exact in srcs:
1319 1319 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1320 1320 errors += 1
1321 1321
1322 1322 if errors:
1323 1323 ui.warn(_('(consider using --after)\n'))
1324 1324
1325 1325 return errors != 0
1326 1326
1327 1327 ## facility to let extension process additional data into an import patch
1328 1328 # list of identifiers to be executed in order
1329 1329 extrapreimport = [] # run before commit
1330 1330 extrapostimport = [] # run after commit
1331 1331 # mapping from identifier to actual import function
1332 1332 #
1333 1333 # 'preimport' are run before the commit is made and are provided the following
1334 1334 # arguments:
1335 1335 # - repo: the localrepository instance,
1336 1336 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1337 1337 # - extra: the future extra dictionary of the changeset, please mutate it,
1338 1338 # - opts: the import options.
1339 1339 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1340 1340 # mutation of the in-memory commit and more. Feel free to rework the code to get
1341 1341 # there.
1342 1342 extrapreimportmap = {}
1343 1343 # 'postimport' are run after the commit is made and are provided the following
1344 1344 # argument:
1345 1345 # - ctx: the changectx created by import.
1346 1346 extrapostimportmap = {}
1347 1347
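# Editor's sketch (not part of this changeset): a hypothetical 'preimport'
# hook following the protocol described above; an extension would register it.
def _examplepreimporthook(repo, patchdata, extra, opts):
    # stash the patch's original node id (if any) in the new changeset's extra
    if patchdata.get('nodeid'):
        extra['import_origin'] = patchdata['nodeid']
# registration, typically done from an extension's extsetup():
#     extrapreimport.append('import_origin')
#     extrapreimportmap['import_origin'] = _examplepreimporthook
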
1348 1348 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1349 1349 """Utility function used by commands.import to import a single patch
1350 1350
1351 1351 This function is explicitly defined here to help the evolve extension to
1352 1352 wrap this part of the import logic.
1353 1353
1354 1354 The API is currently a bit ugly because it is a simple code translation from
1355 1355 the import command. Feel free to make it better.
1356 1356
1357 1357 :hunk: a patch (as a binary string)
1358 1358 :parents: nodes that will be parents of the created commit
1359 1359 :opts: the full dict of options passed to the import command
1360 1360 :msgs: list to save commit message to.
1361 1361 (used in case we need to save it when failing)
1362 1362 :updatefunc: a function that updates a repo to a given node
1363 1363 updatefunc(<repo>, <node>)
1364 1364 """
1365 1365 # avoid cycle context -> subrepo -> cmdutil
1366 1366 from . import context
1367 1367 extractdata = patch.extract(ui, hunk)
1368 1368 tmpname = extractdata.get('filename')
1369 1369 message = extractdata.get('message')
1370 1370 user = opts.get('user') or extractdata.get('user')
1371 1371 date = opts.get('date') or extractdata.get('date')
1372 1372 branch = extractdata.get('branch')
1373 1373 nodeid = extractdata.get('nodeid')
1374 1374 p1 = extractdata.get('p1')
1375 1375 p2 = extractdata.get('p2')
1376 1376
1377 1377 nocommit = opts.get('no_commit')
1378 1378 importbranch = opts.get('import_branch')
1379 1379 update = not opts.get('bypass')
1380 1380 strip = opts["strip"]
1381 1381 prefix = opts["prefix"]
1382 1382 sim = float(opts.get('similarity') or 0)
1383 1383 if not tmpname:
1384 1384 return (None, None, False)
1385 1385
1386 1386 rejects = False
1387 1387
1388 1388 try:
1389 1389 cmdline_message = logmessage(ui, opts)
1390 1390 if cmdline_message:
1391 1391 # pickup the cmdline msg
1392 1392 message = cmdline_message
1393 1393 elif message:
1394 1394 # pickup the patch msg
1395 1395 message = message.strip()
1396 1396 else:
1397 1397 # launch the editor
1398 1398 message = None
1399 1399 ui.debug('message:\n%s\n' % message)
1400 1400
1401 1401 if len(parents) == 1:
1402 1402 parents.append(repo[nullid])
1403 1403 if opts.get('exact'):
1404 1404 if not nodeid or not p1:
1405 1405 raise error.Abort(_('not a Mercurial patch'))
1406 1406 p1 = repo[p1]
1407 1407 p2 = repo[p2 or nullid]
1408 1408 elif p2:
1409 1409 try:
1410 1410 p1 = repo[p1]
1411 1411 p2 = repo[p2]
1412 1412 # Without any options, consider p2 only if the
1413 1413 # patch is being applied on top of the recorded
1414 1414 # first parent.
1415 1415 if p1 != parents[0]:
1416 1416 p1 = parents[0]
1417 1417 p2 = repo[nullid]
1418 1418 except error.RepoError:
1419 1419 p1, p2 = parents
1420 1420 if p2.node() == nullid:
1421 1421 ui.warn(_("warning: import the patch as a normal revision\n"
1422 1422 "(use --exact to import the patch as a merge)\n"))
1423 1423 else:
1424 1424 p1, p2 = parents
1425 1425
1426 1426 n = None
1427 1427 if update:
1428 1428 if p1 != parents[0]:
1429 1429 updatefunc(repo, p1.node())
1430 1430 if p2 != parents[1]:
1431 1431 repo.setparents(p1.node(), p2.node())
1432 1432
1433 1433 if opts.get('exact') or importbranch:
1434 1434 repo.dirstate.setbranch(branch or 'default')
1435 1435
1436 1436 partial = opts.get('partial', False)
1437 1437 files = set()
1438 1438 try:
1439 1439 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1440 1440 files=files, eolmode=None, similarity=sim / 100.0)
1441 1441 except error.PatchError as e:
1442 1442 if not partial:
1443 1443 raise error.Abort(pycompat.bytestr(e))
1444 1444 if partial:
1445 1445 rejects = True
1446 1446
1447 1447 files = list(files)
1448 1448 if nocommit:
1449 1449 if message:
1450 1450 msgs.append(message)
1451 1451 else:
1452 1452 if opts.get('exact') or p2:
1453 1453 # If you got here, you either use --force and know what
1454 1454 # you are doing or used --exact or a merge patch while
1455 1455 # being updated to its first parent.
1456 1456 m = None
1457 1457 else:
1458 1458 m = scmutil.matchfiles(repo, files or [])
1459 1459 editform = mergeeditform(repo[None], 'import.normal')
1460 1460 if opts.get('exact'):
1461 1461 editor = None
1462 1462 else:
1463 1463 editor = getcommiteditor(editform=editform,
1464 1464 **pycompat.strkwargs(opts))
1465 1465 extra = {}
1466 1466 for idfunc in extrapreimport:
1467 1467 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1468 1468 overrides = {}
1469 1469 if partial:
1470 1470 overrides[('ui', 'allowemptycommit')] = True
1471 1471 with repo.ui.configoverride(overrides, 'import'):
1472 1472 n = repo.commit(message, user,
1473 1473 date, match=m,
1474 1474 editor=editor, extra=extra)
1475 1475 for idfunc in extrapostimport:
1476 1476 extrapostimportmap[idfunc](repo[n])
1477 1477 else:
1478 1478 if opts.get('exact') or importbranch:
1479 1479 branch = branch or 'default'
1480 1480 else:
1481 1481 branch = p1.branch()
1482 1482 store = patch.filestore()
1483 1483 try:
1484 1484 files = set()
1485 1485 try:
1486 1486 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1487 1487 files, eolmode=None)
1488 1488 except error.PatchError as e:
1489 1489 raise error.Abort(stringutil.forcebytestr(e))
1490 1490 if opts.get('exact'):
1491 1491 editor = None
1492 1492 else:
1493 1493 editor = getcommiteditor(editform='import.bypass')
1494 1494 memctx = context.memctx(repo, (p1.node(), p2.node()),
1495 1495 message,
1496 1496 files=files,
1497 1497 filectxfn=store,
1498 1498 user=user,
1499 1499 date=date,
1500 1500 branch=branch,
1501 1501 editor=editor)
1502 1502 n = memctx.commit()
1503 1503 finally:
1504 1504 store.close()
1505 1505 if opts.get('exact') and nocommit:
1506 1506 # --exact with --no-commit is still useful in that it does merge
1507 1507 # and branch bits
1508 1508 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1509 1509 elif opts.get('exact') and hex(n) != nodeid:
1510 1510 raise error.Abort(_('patch is damaged or loses information'))
1511 1511 msg = _('applied to working directory')
1512 1512 if n:
1513 1513 # i18n: refers to a short changeset id
1514 1514 msg = _('created %s') % short(n)
1515 1515 return (msg, n, rejects)
1516 1516 finally:
1517 1517 os.unlink(tmpname)
1518 1518
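# Editor's sketch (not part of this changeset): one way to drive
# tryimportone() for a single patch.  'hunk' is the raw patch data, 'opts' is
# assumed to contain the import command's options ('strip', 'prefix', ...),
# and hg.clean serves as the updatefunc that updates the repo to a node.
def _tryimportoneexample(ui, repo, hunk, opts):
    # avoid cycle mercurial.cmdutil -> mercurial.hg -> mercurial.cmdutil
    from . import hg
    parents = repo[None].parents()
    msgs = []
    return tryimportone(ui, repo, hunk, parents, opts, msgs, hg.clean)
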
1519 1519 # facility to let extensions include additional data in an exported patch
1520 1520 # list of identifiers to be executed in order
1521 1521 extraexport = []
1522 1522 # mapping from identifier to actual export function
1523 1523 # function has to return a string to be added to the header or None
1524 1524 # it is given two arguments (sequencenumber, changectx)
1525 1525 extraexportmap = {}
1526 1526
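# Editor's sketch (not part of this changeset): a hypothetical export header
# hook matching the (sequencenumber, changectx) contract described above.
def _exampleexportheader(seqno, ctx):
    if len(ctx.parents()) > 1:
        return 'Merge patch %d' % seqno
    return None
# registration, typically done from an extension:
#     extraexport.append('mergemark')
#     extraexportmap['mergemark'] = _exampleexportheader
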
1527 1527 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1528 1528 node = scmutil.binnode(ctx)
1529 1529 parents = [p.node() for p in ctx.parents() if p]
1530 1530 branch = ctx.branch()
1531 1531 if switch_parent:
1532 1532 parents.reverse()
1533 1533
1534 1534 if parents:
1535 1535 prev = parents[0]
1536 1536 else:
1537 1537 prev = nullid
1538 1538
1539 1539 write("# HG changeset patch\n")
1540 1540 write("# User %s\n" % ctx.user())
1541 1541 write("# Date %d %d\n" % ctx.date())
1542 1542 write("# %s\n" % dateutil.datestr(ctx.date()))
1543 1543 if branch and branch != 'default':
1544 1544 write("# Branch %s\n" % branch)
1545 1545 write("# Node ID %s\n" % hex(node))
1546 1546 write("# Parent %s\n" % hex(prev))
1547 1547 if len(parents) > 1:
1548 1548 write("# Parent %s\n" % hex(parents[1]))
1549 1549
1550 1550 for headerid in extraexport:
1551 1551 header = extraexportmap[headerid](seqno, ctx)
1552 1552 if header is not None:
1553 1553 write('# %s\n' % header)
1554 1554 write(ctx.description().rstrip())
1555 1555 write("\n\n")
1556 1556
1557 1557 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1558 1558 write(chunk, label=label)
1559 1559
1560 1560 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1561 1561 opts=None, match=None):
1562 1562 '''export changesets as hg patches
1563 1563
1564 1564 Args:
1565 1565 repo: The repository from which we're exporting revisions.
1566 1566 revs: A list of revisions to export as revision numbers.
1567 1567 fntemplate: An optional string to use for generating patch file names.
1568 1568 fp: An optional file-like object to which patches should be written.
1569 1569 switch_parent: If True, show diffs against second parent when not nullid.
1570 1570 Default is false, which always shows diff against p1.
1571 1571 opts: diff options to use for generating the patch.
1572 1572 match: If specified, only export changes to files matching this matcher.
1573 1573
1574 1574 Returns:
1575 1575 Nothing.
1576 1576
1577 1577 Side Effect:
1578 1578 "HG Changeset Patch" data is emitted to one of the following
1579 1579 destinations:
1580 1580 fp is specified: All revs are written to the specified
1581 1581 file-like object.
1582 1582 fntemplate specified: Each rev is written to a unique file named using
1583 1583 the given template.
1584 1584 Neither fp nor fntemplate specified: All revs written to repo.ui.write()
1585 1585 '''
1586 1586
1587 1587 total = len(revs)
1588 1588 revwidth = max(len(str(rev)) for rev in revs)
1589 1589 filemode = {}
1590 1590
1591 1591 write = None
1592 1592 dest = '<unnamed>'
1593 1593 if fp:
1594 1594 dest = getattr(fp, 'name', dest)
1595 1595 def write(s, **kw):
1596 1596 fp.write(s)
1597 1597 elif not fntemplate:
1598 1598 write = repo.ui.write
1599 1599
1600 1600 for seqno, rev in enumerate(revs, 1):
1601 1601 ctx = repo[rev]
1602 1602 fo = None
1603 1603 if not fp and fntemplate:
1604 1604 fo = makefileobj(ctx, fntemplate, mode='wb', modemap=filemode,
1605 1605 total=total, seqno=seqno, revwidth=revwidth)
1606 1606 dest = fo.name
1607 1607 def write(s, **kw):
1608 1608 fo.write(s)
1609 1609 if not dest.startswith('<'):
1610 1610 repo.ui.note("%s\n" % dest)
1611 1611 _exportsingle(
1612 1612 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1613 1613 if fo is not None:
1614 1614 fo.close()
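
# Illustrative sketch (not part of the original file): a minimal caller of
# export() writing one patch file per revision; 'repo' is assumed to be an
# open localrepository and patch.diffallopts() supplies the diff options.
#
#     from mercurial import patch
#     export(repo, [repo['tip'].rev()],
#            fntemplate='hg-%h.patch',
#            opts=patch.diffallopts(repo.ui))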
1615 1615
1616 1616 def showmarker(fm, marker, index=None):
1617 1617 """utility function to display obsolescence marker in a readable way
1618 1618
1619 1619 To be used by debug function."""
1620 1620 if index is not None:
1621 1621 fm.write('index', '%i ', index)
1622 1622 fm.write('prednode', '%s ', hex(marker.prednode()))
1623 1623 succs = marker.succnodes()
1624 1624 fm.condwrite(succs, 'succnodes', '%s ',
1625 1625 fm.formatlist(map(hex, succs), name='node'))
1626 1626 fm.write('flag', '%X ', marker.flags())
1627 1627 parents = marker.parentnodes()
1628 1628 if parents is not None:
1629 1629 fm.write('parentnodes', '{%s} ',
1630 1630 fm.formatlist(map(hex, parents), name='node', sep=', '))
1631 1631 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1632 1632 meta = marker.metadata().copy()
1633 1633 meta.pop('date', None)
1634 1634 smeta = util.rapply(pycompat.maybebytestr, meta)
1635 1635 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1636 1636 fm.plain('\n')
1637 1637
1638 1638 def finddate(ui, repo, date):
1639 1639 """Find the tipmost changeset that matches the given date spec"""
1640 1640
1641 1641 df = dateutil.matchdate(date)
1642 1642 m = scmutil.matchall(repo)
1643 1643 results = {}
1644 1644
1645 1645 def prep(ctx, fns):
1646 1646 d = ctx.date()
1647 1647 if df(d[0]):
1648 1648 results[ctx.rev()] = d
1649 1649
1650 1650 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1651 1651 rev = ctx.rev()
1652 1652 if rev in results:
1653 1653 ui.status(_("found revision %s from %s\n") %
1654 1654 (rev, dateutil.datestr(results[rev])))
1655 1655 return '%d' % rev
1656 1656
1657 1657 raise error.Abort(_("revision matching date not found"))
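
# Illustrative sketch (assumption, not from this file): finddate() backs
# date-based revision selection such as "hg update --date", e.g.:
#
#     rev = finddate(ui, repo, '>2017-01-01')   # returns the rev as a string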
1658 1658
1659 1659 def increasingwindows(windowsize=8, sizelimit=512):
1660 1660 while True:
1661 1661 yield windowsize
1662 1662 if windowsize < sizelimit:
1663 1663 windowsize *= 2
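
# For instance (illustrative, not part of the original file):
#
#     from itertools import islice
#     list(islice(increasingwindows(), 9))
#     # -> [8, 16, 32, 64, 128, 256, 512, 512, 512]
#
# i.e. the window doubles until it reaches sizelimit and then stays there.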
1664 1664
1665 1665 def _walkrevs(repo, opts):
1666 1666 # Default --rev value depends on --follow but --follow behavior
1667 1667 # depends on revisions resolved from --rev...
1668 1668 follow = opts.get('follow') or opts.get('follow_first')
1669 1669 if opts.get('rev'):
1670 1670 revs = scmutil.revrange(repo, opts['rev'])
1671 1671 elif follow and repo.dirstate.p1() == nullid:
1672 1672 revs = smartset.baseset()
1673 1673 elif follow:
1674 1674 revs = repo.revs('reverse(:.)')
1675 1675 else:
1676 1676 revs = smartset.spanset(repo)
1677 1677 revs.reverse()
1678 1678 return revs
1679 1679
1680 1680 class FileWalkError(Exception):
1681 1681 pass
1682 1682
1683 1683 def walkfilerevs(repo, match, follow, revs, fncache):
1684 1684 '''Walks the file history for the matched files.
1685 1685
1686 1686 Returns the changeset revs that are involved in the file history.
1687 1687
1688 1688 Throws FileWalkError if the file history can't be walked using
1689 1689 filelogs alone.
1690 1690 '''
1691 1691 wanted = set()
1692 1692 copies = []
1693 1693 minrev, maxrev = min(revs), max(revs)
1694 1694 def filerevgen(filelog, last):
1695 1695 """
1696 1696 Only files, no patterns. Check the history of each file.
1697 1697
1698 1698 Examines filelog entries within the minrev..maxrev linkrev range.
1699 1699 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1700 1700 tuples in backwards order.
1701 1701 """
1702 1702 cl_count = len(repo)
1703 1703 revs = []
1704 1704 for j in xrange(0, last + 1):
1705 1705 linkrev = filelog.linkrev(j)
1706 1706 if linkrev < minrev:
1707 1707 continue
1708 1708 # only yield rev for which we have the changelog; it can
1709 1709 # happen while doing "hg log" during a pull or commit
1710 1710 if linkrev >= cl_count:
1711 1711 break
1712 1712
1713 1713 parentlinkrevs = []
1714 1714 for p in filelog.parentrevs(j):
1715 1715 if p != nullrev:
1716 1716 parentlinkrevs.append(filelog.linkrev(p))
1717 1717 n = filelog.node(j)
1718 1718 revs.append((linkrev, parentlinkrevs,
1719 1719 follow and filelog.renamed(n)))
1720 1720
1721 1721 return reversed(revs)
1722 1722 def iterfiles():
1723 1723 pctx = repo['.']
1724 1724 for filename in match.files():
1725 1725 if follow:
1726 1726 if filename not in pctx:
1727 1727 raise error.Abort(_('cannot follow file not in parent '
1728 1728 'revision: "%s"') % filename)
1729 1729 yield filename, pctx[filename].filenode()
1730 1730 else:
1731 1731 yield filename, None
1732 1732 for filename_node in copies:
1733 1733 yield filename_node
1734 1734
1735 1735 for file_, node in iterfiles():
1736 1736 filelog = repo.file(file_)
1737 1737 if not len(filelog):
1738 1738 if node is None:
1739 1739 # A zero count may be a directory or deleted file, so
1740 1740 # try to find matching entries on the slow path.
1741 1741 if follow:
1742 1742 raise error.Abort(
1743 1743 _('cannot follow nonexistent file: "%s"') % file_)
1744 1744 raise FileWalkError("Cannot walk via filelog")
1745 1745 else:
1746 1746 continue
1747 1747
1748 1748 if node is None:
1749 1749 last = len(filelog) - 1
1750 1750 else:
1751 1751 last = filelog.rev(node)
1752 1752
1753 1753 # keep track of all ancestors of the file
1754 1754 ancestors = {filelog.linkrev(last)}
1755 1755
1756 1756 # iterate from latest to oldest revision
1757 1757 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1758 1758 if not follow:
1759 1759 if rev > maxrev:
1760 1760 continue
1761 1761 else:
1762 1762 # Note that last might not be the first interesting
1763 1763 # rev to us:
1764 1764 # if the file has been changed after maxrev, we'll
1765 1765 # have linkrev(last) > maxrev, and we still need
1766 1766 # to explore the file graph
1767 1767 if rev not in ancestors:
1768 1768 continue
1769 1769 # XXX insert 1327 fix here
1770 1770 if flparentlinkrevs:
1771 1771 ancestors.update(flparentlinkrevs)
1772 1772
1773 1773 fncache.setdefault(rev, []).append(file_)
1774 1774 wanted.add(rev)
1775 1775 if copied:
1776 1776 copies.append(copied)
1777 1777
1778 1778 return wanted
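
# Illustrative sketch (not part of the original file): the calling pattern
# used by walkchangerevs() below -- try the fast filelog-only walk first and
# fall back to scanning the changelog when FileWalkError is raised:
#
#     fncache = {}
#     try:
#         wanted = walkfilerevs(repo, match, follow=False, revs=revs,
#                               fncache=fncache)
#     except FileWalkError:
#         pass  # fall back to the slow path (see walkchangerevs)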
1779 1779
1780 1780 class _followfilter(object):
1781 1781 def __init__(self, repo, onlyfirst=False):
1782 1782 self.repo = repo
1783 1783 self.startrev = nullrev
1784 1784 self.roots = set()
1785 1785 self.onlyfirst = onlyfirst
1786 1786
1787 1787 def match(self, rev):
1788 1788 def realparents(rev):
1789 1789 if self.onlyfirst:
1790 1790 return self.repo.changelog.parentrevs(rev)[0:1]
1791 1791 else:
1792 1792 return filter(lambda x: x != nullrev,
1793 1793 self.repo.changelog.parentrevs(rev))
1794 1794
1795 1795 if self.startrev == nullrev:
1796 1796 self.startrev = rev
1797 1797 return True
1798 1798
1799 1799 if rev > self.startrev:
1800 1800 # forward: all descendants
1801 1801 if not self.roots:
1802 1802 self.roots.add(self.startrev)
1803 1803 for parent in realparents(rev):
1804 1804 if parent in self.roots:
1805 1805 self.roots.add(rev)
1806 1806 return True
1807 1807 else:
1808 1808 # backwards: all parents
1809 1809 if not self.roots:
1810 1810 self.roots.update(realparents(self.startrev))
1811 1811 if rev in self.roots:
1812 1812 self.roots.remove(rev)
1813 1813 self.roots.update(realparents(rev))
1814 1814 return True
1815 1815
1816 1816 return False
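
# Illustrative sketch (not part of the original file): the first revision
# passed to match() becomes the start point; as later revisions are fed in
# order moving away from it, match() answers whether they are descendants
# (forward walk) or ancestors (backward walk) of the start revision.
#
#     ff = _followfilter(repo, onlyfirst=True)
#     related = [r for r in candidaterevs if ff.match(r)]  # hypothetical revs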
1817 1817
1818 1818 def walkchangerevs(repo, match, opts, prepare):
1819 1819 '''Iterate over files and the revs in which they changed.
1820 1820
1821 1821 Callers most commonly need to iterate backwards over the history
1822 1822 in which they are interested. Doing so has awful (quadratic-looking)
1823 1823 performance, so we use iterators in a "windowed" way.
1824 1824
1825 1825 We walk a window of revisions in the desired order. Within the
1826 1826 window, we first walk forwards to gather data, then in the desired
1827 1827 order (usually backwards) to display it.
1828 1828
1829 1829 This function returns an iterator yielding contexts. Before
1830 1830 yielding each context, the iterator will first call the prepare
1831 1831 function on each context in the window in forward order.'''
1832 1832
1833 1833 follow = opts.get('follow') or opts.get('follow_first')
1834 1834 revs = _walkrevs(repo, opts)
1835 1835 if not revs:
1836 1836 return []
1837 1837 wanted = set()
1838 1838 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1839 1839 fncache = {}
1840 1840 change = repo.changectx
1841 1841
1842 1842 # First step is to fill wanted, the set of revisions that we want to yield.
1843 1843 # When it does not induce extra cost, we also fill fncache for revisions in
1844 1844 # wanted: a cache of filenames that were changed (ctx.files()) and that
1845 1845 # match the file filtering conditions.
1846 1846
1847 1847 if match.always():
1848 1848 # No files, no patterns. Display all revs.
1849 1849 wanted = revs
1850 1850 elif not slowpath:
1851 1851 # We only have to read through the filelog to find wanted revisions
1852 1852
1853 1853 try:
1854 1854 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1855 1855 except FileWalkError:
1856 1856 slowpath = True
1857 1857
1858 1858 # We decided to fall back to the slowpath because at least one
1859 1859 # of the paths was not a file. Check to see if at least one of them
1860 1860 # existed in history, otherwise simply return
1861 1861 for path in match.files():
1862 1862 if path == '.' or path in repo.store:
1863 1863 break
1864 1864 else:
1865 1865 return []
1866 1866
1867 1867 if slowpath:
1868 1868 # We have to read the changelog to match filenames against
1869 1869 # changed files
1870 1870
1871 1871 if follow:
1872 1872 raise error.Abort(_('can only follow copies/renames for explicit '
1873 1873 'filenames'))
1874 1874
1875 1875 # The slow path checks files modified in every changeset.
1876 1876 # This is really slow on large repos, so compute the set lazily.
1877 1877 class lazywantedset(object):
1878 1878 def __init__(self):
1879 1879 self.set = set()
1880 1880 self.revs = set(revs)
1881 1881
1882 1882 # No need to worry about locality here because it will be accessed
1883 1883 # in the same order as the increasing window below.
1884 1884 def __contains__(self, value):
1885 1885 if value in self.set:
1886 1886 return True
1887 1887 elif value not in self.revs:
1888 1888 return False
1889 1889 else:
1890 1890 self.revs.discard(value)
1891 1891 ctx = change(value)
1892 1892 matches = [f for f in ctx.files() if match(f)]
1893 1893 if matches:
1894 1894 fncache[value] = matches
1895 1895 self.set.add(value)
1896 1896 return True
1897 1897 return False
1898 1898
1899 1899 def discard(self, value):
1900 1900 self.revs.discard(value)
1901 1901 self.set.discard(value)
1902 1902
1903 1903 wanted = lazywantedset()
1904 1904
1905 1905 # it might be worthwhile to do this in the iterator if the rev range
1906 1906 # is descending and the prune args are all within that range
1907 1907 for rev in opts.get('prune', ()):
1908 1908 rev = repo[rev].rev()
1909 1909 ff = _followfilter(repo)
1910 1910 stop = min(revs[0], revs[-1])
1911 1911 for x in xrange(rev, stop - 1, -1):
1912 1912 if ff.match(x):
1913 1913 wanted = wanted - [x]
1914 1914
1915 1915 # Now that wanted is correctly initialized, we can iterate over the
1916 1916 # revision range, yielding only revisions in wanted.
1917 1917 def iterate():
1918 1918 if follow and match.always():
1919 1919 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1920 1920 def want(rev):
1921 1921 return ff.match(rev) and rev in wanted
1922 1922 else:
1923 1923 def want(rev):
1924 1924 return rev in wanted
1925 1925
1926 1926 it = iter(revs)
1927 1927 stopiteration = False
1928 1928 for windowsize in increasingwindows():
1929 1929 nrevs = []
1930 1930 for i in xrange(windowsize):
1931 1931 rev = next(it, None)
1932 1932 if rev is None:
1933 1933 stopiteration = True
1934 1934 break
1935 1935 elif want(rev):
1936 1936 nrevs.append(rev)
1937 1937 for rev in sorted(nrevs):
1938 1938 fns = fncache.get(rev)
1939 1939 ctx = change(rev)
1940 1940 if not fns:
1941 1941 def fns_generator():
1942 1942 for f in ctx.files():
1943 1943 if match(f):
1944 1944 yield f
1945 1945 fns = fns_generator()
1946 1946 prepare(ctx, fns)
1947 1947 for rev in nrevs:
1948 1948 yield change(rev)
1949 1949
1950 1950 if stopiteration:
1951 1951 break
1952 1952
1953 1953 return iterate()
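
# Illustrative sketch (not part of the original file): the typical caller
# pattern, similar to finddate() above -- gather per-revision data in the
# prepare() callback on the forward pass, then consume the yielded contexts:
#
#     collected = {}
#     def prepare(ctx, fns):
#         collected[ctx.rev()] = sorted(fns)
#     for ctx in walkchangerevs(repo, scmutil.matchall(repo),
#                               {'rev': None}, prepare):
#         ui.write("%d: %s\n" % (ctx.rev(),
#                                ', '.join(collected[ctx.rev()])))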
1954 1954
1955 1955 def add(ui, repo, match, prefix, explicitonly, **opts):
1956 1956 join = lambda f: os.path.join(prefix, f)
1957 1957 bad = []
1958 1958
1959 1959 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
1960 1960 names = []
1961 1961 wctx = repo[None]
1962 1962 cca = None
1963 1963 abort, warn = scmutil.checkportabilityalert(ui)
1964 1964 if abort or warn:
1965 1965 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1966 1966
1967 1967 badmatch = matchmod.badmatch(match, badfn)
1968 1968 dirstate = repo.dirstate
1969 1969 # We don't want to just call wctx.walk here, since it would return a lot of
1970 1970 # clean files, which we aren't interested in, and doing so takes time.
1971 1971 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
1972 1972 unknown=True, ignored=False, full=False)):
1973 1973 exact = match.exact(f)
1974 1974 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1975 1975 if cca:
1976 1976 cca(f)
1977 1977 names.append(f)
1978 1978 if ui.verbose or not exact:
1979 1979 ui.status(_('adding %s\n') % match.rel(f))
1980 1980
1981 1981 for subpath in sorted(wctx.substate):
1982 1982 sub = wctx.sub(subpath)
1983 1983 try:
1984 1984 submatch = matchmod.subdirmatcher(subpath, match)
1985 1985 if opts.get(r'subrepos'):
1986 1986 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
1987 1987 else:
1988 1988 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
1989 1989 except error.LookupError:
1990 1990 ui.status(_("skipping missing subrepository: %s\n")
1991 1991 % join(subpath))
1992 1992
1993 1993 if not opts.get(r'dry_run'):
1994 1994 rejected = wctx.add(names, prefix)
1995 1995 bad.extend(f for f in rejected if f in match.files())
1996 1996 return bad
1997 1997
1998 1998 def addwebdirpath(repo, serverpath, webconf):
1999 1999 webconf[serverpath] = repo.root
2000 2000 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2001 2001
2002 2002 for r in repo.revs('filelog("path:.hgsub")'):
2003 2003 ctx = repo[r]
2004 2004 for subpath in ctx.substate:
2005 2005 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2006 2006
2007 2007 def forget(ui, repo, match, prefix, explicitonly, dryrun):
2008 2008 join = lambda f: os.path.join(prefix, f)
2009 2009 bad = []
2010 2010 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2011 2011 wctx = repo[None]
2012 2012 forgot = []
2013 2013
2014 2014 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2015 2015 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2016 2016 if explicitonly:
2017 2017 forget = [f for f in forget if match.exact(f)]
2018 2018
2019 2019 for subpath in sorted(wctx.substate):
2020 2020 sub = wctx.sub(subpath)
2021 2021 try:
2022 2022 submatch = matchmod.subdirmatcher(subpath, match)
2023 2023 subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun)
2024 2024 bad.extend([subpath + '/' + f for f in subbad])
2025 2025 forgot.extend([subpath + '/' + f for f in subforgot])
2026 2026 except error.LookupError:
2027 2027 ui.status(_("skipping missing subrepository: %s\n")
2028 2028 % join(subpath))
2029 2029
2030 2030 if not explicitonly:
2031 2031 for f in match.files():
2032 2032 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2033 2033 if f not in forgot:
2034 2034 if repo.wvfs.exists(f):
2035 2035 # Don't complain if the exact case match wasn't given.
2036 2036 # But don't do this until after checking 'forgot', so
2037 2037 # that subrepo files aren't normalized, and this op is
2038 2038 # purely from data cached by the status walk above.
2039 2039 if repo.dirstate.normalize(f) in repo.dirstate:
2040 2040 continue
2041 2041 ui.warn(_('not removing %s: '
2042 2042 'file is already untracked\n')
2043 2043 % match.rel(f))
2044 2044 bad.append(f)
2045 2045
2046 2046 for f in forget:
2047 2047 if ui.verbose or not match.exact(f):
2048 2048 ui.status(_('removing %s\n') % match.rel(f))
2049 2049
2050 2050 if not dryrun:
2051 2051 rejected = wctx.forget(forget, prefix)
2052 2052 bad.extend(f for f in rejected if f in match.files())
2053 2053 forgot.extend(f for f in forget if f not in rejected)
2054 2054 return bad, forgot
2055 2055
2056 2056 def files(ui, ctx, m, fm, fmt, subrepos):
2057 2057 rev = ctx.rev()
2058 2058 ret = 1
2059 2059 ds = ctx.repo().dirstate
2060 2060
2061 2061 for f in ctx.matches(m):
2062 2062 if rev is None and ds[f] == 'r':
2063 2063 continue
2064 2064 fm.startitem()
2065 2065 if ui.verbose:
2066 2066 fc = ctx[f]
2067 2067 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2068 2068 fm.data(abspath=f)
2069 2069 fm.write('path', fmt, m.rel(f))
2070 2070 ret = 0
2071 2071
2072 2072 for subpath in sorted(ctx.substate):
2073 2073 submatch = matchmod.subdirmatcher(subpath, m)
2074 2074 if (subrepos or m.exact(subpath) or any(submatch.files())):
2075 2075 sub = ctx.sub(subpath)
2076 2076 try:
2077 2077 recurse = m.exact(subpath) or subrepos
2078 2078 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2079 2079 ret = 0
2080 2080 except error.LookupError:
2081 2081 ui.status(_("skipping missing subrepository: %s\n")
2082 2082 % m.abs(subpath))
2083 2083
2084 2084 return ret
2085 2085
2086 2086 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2087 2087 join = lambda f: os.path.join(prefix, f)
2088 2088 ret = 0
2089 2089 s = repo.status(match=m, clean=True)
2090 2090 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2091 2091
2092 2092 wctx = repo[None]
2093 2093
2094 2094 if warnings is None:
2095 2095 warnings = []
2096 2096 warn = True
2097 2097 else:
2098 2098 warn = False
2099 2099
2100 2100 subs = sorted(wctx.substate)
2101 2101 total = len(subs)
2102 2102 count = 0
2103 2103 for subpath in subs:
2104 2104 count += 1
2105 2105 submatch = matchmod.subdirmatcher(subpath, m)
2106 2106 if subrepos or m.exact(subpath) or any(submatch.files()):
2107 2107 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2108 2108 sub = wctx.sub(subpath)
2109 2109 try:
2110 2110 if sub.removefiles(submatch, prefix, after, force, subrepos,
2111 2111 warnings):
2112 2112 ret = 1
2113 2113 except error.LookupError:
2114 2114 warnings.append(_("skipping missing subrepository: %s\n")
2115 2115 % join(subpath))
2116 2116 ui.progress(_('searching'), None)
2117 2117
2118 2118 # warn about failure to delete explicit files/dirs
2119 2119 deleteddirs = util.dirs(deleted)
2120 2120 files = m.files()
2121 2121 total = len(files)
2122 2122 count = 0
2123 2123 for f in files:
2124 2124 def insubrepo():
2125 2125 for subpath in wctx.substate:
2126 2126 if f.startswith(subpath + '/'):
2127 2127 return True
2128 2128 return False
2129 2129
2130 2130 count += 1
2131 2131 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2132 2132 isdir = f in deleteddirs or wctx.hasdir(f)
2133 2133 if (f in repo.dirstate or isdir or f == '.'
2134 2134 or insubrepo() or f in subs):
2135 2135 continue
2136 2136
2137 2137 if repo.wvfs.exists(f):
2138 2138 if repo.wvfs.isdir(f):
2139 2139 warnings.append(_('not removing %s: no tracked files\n')
2140 2140 % m.rel(f))
2141 2141 else:
2142 2142 warnings.append(_('not removing %s: file is untracked\n')
2143 2143 % m.rel(f))
2144 2144 # missing files will generate a warning elsewhere
2145 2145 ret = 1
2146 2146 ui.progress(_('deleting'), None)
2147 2147
2148 2148 if force:
2149 2149 list = modified + deleted + clean + added
2150 2150 elif after:
2151 2151 list = deleted
2152 2152 remaining = modified + added + clean
2153 2153 total = len(remaining)
2154 2154 count = 0
2155 2155 for f in remaining:
2156 2156 count += 1
2157 2157 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2158 2158 if ui.verbose or (f in files):
2159 2159 warnings.append(_('not removing %s: file still exists\n')
2160 2160 % m.rel(f))
2161 2161 ret = 1
2162 2162 ui.progress(_('skipping'), None)
2163 2163 else:
2164 2164 list = deleted + clean
2165 2165 total = len(modified) + len(added)
2166 2166 count = 0
2167 2167 for f in modified:
2168 2168 count += 1
2169 2169 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2170 2170 warnings.append(_('not removing %s: file is modified (use -f'
2171 2171 ' to force removal)\n') % m.rel(f))
2172 2172 ret = 1
2173 2173 for f in added:
2174 2174 count += 1
2175 2175 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2176 2176 warnings.append(_("not removing %s: file has been marked for add"
2177 2177 " (use 'hg forget' to undo add)\n") % m.rel(f))
2178 2178 ret = 1
2179 2179 ui.progress(_('skipping'), None)
2180 2180
2181 2181 list = sorted(list)
2182 2182 total = len(list)
2183 2183 count = 0
2184 2184 for f in list:
2185 2185 count += 1
2186 2186 if ui.verbose or not m.exact(f):
2187 2187 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2188 2188 ui.status(_('removing %s\n') % m.rel(f))
2189 2189 ui.progress(_('deleting'), None)
2190 2190
2191 2191 with repo.wlock():
2192 2192 if not after:
2193 2193 for f in list:
2194 2194 if f in added:
2195 2195 continue # we never unlink added files on remove
2196 2196 repo.wvfs.unlinkpath(f, ignoremissing=True)
2197 2197 repo[None].forget(list)
2198 2198
2199 2199 if warn:
2200 2200 for warning in warnings:
2201 2201 ui.warn(warning)
2202 2202
2203 2203 return ret
2204 2204
2205 2205 def _updatecatformatter(fm, ctx, matcher, path, decode):
2206 2206 """Hook for adding data to the formatter used by ``hg cat``.
2207 2207
2208 2208 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2209 2209 this method first."""
2210 2210 data = ctx[path].data()
2211 2211 if decode:
2212 2212 data = ctx.repo().wwritedata(path, data)
2213 2213 fm.startitem()
2214 2214 fm.write('data', '%s', data)
2215 2215 fm.data(abspath=path, path=matcher.rel(path))
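
# Illustrative sketch (not part of the original file): how an extension
# could wrap the hook above. The injected 'example' keyword is hypothetical;
# per the docstring, the original function must be called first.
#
#     from mercurial import cmdutil, extensions
#     def _catformatterwrapper(orig, fm, ctx, matcher, path, decode):
#         orig(fm, ctx, matcher, path, decode)
#         fm.data(example='value')
#     extensions.wrapfunction(cmdutil, '_updatecatformatter',
#                             _catformatterwrapper)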
2216 2216
2217 2217 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2218 2218 err = 1
2219 2219 opts = pycompat.byteskwargs(opts)
2220 2220
2221 2221 def write(path):
2222 2222 filename = None
2223 2223 if fntemplate:
2224 2224 filename = makefilename(ctx, fntemplate,
2225 2225 pathname=os.path.join(prefix, path))
2226 2226 # attempt to create the directory if it does not already exist
2227 2227 try:
2228 2228 os.makedirs(os.path.dirname(filename))
2229 2229 except OSError:
2230 2230 pass
2231 2231 with formatter.maybereopen(basefm, filename, opts) as fm:
2232 2232 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2233 2233
2234 2234 # Automation often uses hg cat on single files, so special case it
2235 2235 # for performance to avoid the cost of parsing the manifest.
2236 2236 if len(matcher.files()) == 1 and not matcher.anypats():
2237 2237 file = matcher.files()[0]
2238 2238 mfl = repo.manifestlog
2239 2239 mfnode = ctx.manifestnode()
2240 2240 try:
2241 2241 if mfnode and mfl[mfnode].find(file)[0]:
2242 2242 scmutil.fileprefetchhooks(repo, ctx, [file])
2243 2243 write(file)
2244 2244 return 0
2245 2245 except KeyError:
2246 2246 pass
2247 2247
2248 2248 files = [f for f in ctx.walk(matcher)]
2249 2249 scmutil.fileprefetchhooks(repo, ctx, files)
2250 2250
2251 2251 for abs in files:
2252 2252 write(abs)
2253 2253 err = 0
2254 2254
2255 2255 for subpath in sorted(ctx.substate):
2256 2256 sub = ctx.sub(subpath)
2257 2257 try:
2258 2258 submatch = matchmod.subdirmatcher(subpath, matcher)
2259 2259
2260 2260 if not sub.cat(submatch, basefm, fntemplate,
2261 2261 os.path.join(prefix, sub._path),
2262 2262 **pycompat.strkwargs(opts)):
2263 2263 err = 0
2264 2264 except error.RepoLookupError:
2265 2265 ui.status(_("skipping missing subrepository: %s\n")
2266 2266 % os.path.join(prefix, subpath))
2267 2267
2268 2268 return err
2269 2269
2270 2270 def commit(ui, repo, commitfunc, pats, opts):
2271 2271 '''commit the specified files or all outstanding changes'''
2272 2272 date = opts.get('date')
2273 2273 if date:
2274 2274 opts['date'] = dateutil.parsedate(date)
2275 2275 message = logmessage(ui, opts)
2276 2276 matcher = scmutil.match(repo[None], pats, opts)
2277 2277
2278 2278 dsguard = None
2279 2279 # extract addremove carefully -- this function can be called from a command
2280 2280 # that doesn't support addremove
2281 2281 if opts.get('addremove'):
2282 2282 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2283 2283 with dsguard or util.nullcontextmanager():
2284 2284 if dsguard:
2285 2285 if scmutil.addremove(repo, matcher, "", opts) != 0:
2286 2286 raise error.Abort(
2287 2287 _("failed to mark all new/missing files as added/removed"))
2288 2288
2289 2289 return commitfunc(ui, repo, message, matcher, opts)
2290 2290
2291 2291 def samefile(f, ctx1, ctx2):
2292 2292 if f in ctx1.manifest():
2293 2293 a = ctx1.filectx(f)
2294 2294 if f in ctx2.manifest():
2295 2295 b = ctx2.filectx(f)
2296 2296 return (not a.cmp(b)
2297 2297 and a.flags() == b.flags())
2298 2298 else:
2299 2299 return False
2300 2300 else:
2301 2301 return f not in ctx2.manifest()
2302 2302
2303 2303 def amend(ui, repo, old, extra, pats, opts):
2304 2304 # avoid cycle context -> subrepo -> cmdutil
2305 2305 from . import context
2306 2306
2307 2307 # amend will reuse the existing user if not specified, but the obsolete
2308 2308 # marker creation requires that the current user's name is specified.
2309 2309 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2310 2310 ui.username() # raise exception if username not set
2311 2311
2312 2312 ui.note(_('amending changeset %s\n') % old)
2313 2313 base = old.p1()
2314 2314
2315 2315 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2316 2316 # Participating changesets:
2317 2317 #
2318 2318 # wctx o - workingctx that contains changes from working copy
2319 2319 # | to go into amending commit
2320 2320 # |
2321 2321 # old o - changeset to amend
2322 2322 # |
2323 2323 # base o - first parent of the changeset to amend
2324 2324 wctx = repo[None]
2325 2325
2326 2326 # Copy to avoid mutating input
2327 2327 extra = extra.copy()
2328 2328 # Update extra dict from amended commit (e.g. to preserve graft
2329 2329 # source)
2330 2330 extra.update(old.extra())
2331 2331
2332 2332 # Also update it from the wctx
2333 2333 extra.update(wctx.extra())
2334 2334
2335 2335 user = opts.get('user') or old.user()
2336 2336 date = opts.get('date') or old.date()
2337 2337
2338 2338 # Parse the date to allow comparison between date and old.date()
2339 2339 date = dateutil.parsedate(date)
2340 2340
2341 2341 if len(old.parents()) > 1:
2342 2342 # ctx.files() isn't reliable for merges, so fall back to the
2343 2343 # slower repo.status() method
2344 2344 files = set([fn for st in repo.status(base, old)[:3]
2345 2345 for fn in st])
2346 2346 else:
2347 2347 files = set(old.files())
2348 2348
2349 2349 # add/remove the files to the working copy if the "addremove" option
2350 2350 # was specified.
2351 2351 matcher = scmutil.match(wctx, pats, opts)
2352 2352 if (opts.get('addremove')
2353 2353 and scmutil.addremove(repo, matcher, "", opts)):
2354 2354 raise error.Abort(
2355 2355 _("failed to mark all new/missing files as added/removed"))
2356 2356
2357 2357 # Check subrepos. This depends on in-place wctx._status update in
2358 2358 # subrepo.precommit(). To minimize the risk of this hack, we do
2359 2359 # nothing if .hgsub does not exist.
2360 2360 if '.hgsub' in wctx or '.hgsub' in old:
2361 2361 subs, commitsubs, newsubstate = subrepoutil.precommit(
2362 2362 ui, wctx, wctx._status, matcher)
2363 2363 # amend should abort if commitsubrepos is enabled
2364 2364 assert not commitsubs
2365 2365 if subs:
2366 2366 subrepoutil.writestate(repo, newsubstate)
2367 2367
2368 2368 ms = mergemod.mergestate.read(repo)
2369 2369 mergeutil.checkunresolved(ms)
2370 2370
2371 2371 filestoamend = set(f for f in wctx.files() if matcher(f))
2372 2372
2373 2373 changes = (len(filestoamend) > 0)
2374 2374 if changes:
2375 2375 # Recompute copies (avoid recording a -> b -> a)
2376 2376 copied = copies.pathcopies(base, wctx, matcher)
2377 2377 if old.p2().rev() != nullrev: # bare 'old.p2' was always truthy
2378 2378 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2379 2379
2380 2380 # Prune files which were reverted by the updates: if old
2381 2381 # introduced file X and the file was renamed in the working
2382 2382 # copy, then those two files are the same and
2383 2383 # we can discard X from our list of files. Likewise if X
2384 2384 # was removed, it's no longer relevant. If X is missing (aka
2385 2385 # deleted), old X must be preserved.
2386 2386 files.update(filestoamend)
2387 2387 files = [f for f in files if (not samefile(f, wctx, base)
2388 2388 or f in wctx.deleted())]
2389 2389
2390 2390 def filectxfn(repo, ctx_, path):
2391 2391 try:
2392 2392 # If the file being considered is not amongst the files
2393 2393 # to be amended, we should return the file context from the
2394 2394 # old changeset. This avoids issues when only some files in
2395 2395 # the working copy are being amended but there are also
2396 2396 # changes to other files from the old changeset.
2397 2397 if path not in filestoamend:
2398 2398 return old.filectx(path)
2399 2399
2400 2400 # Return None for removed files.
2401 2401 if path in wctx.removed():
2402 2402 return None
2403 2403
2404 2404 fctx = wctx[path]
2405 2405 flags = fctx.flags()
2406 2406 mctx = context.memfilectx(repo, ctx_,
2407 2407 fctx.path(), fctx.data(),
2408 2408 islink='l' in flags,
2409 2409 isexec='x' in flags,
2410 2410 copied=copied.get(path))
2411 2411 return mctx
2412 2412 except KeyError:
2413 2413 return None
2414 2414 else:
2415 2415 ui.note(_('copying changeset %s to %s\n') % (old, base))
2416 2416
2417 2417 # Use version of files as in the old cset
2418 2418 def filectxfn(repo, ctx_, path):
2419 2419 try:
2420 2420 return old.filectx(path)
2421 2421 except KeyError:
2422 2422 return None
2423 2423
2424 2424 # See if we got a message from -m or -l, if not, open the editor with
2425 2425 # the message of the changeset to amend.
2426 2426 message = logmessage(ui, opts)
2427 2427
2428 2428 editform = mergeeditform(old, 'commit.amend')
2429 2429 editor = getcommiteditor(editform=editform,
2430 2430 **pycompat.strkwargs(opts))
2431 2431
2432 2432 if not message:
2433 2433 editor = getcommiteditor(edit=True, editform=editform)
2434 2434 message = old.description()
2435 2435
2436 2436 pureextra = extra.copy()
2437 2437 extra['amend_source'] = old.hex()
2438 2438
2439 2439 new = context.memctx(repo,
2440 2440 parents=[base.node(), old.p2().node()],
2441 2441 text=message,
2442 2442 files=files,
2443 2443 filectxfn=filectxfn,
2444 2444 user=user,
2445 2445 date=date,
2446 2446 extra=extra,
2447 2447 editor=editor)
2448 2448
2449 2449 newdesc = changelog.stripdesc(new.description())
2450 2450 if ((not changes)
2451 2451 and newdesc == old.description()
2452 2452 and user == old.user()
2453 2453 and date == old.date()
2454 2454 and pureextra == old.extra()):
2455 2455 # nothing changed. continuing here would create a new node
2456 2456 # anyway because of the amend_source noise.
2457 2457 #
2458 2458 # This is not what we expect from amend.
2459 2459 return old.node()
2460 2460
2461 2461 if opts.get('secret'):
2462 2462 commitphase = 'secret'
2463 2463 else:
2464 2464 commitphase = old.phase()
2465 2465 overrides = {('phases', 'new-commit'): commitphase}
2466 2466 with ui.configoverride(overrides, 'amend'):
2467 2467 newid = repo.commitctx(new)
2468 2468
2469 2469 # Reroute the working copy parent to the new changeset
2470 2470 repo.setparents(newid, nullid)
2471 2471 mapping = {old.node(): (newid,)}
2472 2472 obsmetadata = None
2473 2473 if opts.get('note'):
2474 2474 obsmetadata = {'note': opts['note']}
2475 2475 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
2476 2476
2477 2477 # Fixing the dirstate because localrepo.commitctx does not update
2478 2478 # it. This is rather convenient because we did not need to update
2479 2479 # the dirstate for all the files in the new commit which commitctx
2480 2480 # could have done if it updated the dirstate. Now, we can
2481 2481 # selectively update the dirstate only for the amended files.
2482 2482 dirstate = repo.dirstate
2483 2483
2484 2484 # Update the state of the files which were added and
2485 2485 # modified in the amend to "normal" in the dirstate.
2486 2486 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2487 2487 for f in normalfiles:
2488 2488 dirstate.normal(f)
2489 2489
2490 2490 # Update the state of files which were removed in the amend
2491 2491 # to "removed" in the dirstate.
2492 2492 removedfiles = set(wctx.removed()) & filestoamend
2493 2493 for f in removedfiles:
2494 2494 dirstate.drop(f)
2495 2495
2496 2496 return newid
2497 2497
2498 2498 def commiteditor(repo, ctx, subs, editform=''):
2499 2499 if ctx.description():
2500 2500 return ctx.description()
2501 2501 return commitforceeditor(repo, ctx, subs, editform=editform,
2502 2502 unchangedmessagedetection=True)
2503 2503
2504 2504 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2505 2505 editform='', unchangedmessagedetection=False):
2506 2506 if not extramsg:
2507 2507 extramsg = _("Leave message empty to abort commit.")
2508 2508
2509 2509 forms = [e for e in editform.split('.') if e]
2510 2510 forms.insert(0, 'changeset')
2511 2511 templatetext = None
2512 2512 while forms:
2513 2513 ref = '.'.join(forms)
2514 2514 if repo.ui.config('committemplate', ref):
2515 2515 templatetext = committext = buildcommittemplate(
2516 2516 repo, ctx, subs, extramsg, ref)
2517 2517 break
2518 2518 forms.pop()
2519 2519 else:
2520 2520 committext = buildcommittext(repo, ctx, subs, extramsg)
2521 2521
2522 2522 # run editor in the repository root
2523 2523 olddir = pycompat.getcwd()
2524 2524 os.chdir(repo.root)
2525 2525
2526 2526 # make in-memory changes visible to external process
2527 2527 tr = repo.currenttransaction()
2528 2528 repo.dirstate.write(tr)
2529 2529 pending = tr and tr.writepending() and repo.root
2530 2530
2531 2531 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2532 2532 editform=editform, pending=pending,
2533 2533 repopath=repo.path, action='commit')
2534 2534 text = editortext
2535 2535
2536 2536 # strip away anything below this special string (used for editors that want
2537 2537 # to display the diff)
2538 2538 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2539 2539 if stripbelow:
2540 2540 text = text[:stripbelow.start()]
2541 2541
2542 2542 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2543 2543 os.chdir(olddir)
2544 2544
2545 2545 if finishdesc:
2546 2546 text = finishdesc(text)
2547 2547 if not text.strip():
2548 2548 raise error.Abort(_("empty commit message"))
2549 2549 if unchangedmessagedetection and editortext == templatetext:
2550 2550 raise error.Abort(_("commit message unchanged"))
2551 2551
2552 2552 return text
2553 2553
2554 2554 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2555 2555 ui = repo.ui
2556 2556 spec = formatter.templatespec(ref, None, None)
2557 2557 t = logcmdutil.changesettemplater(ui, repo, spec)
2558 2558 t.t.cache.update((k, templater.unquotestring(v))
2559 2559 for k, v in repo.ui.configitems('committemplate'))
2560 2560
2561 2561 if not extramsg:
2562 2562 extramsg = '' # ensure that extramsg is string
2563 2563
2564 2564 ui.pushbuffer()
2565 2565 t.show(ctx, extramsg=extramsg)
2566 2566 return ui.popbuffer()
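
# Illustrative sketch (assumption, not from this file): because
# commitforceeditor() above walks 'committemplate' keys from the most
# specific editform down to plain 'changeset', a user can override the
# editor text for a single action in their hgrc, e.g.:
#
#     [committemplate]
#     changeset.commit.amend = {desc}\n\n
#         HG: Amending a changeset.
#         HG: {extramsg}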
2567 2567
2568 2568 def hgprefix(msg):
2569 2569 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2570 2570
2571 2571 def buildcommittext(repo, ctx, subs, extramsg):
2572 2572 edittext = []
2573 2573 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2574 2574 if ctx.description():
2575 2575 edittext.append(ctx.description())
2576 2576 edittext.append("")
2577 2577 edittext.append("") # Empty line between message and comments.
2578 2578 edittext.append(hgprefix(_("Enter commit message."
2579 2579 " Lines beginning with 'HG:' are removed.")))
2580 2580 edittext.append(hgprefix(extramsg))
2581 2581 edittext.append("HG: --")
2582 2582 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2583 2583 if ctx.p2():
2584 2584 edittext.append(hgprefix(_("branch merge")))
2585 2585 if ctx.branch():
2586 2586 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2587 2587 if bookmarks.isactivewdirparent(repo):
2588 2588 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2589 2589 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2590 2590 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2591 2591 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2592 2592 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2593 2593 if not added and not modified and not removed:
2594 2594 edittext.append(hgprefix(_("no files changed")))
2595 2595 edittext.append("")
2596 2596
2597 2597 return "\n".join(edittext)
2598 2598
2599 2599 def commitstatus(repo, node, branch, bheads=None, opts=None):
2600 2600 if opts is None:
2601 2601 opts = {}
2602 2602 ctx = repo[node]
2603 2603 parents = ctx.parents()
2604 2604
2605 2605 if (not opts.get('amend') and bheads and node not in bheads and not
2606 2606 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2607 2607 repo.ui.status(_('created new head\n'))
2608 2608 # The message is not printed for initial roots. For the other
2609 2609 # changesets, it is printed in the following situations:
2610 2610 #
2611 2611 # Par column: for the 2 parents with ...
2612 2612 # N: null or no parent
2613 2613 # B: parent is on another named branch
2614 2614 # C: parent is a regular non head changeset
2615 2615 # H: parent was a branch head of the current branch
2616 2616 # Msg column: whether we print "created new head" message
2617 2617 # In the following, it is assumed that there already exists some
2618 2618 # initial branch heads of the current branch, otherwise nothing is
2619 2619 # printed anyway.
2620 2620 #
2621 2621 # Par Msg Comment
2622 2622 # N N y additional topo root
2623 2623 #
2624 2624 # B N y additional branch root
2625 2625 # C N y additional topo head
2626 2626 # H N n usual case
2627 2627 #
2628 2628 # B B y weird additional branch root
2629 2629 # C B y branch merge
2630 2630 # H B n merge with named branch
2631 2631 #
2632 2632 # C C y additional head from merge
2633 2633 # C H n merge with a head
2634 2634 #
2635 2635 # H H n head merge: head count decreases
2636 2636
2637 2637 if not opts.get('close_branch'):
2638 2638 for r in parents:
2639 2639 if r.closesbranch() and r.branch() == branch:
2640 2640 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2641 2641
2642 2642 if repo.ui.debugflag:
2643 2643 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2644 2644 elif repo.ui.verbose:
2645 2645 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2646 2646
2647 2647 def postcommitstatus(repo, pats, opts):
2648 2648 return repo.status(match=scmutil.match(repo[None], pats, opts))
2649 2649
2650 2650 def revert(ui, repo, ctx, parents, *pats, **opts):
2651 2651 opts = pycompat.byteskwargs(opts)
2652 2652 parent, p2 = parents
2653 2653 node = ctx.node()
2654 2654
2655 2655 mf = ctx.manifest()
2656 2656 if node == p2:
2657 2657 parent = p2
2658 2658
2659 2659 # need all matching names in dirstate and manifest of target rev,
2660 2660 # so have to walk both. do not print errors if files exist in one
2661 2661 # but not other. in both cases, filesets should be evaluated against
2662 2662 # workingctx to get consistent result (issue4497). this means 'set:**'
2663 2663 # cannot be used to select missing files from target rev.
2664 2664
2665 2665 # `names` is a mapping for all elements in working copy and target revision
2666 2666 # The mapping is in the form:
2667 2667 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2668 2668 names = {}
2669 2669
2670 2670 with repo.wlock():
2671 2671 ## filling of the `names` mapping
2672 2672 # walk dirstate to fill `names`
2673 2673
2674 2674 interactive = opts.get('interactive', False)
2675 2675 wctx = repo[None]
2676 2676 m = scmutil.match(wctx, pats, opts)
2677 2677
2678 2678 # we'll need this later
2679 2679 targetsubs = sorted(s for s in wctx.substate if m(s))
2680 2680
2681 2681 if not m.always():
2682 2682 matcher = matchmod.badmatch(m, lambda x, y: False)
2683 2683 for abs in wctx.walk(matcher):
2684 2684 names[abs] = m.rel(abs), m.exact(abs)
2685 2685
2686 2686 # walk target manifest to fill `names`
2687 2687
2688 2688 def badfn(path, msg):
2689 2689 if path in names:
2690 2690 return
2691 2691 if path in ctx.substate:
2692 2692 return
2693 2693 path_ = path + '/'
2694 2694 for f in names:
2695 2695 if f.startswith(path_):
2696 2696 return
2697 2697 ui.warn("%s: %s\n" % (m.rel(path), msg))
2698 2698
2699 2699 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2700 2700 if abs not in names:
2701 2701 names[abs] = m.rel(abs), m.exact(abs)
2702 2702
2703 2703 # Find status of all file in `names`.
2704 2704 m = scmutil.matchfiles(repo, names)
2705 2705
2706 2706 changes = repo.status(node1=node, match=m,
2707 2707 unknown=True, ignored=True, clean=True)
2708 2708 else:
2709 2709 changes = repo.status(node1=node, match=m)
2710 2710 for kind in changes:
2711 2711 for abs in kind:
2712 2712 names[abs] = m.rel(abs), m.exact(abs)
2713 2713
2714 2714 m = scmutil.matchfiles(repo, names)
2715 2715
2716 2716 modified = set(changes.modified)
2717 2717 added = set(changes.added)
2718 2718 removed = set(changes.removed)
2719 2719 _deleted = set(changes.deleted)
2720 2720 unknown = set(changes.unknown)
2721 2721 unknown.update(changes.ignored)
2722 2722 clean = set(changes.clean)
2723 2723 modadded = set()
2724 2724
2725 2725 # We need to account for the state of the file in the dirstate,
2726 2726 # even when we revert against something other than the parent. This will
2727 2727 # slightly alter the behavior of revert (doing back up or not, delete
2728 2728 # or just forget etc).
2729 2729 if parent == node:
2730 2730 dsmodified = modified
2731 2731 dsadded = added
2732 2732 dsremoved = removed
2733 2733 # store all local modifications, useful later for rename detection
2734 2734 localchanges = dsmodified | dsadded
2735 2735 modified, added, removed = set(), set(), set()
2736 2736 else:
2737 2737 changes = repo.status(node1=parent, match=m)
2738 2738 dsmodified = set(changes.modified)
2739 2739 dsadded = set(changes.added)
2740 2740 dsremoved = set(changes.removed)
2741 2741 # store all local modifications, useful later for rename detection
2742 2742 localchanges = dsmodified | dsadded
2743 2743
2744 2744 # only take removes between wc and target into account
2745 2745 clean |= dsremoved - removed
2746 2746 dsremoved &= removed
2747 2747 # distinguish between dirstate removes and the others
2748 2748 removed -= dsremoved
2749 2749
2750 2750 modadded = added & dsmodified
2751 2751 added -= modadded
2752 2752
2753 2753 # tell newly modified files apart.
2754 2754 dsmodified &= modified
2755 2755 dsmodified |= modified & dsadded # dirstate added may need backup
2756 2756 modified -= dsmodified
2757 2757
2758 2758 # We need to wait for some post-processing to update this set
2759 2759 # before making the distinction. The dirstate will be used for
2760 2760 # that purpose.
2761 2761 dsadded = added
2762 2762
2763 2763 # in case of merge, files that are actually added can be reported as
2764 2764 # modified, so we need to post-process the result
2765 2765 if p2 != nullid:
2766 2766 mergeadd = set(dsmodified)
2767 2767 for path in dsmodified:
2768 2768 if path in mf:
2769 2769 mergeadd.remove(path)
2770 2770 dsadded |= mergeadd
2771 2771 dsmodified -= mergeadd
2772 2772
2773 2773 # if f is a rename, update `names` to also revert the source
2774 2774 cwd = repo.getcwd()
2775 2775 for f in localchanges:
2776 2776 src = repo.dirstate.copied(f)
2777 2777 # XXX should we check for rename down to target node?
2778 2778 if src and src not in names and repo.dirstate[src] == 'r':
2779 2779 dsremoved.add(src)
2780 2780 names[src] = (repo.pathto(src, cwd), True)
2781 2781
2782 2782 # determine the exact nature of the deleted files
2783 2783 deladded = set(_deleted)
2784 2784 for path in _deleted:
2785 2785 if path in mf:
2786 2786 deladded.remove(path)
2787 2787 deleted = _deleted - deladded
2788 2788
2789 2789 # distinguish between files to forget and the others
2790 2790 added = set()
2791 2791 for abs in dsadded:
2792 2792 if repo.dirstate[abs] != 'a':
2793 2793 added.add(abs)
2794 2794 dsadded -= added
2795 2795
2796 2796 for abs in deladded:
2797 2797 if repo.dirstate[abs] == 'a':
2798 2798 dsadded.add(abs)
2799 2799 deladded -= dsadded
2800 2800
2801 2801 # For files marked as removed, we check if an unknown file is present at
2802 2802 # the same path. If such a file exists it may need to be backed up.
2803 2803 # Making the distinction at this stage keeps the backup logic
2804 2804 # simpler.
2805 2805 removunk = set()
2806 2806 for abs in removed:
2807 2807 target = repo.wjoin(abs)
2808 2808 if os.path.lexists(target):
2809 2809 removunk.add(abs)
2810 2810 removed -= removunk
2811 2811
2812 2812 dsremovunk = set()
2813 2813 for abs in dsremoved:
2814 2814 target = repo.wjoin(abs)
2815 2815 if os.path.lexists(target):
2816 2816 dsremovunk.add(abs)
2817 2817 dsremoved -= dsremovunk
2818 2818
2819 2819 # actions to be actually performed by revert
2820 2820 # (<list of files>, <message>) tuple
2821 2821 actions = {'revert': ([], _('reverting %s\n')),
2822 2822 'add': ([], _('adding %s\n')),
2823 2823 'remove': ([], _('removing %s\n')),
2824 2824 'drop': ([], _('removing %s\n')),
2825 2825 'forget': ([], _('forgetting %s\n')),
2826 2826 'undelete': ([], _('undeleting %s\n')),
2827 2827 'noop': (None, _('no changes needed to %s\n')),
2828 2828 'unknown': (None, _('file not managed: %s\n')),
2829 2829 }
2830 2830
2831 2831 # "constant" that convey the backup strategy.
2832 2832 # All set to `discard` if `no-backup` is set do avoid checking
2833 2833 # no_backup lower in the code.
2834 2834 # These values are ordered for comparison purposes
2835 2835 backupinteractive = 3 # do backup if interactively modified
2836 2836 backup = 2 # unconditionally do backup
2837 2837 check = 1 # check if the existing file differs from target
2838 2838 discard = 0 # never do backup
2839 2839 if opts.get('no_backup'):
2840 2840 backupinteractive = backup = check = discard
2841 2841 if interactive:
2842 2842 dsmodifiedbackup = backupinteractive
2843 2843 else:
2844 2844 dsmodifiedbackup = backup
2845 2845 tobackup = set()
2846 2846
2847 2847 backupanddel = actions['remove']
2848 2848 if not opts.get('no_backup'):
2849 2849 backupanddel = actions['drop']
2850 2850
2851 2851 disptable = (
2852 2852 # dispatch table:
2853 2853 # file state
2854 2854 # action
2855 2855 # make backup
2856 2856
2857 2857 ## Sets whose actions will change files on disk
2858 2858 # Modified compared to target, no local change
2859 2859 (modified, actions['revert'], discard),
2860 2860 # Modified compared to target, but local file is deleted
2861 2861 (deleted, actions['revert'], discard),
2862 2862 # Modified compared to target, local change
2863 2863 (dsmodified, actions['revert'], dsmodifiedbackup),
2864 2864 # Added since target
2865 2865 (added, actions['remove'], discard),
2866 2866 # Added in working directory
2867 2867 (dsadded, actions['forget'], discard),
2868 2868 # Added since target, have local modification
2869 2869 (modadded, backupanddel, backup),
2870 2870 # Added since target but file is missing in working directory
2871 2871 (deladded, actions['drop'], discard),
2872 2872 # Removed since target, before working copy parent
2873 2873 (removed, actions['add'], discard),
2874 2874 # Same as `removed` but an unknown file exists at the same path
2875 2875 (removunk, actions['add'], check),
2876 2876 # Removed since target, marked as such in working copy parent
2877 2877 (dsremoved, actions['undelete'], discard),
2878 2878 # Same as `dsremoved` but an unknown file exists at the same path
2879 2879 (dsremovunk, actions['undelete'], check),
2880 2880 ## the following sets do not result in any file changes
2881 2881 # File with no modification
2882 2882 (clean, actions['noop'], discard),
2883 2883 # Existing file, not tracked anywhere
2884 2884 (unknown, actions['unknown'], discard),
2885 2885 )
2886 2886
2887 2887 for abs, (rel, exact) in sorted(names.items()):
2888 2888 # target file to be touched on disk (relative to cwd)
2889 2889 target = repo.wjoin(abs)
2890 2890 # search the entry in the dispatch table.
2891 2891 # if the file is in any of these sets, it was touched in the working
2892 2892 # directory parent and we are sure it needs to be reverted.
2893 2893 for table, (xlist, msg), dobackup in disptable:
2894 2894 if abs not in table:
2895 2895 continue
2896 2896 if xlist is not None:
2897 2897 xlist.append(abs)
2898 2898 if dobackup:
2899 2899 # If in interactive mode, don't automatically create
2900 2900 # .orig files (issue4793)
2901 2901 if dobackup == backupinteractive:
2902 2902 tobackup.add(abs)
2903 2903 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2904 2904 bakname = scmutil.origpath(ui, repo, rel)
2905 2905 ui.note(_('saving current version of %s as %s\n') %
2906 2906 (rel, bakname))
2907 2907 if not opts.get('dry_run'):
2908 2908 if interactive:
2909 2909 util.copyfile(target, bakname)
2910 2910 else:
2911 2911 util.rename(target, bakname)
2912 2912 if ui.verbose or not exact:
2913 2913 if not isinstance(msg, bytes):
2914 2914 msg = msg(abs)
2915 2915 ui.status(msg % rel)
2916 2916 elif exact:
2917 2917 ui.warn(msg % rel)
2918 2918 break
2919 2919
2920 2920 if not opts.get('dry_run'):
2921 2921 needdata = ('revert', 'add', 'undelete')
2922 2922 if _revertprefetch is not _revertprefetchstub:
2923 2923 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2924 2924 "add a callback to 'scmutil.fileprefetchhooks'",
2925 2925 '4.6', stacklevel=1)
2926 2926 _revertprefetch(repo, ctx,
2927 2927 *[actions[name][0] for name in needdata])
2928 2928 oplist = [actions[name][0] for name in needdata]
2929 2929 prefetch = scmutil.fileprefetchhooks
2930 2930 prefetch(repo, ctx, [f for sublist in oplist for f in sublist])
2931 2931 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2932 2932
2933 2933 if targetsubs:
2934 2934 # Revert the subrepos on the revert list
2935 2935 for sub in targetsubs:
2936 2936 try:
2937 2937 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2938 2938 **pycompat.strkwargs(opts))
2939 2939 except KeyError:
2940 2940 raise error.Abort("subrepository '%s' does not exist in %s!"
2941 2941 % (sub, short(ctx.node())))
2942 2942
2943 2943 def _revertprefetchstub(repo, ctx, *files):
2944 2944 """Stub method for detecting extension wrapping of _revertprefetch(), to
2945 2945 issue a deprecation warning."""
2946 2946
2947 2947 _revertprefetch = _revertprefetchstub
2948 2948
2949 2949 def _performrevert(repo, parents, ctx, actions, interactive=False,
2950 2950 tobackup=None):
2951 2951 """function that actually perform all the actions computed for revert
2952 2952
2953 2953 This is an independent function to let extensions plug in and react to
2954 2954 the imminent revert.
2955 2955
2956 2956 Make sure you have the working directory locked when calling this function.
2957 2957 """
2958 2958 parent, p2 = parents
2959 2959 node = ctx.node()
2960 2960 excluded_files = []
2961 2961
2962 2962 def checkout(f):
2963 2963 fc = ctx[f]
2964 2964 repo.wwrite(f, fc.data(), fc.flags())
2965 2965
2966 2966 def doremove(f):
2967 2967 try:
2968 2968 repo.wvfs.unlinkpath(f)
2969 2969 except OSError:
2970 2970 pass
2971 2971 repo.dirstate.remove(f)
2972 2972
2973 2973 audit_path = pathutil.pathauditor(repo.root, cached=True)
2974 2974 for f in actions['forget'][0]:
2975 2975 if interactive:
2976 2976 choice = repo.ui.promptchoice(
2977 2977 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2978 2978 if choice == 0:
2979 2979 repo.dirstate.drop(f)
2980 2980 else:
2981 2981 excluded_files.append(f)
2982 2982 else:
2983 2983 repo.dirstate.drop(f)
2984 2984 for f in actions['remove'][0]:
2985 2985 audit_path(f)
2986 2986 if interactive:
2987 2987 choice = repo.ui.promptchoice(
2988 2988 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2989 2989 if choice == 0:
2990 2990 doremove(f)
2991 2991 else:
2992 2992 excluded_files.append(f)
2993 2993 else:
2994 2994 doremove(f)
2995 2995 for f in actions['drop'][0]:
2996 2996 audit_path(f)
2997 2997 repo.dirstate.remove(f)
2998 2998
2999 2999 normal = None
3000 3000 if node == parent:
3001 3001 # We're reverting to our parent. If possible, we'd like status
3002 3002 # to report the file as clean. We have to use normallookup for
3003 3003 # merges to avoid losing information about merged/dirty files.
3004 3004 if p2 != nullid:
3005 3005 normal = repo.dirstate.normallookup
3006 3006 else:
3007 3007 normal = repo.dirstate.normal
3008 3008
3009 3009 newlyaddedandmodifiedfiles = set()
3010 3010 if interactive:
3011 3011 # Prompt the user for changes to revert
3012 3012 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3013 3013 m = scmutil.matchfiles(repo, torevert)
3014 3014 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3015 3015 diffopts.nodates = True
3016 3016 diffopts.git = True
3017 3017 operation = 'discard'
3018 3018 reversehunks = True
3019 3019 if node != parent:
3020 3020 operation = 'apply'
3021 3021 reversehunks = False
3022 3022 if reversehunks:
3023 3023 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3024 3024 else:
3025 3025 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3026 3026 originalchunks = patch.parsepatch(diff)
3027 3027
3028 3028 try:
3029 3029
3030 3030 chunks, opts = recordfilter(repo.ui, originalchunks,
3031 3031 operation=operation)
3032 3032 if reversehunks:
3033 3033 chunks = patch.reversehunks(chunks)
3034 3034
3035 3035 except error.PatchError as err:
3036 3036 raise error.Abort(_('error parsing patch: %s') % err)
3037 3037
3038 3038 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3039 3039 if tobackup is None:
3040 3040 tobackup = set()
3041 3041 # Apply changes
3042 3042 fp = stringio()
3043 3043 for c in chunks:
3044 3044 # Create a backup file only if this hunk should be backed up
3045 3045 if ishunk(c) and c.header.filename() in tobackup:
3046 3046 abs = c.header.filename()
3047 3047 target = repo.wjoin(abs)
3048 3048 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3049 3049 util.copyfile(target, bakname)
3050 3050 tobackup.remove(abs)
3051 3051 c.write(fp)
3052 3052 dopatch = fp.tell()
3053 3053 fp.seek(0)
3054 3054 if dopatch:
3055 3055 try:
3056 3056 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3057 3057 except error.PatchError as err:
3058 3058 raise error.Abort(pycompat.bytestr(err))
3059 3059 del fp
3060 3060 else:
3061 3061 for f in actions['revert'][0]:
3062 3062 checkout(f)
3063 3063 if normal:
3064 3064 normal(f)
3065 3065
3066 3066 for f in actions['add'][0]:
3067 3067 # Don't check out modified files; they are already created by the diff
3068 3068 if f not in newlyaddedandmodifiedfiles:
3069 3069 checkout(f)
3070 3070 repo.dirstate.add(f)
3071 3071
3072 3072 normal = repo.dirstate.normallookup
3073 3073 if node == parent and p2 == nullid:
3074 3074 normal = repo.dirstate.normal
3075 3075 for f in actions['undelete'][0]:
3076 3076 checkout(f)
3077 3077 normal(f)
3078 3078
3079 3079 copied = copies.pathcopies(repo[parent], ctx)
3080 3080
3081 3081 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3082 3082 if f in copied:
3083 3083 repo.dirstate.copy(copied[f], f)
3084 3084
3085 3085 class command(registrar.command):
3086 3086 """deprecated: use registrar.command instead"""
3087 3087 def _doregister(self, func, name, *args, **kwargs):
3088 3088 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3089 3089 return super(command, self)._doregister(func, name, *args, **kwargs)
3090 3090
3091 3091 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3092 3092 # commands.outgoing. "missing" is the "missing" attribute of the result
3093 3093 # of "findcommonoutgoing()"
3094 3094 outgoinghooks = util.hooks()
3095 3095
3096 3096 # a list of (ui, repo) functions called by commands.summary
3097 3097 summaryhooks = util.hooks()
3098 3098
3099 3099 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3100 3100 #
3101 3101 # functions should return a tuple of the booleans below if 'changes' is None:
3102 3102 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3103 3103 #
3104 3104 # otherwise, 'changes' is a tuple of tuples below:
3105 3105 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3106 3106 # - (desturl, destbranch, destpeer, outgoing)
3107 3107 summaryremotehooks = util.hooks()
3108 3108
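# Editor's note: an illustrative sketch, not part of this change. An
# extension would typically register one of the hooks above from its
# uisetup(), roughly as follows ('myext' and the hook body are
# hypothetical; util.hooks provides add(source, hook) in this codebase):
#
#   def summaryhook(ui, repo):
#       ui.note('myext: one extra summary line\n')
#
#   def uisetup(ui):
#       cmdutil.summaryhooks.add('myext', summaryhook)
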
3109 3109 # A list of state files kept by multistep operations like graft.
3110 3110 # Since graft cannot be aborted, it is considered 'clearable' by update.
3111 3111 # note: bisect is intentionally excluded
3112 3112 # (state file, clearable, allowcommit, error, hint)
3113 3113 unfinishedstates = [
3114 3114 ('graftstate', True, False, _('graft in progress'),
3115 3115 _("use 'hg graft --continue' or 'hg update' to abort")),
3116 3116 ('updatestate', True, False, _('last update was interrupted'),
3117 3117 _("use 'hg update' to get a consistent checkout"))
3118 3118 ]
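# Editor's note: illustrative sketch, not part of this change. Extensions
# that add their own multistep commands append an entry in the same
# (state file, clearable, allowcommit, error, hint) layout; the rebase
# extension, for example, registers roughly:
#
#   unfinishedstates.append(
#       ['rebasestate', False, False, _('rebase in progress'),
#        _("use 'hg rebase --continue' or 'hg rebase --abort'")])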
3119 3119
3120 3120 def checkunfinished(repo, commit=False):
3121 3121 '''Look for an unfinished multistep operation, like graft, and abort
3122 3122 if found. It's probably good to check this right before
3123 3123 bailifchanged().
3124 3124 '''
3125 3125 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3126 3126 if commit and allowcommit:
3127 3127 continue
3128 3128 if repo.vfs.exists(f):
3129 3129 raise error.Abort(msg, hint=hint)
3130 3130
3131 3131 def clearunfinished(repo):
3132 3132 '''Check for unfinished operations (as above), and clear the ones
3133 3133 that are clearable.
3134 3134 '''
3135 3135 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3136 3136 if not clearable and repo.vfs.exists(f):
3137 3137 raise error.Abort(msg, hint=hint)
3138 3138 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3139 3139 if clearable and repo.vfs.exists(f):
3140 3140 util.unlink(repo.vfs.join(f))
3141 3141
3142 3142 afterresolvedstates = [
3143 3143 ('graftstate',
3144 3144 _('hg graft --continue')),
3145 3145 ]
3146 3146
3147 3147 def howtocontinue(repo):
3148 3148 '''Check for an unfinished operation and return the command to finish
3149 3149 it.
3150 3150
3151 3151 afterresolvedstates tuples define a .hg/{file} and the corresponding
3152 3152 command needed to finish it.
3153 3153
3154 3154 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3155 3155 a boolean.
3156 3156 '''
3157 3157 contmsg = _("continue: %s")
3158 3158 for f, msg in afterresolvedstates:
3159 3159 if repo.vfs.exists(f):
3160 3160 return contmsg % msg, True
3161 3161 if repo[None].dirty(missing=True, merge=False, branch=False):
3162 3162 return contmsg % _("hg commit"), False
3163 3163 return None, None
3164 3164
3165 3165 def checkafterresolved(repo):
3166 3166 '''Inform the user about the next action after completing hg resolve
3167 3167
3168 3168 If there's a matching entry in afterresolvedstates, the message from
3169 3169 howtocontinue() is reported via repo.ui.warn.
3170 3170
3171 3171 Otherwise, it is reported via repo.ui.note.
3172 3172 '''
3173 3173 msg, warning = howtocontinue(repo)
3174 3174 if msg is not None:
3175 3175 if warning:
3176 3176 repo.ui.warn("%s\n" % msg)
3177 3177 else:
3178 3178 repo.ui.note("%s\n" % msg)
3179 3179
3180 3180 def wrongtooltocontinue(repo, task):
3181 3181 '''Raise an abort suggesting how to properly continue if there is an
3182 3182 active task.
3183 3183
3184 3184 Uses howtocontinue() to find the active task.
3185 3185
3186 3186 If there's no active task (only the repo.ui.note suggestion to run
3187 3187 'hg commit'), no hint is offered.
3188 3188 '''
3189 3189 after = howtocontinue(repo)
3190 3190 hint = None
3191 3191 if after[1]:
3192 3192 hint = after[0]
3193 3193 raise error.Abort(_('no %s in progress') % task, hint=hint)
3194 3194
3195 3195 class changeset_printer(logcmdutil.changesetprinter):
3196 3196
3197 3197 def __init__(self, ui, *args, **kwargs):
3198 3198 msg = ("'cmdutil.changeset_printer' is deprecated, "
3199 3199 "use 'logcmdutil.changesetprinter'")
3200 3200 ui.deprecwarn(msg, "4.6")
3201 3201 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3202 3202
3203 3203 def displaygraph(ui, *args, **kwargs):
3204 3204 msg = ("'cmdutil.displaygraph' is deprecated, "
3205 3205 "use 'logcmdutil.displaygraph'")
3206 3206 ui.deprecwarn(msg, "4.6")
3207 3207 return logcmdutil.displaygraph(ui, *args, **kwargs)
3208 3208
3209 3209 def show_changeset(ui, *args, **kwargs):
3210 3210 msg = ("'cmdutil.show_changeset' is deprecated, "
3211 3211 "use 'logcmdutil.changesetdisplayer'")
3212 3212 ui.deprecwarn(msg, "4.6")
3213 3213 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
@@ -1,595 +1,594
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally in a way
22 22 that isn't supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.verbose = verbose
49 49 ... ui.pushbuffer()
50 50 ... try:
51 51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 52 ... pycompat.byteskwargs(opts)))
53 53 ... finally:
54 54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55 55
56 56 Basic example:
57 57
58 58 >>> def files(ui, fm):
59 59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 60 ... for f in files:
61 61 ... fm.startitem()
62 62 ... fm.write(b'path', b'%s', f[0])
63 63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 65 ... fm.data(size=f[1])
66 66 ... fm.plain(b'\\n')
67 67 ... fm.end()
68 68 >>> show(files)
69 69 foo
70 70 bar
71 71 >>> show(files, verbose=True)
72 72 foo 1970-01-01 00:00:00
73 73 bar 1970-01-01 00:00:01
74 74 >>> show(files, template=b'json')
75 75 [
76 76 {
77 77 "date": [0, 0],
78 78 "path": "foo",
79 79 "size": 123
80 80 },
81 81 {
82 82 "date": [1, 0],
83 83 "path": "bar",
84 84 "size": 456
85 85 }
86 86 ]
87 87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 88 path: foo
89 89 date: 1970-01-01T00:00:00+00:00
90 90 path: bar
91 91 date: 1970-01-01T00:00:01+00:00
92 92
93 93 Nested example:
94 94
95 95 >>> def subrepos(ui, fm):
96 96 ... fm.startitem()
97 97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 98 ... files(ui, fm.nested(b'files'))
99 99 ... fm.end()
100 100 >>> show(subrepos)
101 101 [baz]
102 102 foo
103 103 bar
104 104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 105 baz: foo, bar
106 106 """
107 107
108 108 from __future__ import absolute_import, print_function
109 109
110 110 import collections
111 111 import contextlib
112 112 import itertools
113 113 import os
114 114
115 115 from .i18n import _
116 116 from .node import (
117 117 hex,
118 118 short,
119 119 )
120 120
121 121 from . import (
122 122 error,
123 123 pycompat,
124 124 templatefilters,
125 125 templatekw,
126 126 templater,
127 127 templateutil,
128 128 util,
129 129 )
130 130 from .utils import dateutil
131 131
132 132 pickle = util.pickle
133 133
134 134 class _nullconverter(object):
135 135 '''convert non-primitive data types to be processed by formatter'''
136 136
137 137 # set to True if context object should be stored as item
138 138 storecontext = False
139 139
140 140 @staticmethod
141 141 def formatdate(date, fmt):
142 142 '''convert date tuple to appropriate format'''
143 143 return date
144 144 @staticmethod
145 145 def formatdict(data, key, value, fmt, sep):
146 146 '''convert dict or key-value pairs to appropriate dict format'''
147 147 # use plain dict instead of util.sortdict so that data can be
148 148 # serialized as a builtin dict in pickle output
149 149 return dict(data)
150 150 @staticmethod
151 151 def formatlist(data, name, fmt, sep):
152 152 '''convert iterable to appropriate list format'''
153 153 return list(data)
154 154
155 155 class baseformatter(object):
156 156 def __init__(self, ui, topic, opts, converter):
157 157 self._ui = ui
158 158 self._topic = topic
159 159 self._style = opts.get("style")
160 160 self._template = opts.get("template")
161 161 self._converter = converter
162 162 self._item = None
163 163 # function to convert node to string suitable for this output
164 164 self.hexfunc = hex
165 165 def __enter__(self):
166 166 return self
167 167 def __exit__(self, exctype, excvalue, traceback):
168 168 if exctype is None:
169 169 self.end()
170 170 def _showitem(self):
171 171 '''show a formatted item once all data is collected'''
172 172 def startitem(self):
173 173 '''begin an item in the format list'''
174 174 if self._item is not None:
175 175 self._showitem()
176 176 self._item = {}
177 177 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
178 178 '''convert date tuple to appropriate format'''
179 179 return self._converter.formatdate(date, fmt)
180 180 def formatdict(self, data, key='key', value='value', fmt=None, sep=' '):
181 181 '''convert dict or key-value pairs to appropriate dict format'''
182 182 return self._converter.formatdict(data, key, value, fmt, sep)
183 183 def formatlist(self, data, name, fmt=None, sep=' '):
184 184 '''convert iterable to appropriate list format'''
185 185 # name is mandatory argument for now, but it could be optional if
186 186 # we have default template keyword, e.g. {item}
187 187 return self._converter.formatlist(data, name, fmt, sep)
188 188 def context(self, **ctxs):
189 189 '''insert context objects to be used to render template keywords'''
190 190 ctxs = pycompat.byteskwargs(ctxs)
191 191 assert all(k in {'ctx', 'fctx'} for k in ctxs)
192 192 if self._converter.storecontext:
193 193 self._item.update(ctxs)
194 194 def data(self, **data):
195 195 '''insert data into item that's not shown in default output'''
196 196 data = pycompat.byteskwargs(data)
197 197 self._item.update(data)
198 198 def write(self, fields, deftext, *fielddata, **opts):
199 199 '''do default text output while assigning data to item'''
200 200 fieldkeys = fields.split()
201 201 assert len(fieldkeys) == len(fielddata)
202 202 self._item.update(zip(fieldkeys, fielddata))
203 203 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
204 204 '''do conditional write (primarily for plain formatter)'''
205 205 fieldkeys = fields.split()
206 206 assert len(fieldkeys) == len(fielddata)
207 207 self._item.update(zip(fieldkeys, fielddata))
208 208 def plain(self, text, **opts):
209 209 '''show raw text for non-templated mode'''
210 210 def isplain(self):
211 211 '''check for plain formatter usage'''
212 212 return False
213 213 def nested(self, field):
214 214 '''sub formatter to store nested data in the specified field'''
215 215 self._item[field] = data = []
216 216 return _nestedformatter(self._ui, self._converter, data)
217 217 def end(self):
218 218 '''end output for the formatter'''
219 219 if self._item is not None:
220 220 self._showitem()
221 221
222 222 def nullformatter(ui, topic):
223 223 '''formatter that prints nothing'''
224 224 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
225 225
226 226 class _nestedformatter(baseformatter):
227 227 '''build sub items and store them in the parent formatter'''
228 228 def __init__(self, ui, converter, data):
229 229 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
230 230 self._data = data
231 231 def _showitem(self):
232 232 self._data.append(self._item)
233 233
234 234 def _iteritems(data):
235 235 '''iterate key-value pairs in stable order'''
236 236 if isinstance(data, dict):
237 237 return sorted(data.iteritems())
238 238 return data
239 239
240 240 class _plainconverter(object):
241 241 '''convert non-primitive data types to text'''
242 242
243 243 storecontext = False
244 244
245 245 @staticmethod
246 246 def formatdate(date, fmt):
247 247 '''stringify date tuple in the given format'''
248 248 return dateutil.datestr(date, fmt)
249 249 @staticmethod
250 250 def formatdict(data, key, value, fmt, sep):
251 251 '''stringify key-value pairs separated by sep'''
252 252 prefmt = pycompat.identity
253 253 if fmt is None:
254 254 fmt = '%s=%s'
255 255 prefmt = pycompat.bytestr
256 256 return sep.join(fmt % (prefmt(k), prefmt(v))
257 257 for k, v in _iteritems(data))
258 258 @staticmethod
259 259 def formatlist(data, name, fmt, sep):
260 260 '''stringify iterable separated by sep'''
261 261 prefmt = pycompat.identity
262 262 if fmt is None:
263 263 fmt = '%s'
264 264 prefmt = pycompat.bytestr
265 265 return sep.join(fmt % prefmt(e) for e in data)
266 266
267 267 class plainformatter(baseformatter):
268 268 '''the default text output scheme'''
269 269 def __init__(self, ui, out, topic, opts):
270 270 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
271 271 if ui.debugflag:
272 272 self.hexfunc = hex
273 273 else:
274 274 self.hexfunc = short
275 275 if ui is out:
276 276 self._write = ui.write
277 277 else:
278 278 self._write = lambda s, **opts: out.write(s)
279 279 def startitem(self):
280 280 pass
281 281 def data(self, **data):
282 282 pass
283 283 def write(self, fields, deftext, *fielddata, **opts):
284 284 self._write(deftext % fielddata, **opts)
285 285 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
286 286 '''do conditional write'''
287 287 if cond:
288 288 self._write(deftext % fielddata, **opts)
289 289 def plain(self, text, **opts):
290 290 self._write(text, **opts)
291 291 def isplain(self):
292 292 return True
293 293 def nested(self, field):
294 294 # nested data will be directly written to ui
295 295 return self
296 296 def end(self):
297 297 pass
298 298
299 299 class debugformatter(baseformatter):
300 300 def __init__(self, ui, out, topic, opts):
301 301 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
302 302 self._out = out
303 303 self._out.write("%s = [\n" % self._topic)
304 304 def _showitem(self):
305 305 self._out.write(' %s,\n' % pycompat.byterepr(self._item))
306 306 def end(self):
307 307 baseformatter.end(self)
308 308 self._out.write("]\n")
309 309
310 310 class pickleformatter(baseformatter):
311 311 def __init__(self, ui, out, topic, opts):
312 312 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
313 313 self._out = out
314 314 self._data = []
315 315 def _showitem(self):
316 316 self._data.append(self._item)
317 317 def end(self):
318 318 baseformatter.end(self)
319 319 self._out.write(pickle.dumps(self._data))
320 320
321 321 class jsonformatter(baseformatter):
322 322 def __init__(self, ui, out, topic, opts):
323 323 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
324 324 self._out = out
325 325 self._out.write("[")
326 326 self._first = True
327 327 def _showitem(self):
328 328 if self._first:
329 329 self._first = False
330 330 else:
331 331 self._out.write(",")
332 332
333 333 self._out.write("\n {\n")
334 334 first = True
335 335 for k, v in sorted(self._item.items()):
336 336 if first:
337 337 first = False
338 338 else:
339 339 self._out.write(",\n")
340 340 u = templatefilters.json(v, paranoid=False)
341 341 self._out.write(' "%s": %s' % (k, u))
342 342 self._out.write("\n }")
343 343 def end(self):
344 344 baseformatter.end(self)
345 345 self._out.write("\n]\n")
346 346
347 347 class _templateconverter(object):
348 348 '''convert non-primitive data types to be processed by templater'''
349 349
350 350 storecontext = True
351 351
352 352 @staticmethod
353 353 def formatdate(date, fmt):
354 354 '''return date tuple'''
355 355 return date
356 356 @staticmethod
357 357 def formatdict(data, key, value, fmt, sep):
358 358 '''build object that can be evaluated as either plain string or dict'''
359 359 data = util.sortdict(_iteritems(data))
360 360 def f():
361 361 yield _plainconverter.formatdict(data, key, value, fmt, sep)
362 362 return templateutil.hybriddict(data, key=key, value=value, fmt=fmt,
363 363 gen=f)
364 364 @staticmethod
365 365 def formatlist(data, name, fmt, sep):
366 366 '''build object that can be evaluated as either plain string or list'''
367 367 data = list(data)
368 368 def f():
369 369 yield _plainconverter.formatlist(data, name, fmt, sep)
370 370 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
371 371
372 372 class templateformatter(baseformatter):
373 373 def __init__(self, ui, out, topic, opts):
374 374 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
375 375 self._out = out
376 376 spec = lookuptemplate(ui, topic, opts.get('template', ''))
377 377 self._tref = spec.ref
378 378 self._t = loadtemplater(ui, spec, defaults=templatekw.keywords,
379 379 resources=templateresources(ui),
380 380 cache=templatekw.defaulttempl)
381 381 self._parts = templatepartsmap(spec, self._t,
382 382 ['docheader', 'docfooter', 'separator'])
383 383 self._counter = itertools.count()
384 384 self._renderitem('docheader', {})
385 385
386 386 def _showitem(self):
387 387 item = self._item.copy()
388 388 item['index'] = index = next(self._counter)
389 389 if index > 0:
390 390 self._renderitem('separator', {})
391 391 self._renderitem(self._tref, item)
392 392
393 393 def _renderitem(self, part, item):
394 394 if part not in self._parts:
395 395 return
396 396 ref = self._parts[part]
397
398 props = {}
399 # explicitly-defined fields precede templatekw
400 props.update(item)
401 if 'ctx' in item or 'fctx' in item:
402 # but template resources must be always available
403 props['revcache'] = {}
404 self._out.write(self._t.render(ref, props))
397 self._out.write(self._t.render(ref, item))
405 398
406 399 def end(self):
407 400 baseformatter.end(self)
408 401 self._renderitem('docfooter', {})
409 402
410 403 templatespec = collections.namedtuple(r'templatespec',
411 404 r'ref tmpl mapfile')
412 405
413 406 def lookuptemplate(ui, topic, tmpl):
414 407 """Find the template matching the given -T/--template spec 'tmpl'
415 408
416 409 'tmpl' can be any of the following:
417 410
418 411 - a literal template (e.g. '{rev}')
419 412 - a map-file name or path (e.g. 'changelog')
420 413 - a reference to [templates] in config file
421 414 - a path to raw template file
422 415
423 416 A map file defines a stand-alone template environment. If a map file is
424 417 selected, all templates defined in the file will be loaded, and the
425 418 template matching the given topic will be rendered. Aliases won't be
426 419 loaded from user config, but from the map file.
427 420
428 421 If no map file is selected, all templates in the [templates] section will
429 422 be available, as well as aliases in [templatealias].
430 423 """
431 424
432 425 # looks like a literal template?
433 426 if '{' in tmpl:
434 427 return templatespec('', tmpl, None)
435 428
436 429 # perhaps a stock style?
437 430 if not os.path.split(tmpl)[0]:
438 431 mapname = (templater.templatepath('map-cmdline.' + tmpl)
439 432 or templater.templatepath(tmpl))
440 433 if mapname and os.path.isfile(mapname):
441 434 return templatespec(topic, None, mapname)
442 435
443 436 # perhaps it's a reference to [templates]
444 437 if ui.config('templates', tmpl):
445 438 return templatespec(tmpl, None, None)
446 439
447 440 if tmpl == 'list':
448 441 ui.write(_("available styles: %s\n") % templater.stylelist())
449 442 raise error.Abort(_("specify a template"))
450 443
451 444 # perhaps it's a path to a map or a template
452 445 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
453 446 # is it a mapfile for a style?
454 447 if os.path.basename(tmpl).startswith("map-"):
455 448 return templatespec(topic, None, os.path.realpath(tmpl))
456 449 with util.posixfile(tmpl, 'rb') as f:
457 450 tmpl = f.read()
458 451 return templatespec('', tmpl, None)
459 452
460 453 # constant string?
461 454 return templatespec('', tmpl, None)
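# Editor's note: illustrative sketch, not part of this change. Under the
# rules above, -T/--template specs roughly resolve as follows (the map file
# path and the [templates] entry are hypothetical):
#
#   lookuptemplate(ui, 'log', '{rev}\n')
#       -> templatespec('', '{rev}\n', None)          # literal template
#   lookuptemplate(ui, 'log', 'changelog')
#       -> templatespec('log', None, '<dir>/map-cmdline.changelog')
#   lookuptemplate(ui, 'log', 'myentry')   # with "[templates] myentry = ..."
#       -> templatespec('myentry', None, None)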
462 455
463 456 def templatepartsmap(spec, t, partnames):
464 457 """Create a mapping of {part: ref}"""
465 458 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
466 459 if spec.mapfile:
467 460 partsmap.update((p, p) for p in partnames if p in t)
468 461 elif spec.ref:
469 462 for part in partnames:
470 463 ref = '%s:%s' % (spec.ref, part) # select config sub-section
471 464 if ref in t:
472 465 partsmap[part] = ref
473 466 return partsmap
474 467
475 468 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
476 469 """Create a templater from either a literal template or loading from
477 470 a map file"""
478 471 assert not (spec.tmpl and spec.mapfile)
479 472 if spec.mapfile:
480 473 frommapfile = templater.templater.frommapfile
481 474 return frommapfile(spec.mapfile, defaults=defaults, resources=resources,
482 475 cache=cache)
483 476 return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources,
484 477 cache=cache)
485 478
486 479 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
487 480 """Create a templater from a string template 'tmpl'"""
488 481 aliases = ui.configitems('templatealias')
489 482 t = templater.templater(defaults=defaults, resources=resources,
490 483 cache=cache, aliases=aliases)
491 484 t.cache.update((k, templater.unquotestring(v))
492 485 for k, v in ui.configitems('templates'))
493 486 if tmpl:
494 487 t.cache[''] = tmpl
495 488 return t
496 489
497 490 class templateresources(templater.resourcemapper):
498 491 """Resource mapper designed for the default templatekw and function"""
499 492
500 493 def __init__(self, ui, repo=None):
501 494 self._resmap = {
502 495 'cache': {}, # for templatekw/funcs to store reusable data
503 496 'repo': repo,
504 497 'ui': ui,
505 498 }
506 499
507 500 def availablekeys(self, context, mapping):
508 501 return {k for k, g in self._gettermap.iteritems()
509 502 if g(self, context, mapping, k) is not None}
510 503
511 504 def knownkeys(self):
512 505 return self._knownkeys
513 506
514 507 def lookup(self, context, mapping, key):
515 508 get = self._gettermap.get(key)
516 509 if not get:
517 510 return None
518 511 return get(self, context, mapping, key)
519 512
520 513 def populatemap(self, context, origmapping, newmapping):
521 return {}
514 mapping = {}
515 if self._hasctx(newmapping):
516 mapping['revcache'] = {} # per-ctx cache
517 return mapping
522 518
523 519 def _getsome(self, context, mapping, key):
524 520 v = mapping.get(key)
525 521 if v is not None:
526 522 return v
527 523 return self._resmap.get(key)
528 524
525 def _hasctx(self, mapping):
526 return 'ctx' in mapping or 'fctx' in mapping
527
529 528 def _getctx(self, context, mapping, key):
530 529 ctx = mapping.get('ctx')
531 530 if ctx is not None:
532 531 return ctx
533 532 fctx = mapping.get('fctx')
534 533 if fctx is not None:
535 534 return fctx.changectx()
536 535
537 536 def _getrepo(self, context, mapping, key):
538 537 ctx = self._getctx(context, mapping, 'ctx')
539 538 if ctx is not None:
540 539 return ctx.repo()
541 540 return self._getsome(context, mapping, key)
542 541
543 542 _gettermap = {
544 543 'cache': _getsome,
545 544 'ctx': _getctx,
546 545 'fctx': _getsome,
547 546 'repo': _getrepo,
548 'revcache': _getsome, # per-ctx cache; set later
547 'revcache': _getsome,
549 548 'ui': _getsome,
550 549 }
551 550 _knownkeys = set(_gettermap.keys())
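# Editor's note: illustrative sketch, not part of this change. With
# populatemap() above creating 'revcache' once per ctx/fctx mapping,
# a keyword can memoize per-changeset work through the resource API,
# roughly like this (the keyword and helper are hypothetical):
#
#   def showexpensive(context, mapping):
#       revcache = context.resource(mapping, 'revcache')
#       if 'expensive' not in revcache:
#           ctx = context.resource(mapping, 'ctx')
#           revcache['expensive'] = computeexpensive(ctx)
#       return revcache['expensive']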
552 551
553 552 def formatter(ui, out, topic, opts):
554 553 template = opts.get("template", "")
555 554 if template == "json":
556 555 return jsonformatter(ui, out, topic, opts)
557 556 elif template == "pickle":
558 557 return pickleformatter(ui, out, topic, opts)
559 558 elif template == "debug":
560 559 return debugformatter(ui, out, topic, opts)
561 560 elif template != "":
562 561 return templateformatter(ui, out, topic, opts)
563 562 # developer config: ui.formatdebug
564 563 elif ui.configbool('ui', 'formatdebug'):
565 564 return debugformatter(ui, out, topic, opts)
566 565 # deprecated config: ui.formatjson
567 566 elif ui.configbool('ui', 'formatjson'):
568 567 return jsonformatter(ui, out, topic, opts)
569 568 return plainformatter(ui, out, topic, opts)
570 569
571 570 @contextlib.contextmanager
572 571 def openformatter(ui, filename, topic, opts):
573 572 """Create a formatter that writes outputs to the specified file
574 573
575 574 Must be invoked using the 'with' statement.
576 575 """
577 576 with util.posixfile(filename, 'wb') as out:
578 577 with formatter(ui, out, topic, opts) as fm:
579 578 yield fm
580 579
581 580 @contextlib.contextmanager
582 581 def _neverending(fm):
583 582 yield fm
584 583
585 584 def maybereopen(fm, filename, opts):
586 585 """Create a formatter backed by file if filename specified, else return
587 586 the given formatter
588 587
589 588 Must be invoked using the 'with' statement. This will never call fm.end()
590 589 of the given formatter.
591 590 """
592 591 if filename:
593 592 return openformatter(fm._ui, filename, fm._topic, opts)
594 593 else:
595 594 return _neverending(fm)
@@ -1,704 +1,703
1 1 # hgweb/webutil.py - utility library for the web interface.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import copy
12 12 import difflib
13 13 import os
14 14 import re
15 15
16 16 from ..i18n import _
17 17 from ..node import hex, nullid, short
18 18
19 19 from .common import (
20 20 ErrorResponse,
21 21 HTTP_BAD_REQUEST,
22 22 HTTP_NOT_FOUND,
23 23 paritygen,
24 24 )
25 25
26 26 from .. import (
27 27 context,
28 28 error,
29 29 match,
30 30 mdiff,
31 31 obsutil,
32 32 patch,
33 33 pathutil,
34 34 pycompat,
35 35 templatefilters,
36 36 templatekw,
37 37 ui as uimod,
38 38 util,
39 39 )
40 40
41 41 from ..utils import (
42 42 stringutil,
43 43 )
44 44
45 45 def up(p):
46 46 if p[0:1] != "/":
47 47 p = "/" + p
48 48 if p[-1:] == "/":
49 49 p = p[:-1]
50 50 up = os.path.dirname(p)
51 51 if up == "/":
52 52 return "/"
53 53 return up + "/"
54 54
55 55 def _navseq(step, firststep=None):
56 56 if firststep:
57 57 yield firststep
58 58 if firststep >= 20 and firststep <= 40:
59 59 firststep = 50
60 60 yield firststep
61 61 assert step > 0
62 62 assert firststep > 0
63 63 while step <= firststep:
64 64 step *= 10
65 65 while True:
66 66 yield 1 * step
67 67 yield 3 * step
68 68 step *= 10
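# Editor's note: illustrative sketch, not part of this change. The generator
# above yields ever-growing jump distances; for example (values follow from
# the logic above, shown doctest-style):
#
#   >>> import itertools
#   >>> list(itertools.islice(_navseq(1, 60), 6))
#   [60, 100, 300, 1000, 3000, 10000]
#   >>> list(itertools.islice(_navseq(1, 30), 6))
#   [30, 50, 100, 300, 1000, 3000]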
69 69
70 70 class revnav(object):
71 71
72 72 def __init__(self, repo):
73 73 """Navigation generation object
74 74
75 75 :repo: repo object we generate nav for
76 76 """
77 77 # used for hex generation
78 78 self._revlog = repo.changelog
79 79
80 80 def __nonzero__(self):
81 81 """return True if there is any revision to navigate over"""
82 82 return self._first() is not None
83 83
84 84 __bool__ = __nonzero__
85 85
86 86 def _first(self):
87 87 """return the minimum non-filtered changeset or None"""
88 88 try:
89 89 return next(iter(self._revlog))
90 90 except StopIteration:
91 91 return None
92 92
93 93 def hex(self, rev):
94 94 return hex(self._revlog.node(rev))
95 95
96 96 def gen(self, pos, pagelen, limit):
97 97 """computes label and revision id for navigation link
98 98
99 99 :pos: is the revision relative to which we generate navigation.
100 100 :pagelen: the size of each navigation page
101 101 :limit: how far shall we link
102 102
103 103 The return is:
104 104 - a single element tuple
105 105 - containing a dictionary with a `before` and `after` key
106 106 - values are generator functions taking an arbitrary number of kwargs
107 107 - yield items are dictionaries with `label` and `node` keys
108 108 """
109 109 if not self:
110 110 # empty repo
111 111 return ({'before': (), 'after': ()},)
112 112
113 113 targets = []
114 114 for f in _navseq(1, pagelen):
115 115 if f > limit:
116 116 break
117 117 targets.append(pos + f)
118 118 targets.append(pos - f)
119 119 targets.sort()
120 120
121 121 first = self._first()
122 122 navbefore = [("(%i)" % first, self.hex(first))]
123 123 navafter = []
124 124 for rev in targets:
125 125 if rev not in self._revlog:
126 126 continue
127 127 if pos < rev < limit:
128 128 navafter.append(("+%d" % abs(rev - pos), self.hex(rev)))
129 129 if 0 < rev < pos:
130 130 navbefore.append(("-%d" % abs(rev - pos), self.hex(rev)))
131 131
132 132
133 133 navafter.append(("tip", "tip"))
134 134
135 135 data = lambda i: {"label": i[0], "node": i[1]}
136 136 return ({'before': lambda **map: (data(i) for i in navbefore),
137 137 'after': lambda **map: (data(i) for i in navafter)},)
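# Editor's note: illustrative sketch, not part of this change. A caller
# would consume the structure returned by gen() roughly as follows
# ('pos', 'pagelen' and 'tiprev' are hypothetical values):
#
#   navdict = revnav(repo).gen(pos, pagelen, tiprev)[0]
#   for entry in navdict['before']():
#       # each entry is {'label': ..., 'node': ...}
#       ui.write('%s -> %s\n' % (entry['label'], entry['node']))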
138 138
139 139 class filerevnav(revnav):
140 140
141 141 def __init__(self, repo, path):
142 142 """Navigation generation object
143 143
144 144 :repo: repo object we generate nav for
145 145 :path: path of the file we generate nav for
146 146 """
147 147 # used for iteration
148 148 self._changelog = repo.unfiltered().changelog
149 149 # used for hex generation
150 150 self._revlog = repo.file(path)
151 151
152 152 def hex(self, rev):
153 153 return hex(self._changelog.node(self._revlog.linkrev(rev)))
154 154
155 155 class _siblings(object):
156 156 def __init__(self, siblings=None, hiderev=None):
157 157 if siblings is None:
158 158 siblings = []
159 159 self.siblings = [s for s in siblings if s.node() != nullid]
160 160 if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev:
161 161 self.siblings = []
162 162
163 163 def __iter__(self):
164 164 for s in self.siblings:
165 165 d = {
166 166 'node': s.hex(),
167 167 'rev': s.rev(),
168 168 'user': s.user(),
169 169 'date': s.date(),
170 170 'description': s.description(),
171 171 'branch': s.branch(),
172 172 }
173 173 if util.safehasattr(s, 'path'):
174 174 d['file'] = s.path()
175 175 yield d
176 176
177 177 def __len__(self):
178 178 return len(self.siblings)
179 179
180 180 def difffeatureopts(req, ui, section):
181 181 diffopts = patch.difffeatureopts(ui, untrusted=True,
182 182 section=section, whitespace=True)
183 183
184 184 for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
185 185 v = req.qsparams.get(k)
186 186 if v is not None:
187 187 v = stringutil.parsebool(v)
188 188 setattr(diffopts, k, v if v is not None else True)
189 189
190 190 return diffopts
191 191
192 192 def annotate(req, fctx, ui):
193 193 diffopts = difffeatureopts(req, ui, 'annotate')
194 194 return fctx.annotate(follow=True, diffopts=diffopts)
195 195
196 196 def parents(ctx, hide=None):
197 197 if isinstance(ctx, context.basefilectx):
198 198 introrev = ctx.introrev()
199 199 if ctx.changectx().rev() != introrev:
200 200 return _siblings([ctx.repo()[introrev]], hide)
201 201 return _siblings(ctx.parents(), hide)
202 202
203 203 def children(ctx, hide=None):
204 204 return _siblings(ctx.children(), hide)
205 205
206 206 def renamelink(fctx):
207 207 r = fctx.renamed()
208 208 if r:
209 209 return [{'file': r[0], 'node': hex(r[1])}]
210 210 return []
211 211
212 212 def nodetagsdict(repo, node):
213 213 return [{"name": i} for i in repo.nodetags(node)]
214 214
215 215 def nodebookmarksdict(repo, node):
216 216 return [{"name": i} for i in repo.nodebookmarks(node)]
217 217
218 218 def nodebranchdict(repo, ctx):
219 219 branches = []
220 220 branch = ctx.branch()
221 221 # If this is an empty repo, ctx.node() == nullid,
222 222 # ctx.branch() == 'default'.
223 223 try:
224 224 branchnode = repo.branchtip(branch)
225 225 except error.RepoLookupError:
226 226 branchnode = None
227 227 if branchnode == ctx.node():
228 228 branches.append({"name": branch})
229 229 return branches
230 230
231 231 def nodeinbranch(repo, ctx):
232 232 branches = []
233 233 branch = ctx.branch()
234 234 try:
235 235 branchnode = repo.branchtip(branch)
236 236 except error.RepoLookupError:
237 237 branchnode = None
238 238 if branch != 'default' and branchnode != ctx.node():
239 239 branches.append({"name": branch})
240 240 return branches
241 241
242 242 def nodebranchnodefault(ctx):
243 243 branches = []
244 244 branch = ctx.branch()
245 245 if branch != 'default':
246 246 branches.append({"name": branch})
247 247 return branches
248 248
249 249 def showtag(repo, tmpl, t1, node=nullid, **args):
250 250 args = pycompat.byteskwargs(args)
251 251 for t in repo.nodetags(node):
252 252 lm = args.copy()
253 253 lm['tag'] = t
254 254 yield tmpl.generate(t1, lm)
255 255
256 256 def showbookmark(repo, tmpl, t1, node=nullid, **args):
257 257 args = pycompat.byteskwargs(args)
258 258 for t in repo.nodebookmarks(node):
259 259 lm = args.copy()
260 260 lm['bookmark'] = t
261 261 yield tmpl.generate(t1, lm)
262 262
263 263 def branchentries(repo, stripecount, limit=0):
264 264 tips = []
265 265 heads = repo.heads()
266 266 parity = paritygen(stripecount)
267 267 sortkey = lambda item: (not item[1], item[0].rev())
268 268
269 269 def entries(**map):
270 270 count = 0
271 271 if not tips:
272 272 for tag, hs, tip, closed in repo.branchmap().iterbranches():
273 273 tips.append((repo[tip], closed))
274 274 for ctx, closed in sorted(tips, key=sortkey, reverse=True):
275 275 if limit > 0 and count >= limit:
276 276 return
277 277 count += 1
278 278 if closed:
279 279 status = 'closed'
280 280 elif ctx.node() not in heads:
281 281 status = 'inactive'
282 282 else:
283 283 status = 'open'
284 284 yield {
285 285 'parity': next(parity),
286 286 'branch': ctx.branch(),
287 287 'status': status,
288 288 'node': ctx.hex(),
289 289 'date': ctx.date()
290 290 }
291 291
292 292 return entries
293 293
294 294 def cleanpath(repo, path):
295 295 path = path.lstrip('/')
296 296 return pathutil.canonpath(repo.root, '', path)
297 297
298 298 def changeidctx(repo, changeid):
299 299 try:
300 300 ctx = repo[changeid]
301 301 except error.RepoError:
302 302 man = repo.manifestlog._revlog
303 303 ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]
304 304
305 305 return ctx
306 306
307 307 def changectx(repo, req):
308 308 changeid = "tip"
309 309 if 'node' in req.qsparams:
310 310 changeid = req.qsparams['node']
311 311 ipos = changeid.find(':')
312 312 if ipos != -1:
313 313 changeid = changeid[(ipos + 1):]
314 314 elif 'manifest' in req.qsparams:
315 315 changeid = req.qsparams['manifest']
316 316
317 317 return changeidctx(repo, changeid)
318 318
319 319 def basechangectx(repo, req):
320 320 if 'node' in req.qsparams:
321 321 changeid = req.qsparams['node']
322 322 ipos = changeid.find(':')
323 323 if ipos != -1:
324 324 changeid = changeid[:ipos]
325 325 return changeidctx(repo, changeid)
326 326
327 327 return None
328 328
329 329 def filectx(repo, req):
330 330 if 'file' not in req.qsparams:
331 331 raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
332 332 path = cleanpath(repo, req.qsparams['file'])
333 333 if 'node' in req.qsparams:
334 334 changeid = req.qsparams['node']
335 335 elif 'filenode' in req.qsparams:
336 336 changeid = req.qsparams['filenode']
337 337 else:
338 338 raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
339 339 try:
340 340 fctx = repo[changeid][path]
341 341 except error.RepoError:
342 342 fctx = repo.filectx(path, fileid=changeid)
343 343
344 344 return fctx
345 345
346 346 def linerange(req):
347 347 linerange = req.qsparams.getall('linerange')
348 348 if not linerange:
349 349 return None
350 350 if len(linerange) > 1:
351 351 raise ErrorResponse(HTTP_BAD_REQUEST,
352 352 'redundant linerange parameter')
353 353 try:
354 354 fromline, toline = map(int, linerange[0].split(':', 1))
355 355 except ValueError:
356 356 raise ErrorResponse(HTTP_BAD_REQUEST,
357 357 'invalid linerange parameter')
358 358 try:
359 359 return util.processlinerange(fromline, toline)
360 360 except error.ParseError as exc:
361 361 raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc))
362 362
363 363 def formatlinerange(fromline, toline):
364 364 return '%d:%d' % (fromline + 1, toline)
365 365
366 366 def succsandmarkers(context, mapping):
367 367 repo = context.resource(mapping, 'repo')
368 368 for item in templatekw.showsuccsandmarkers(context, mapping):
369 369 item['successors'] = _siblings(repo[successor]
370 370 for successor in item['successors'])
371 371 yield item
372 372
373 373 # teach the templater that succsandmarkers has switched to the (context, mapping) API
374 374 succsandmarkers._requires = {'repo', 'ctx'}
375 375
376 376 def whyunstable(context, mapping):
377 377 repo = context.resource(mapping, 'repo')
378 378 ctx = context.resource(mapping, 'ctx')
379 379
380 380 entries = obsutil.whyunstable(repo, ctx)
381 381 for entry in entries:
382 382 if entry.get('divergentnodes'):
383 383 entry['divergentnodes'] = _siblings(entry['divergentnodes'])
384 384 yield entry
385 385
386 386 whyunstable._requires = {'repo', 'ctx'}
387 387
388 388 def commonentry(repo, ctx):
389 389 node = ctx.node()
390 390 return {
391 391 # TODO: perhaps ctx.changectx() should be assigned if ctx is a
392 392 # filectx, but I'm not entirely sure that would always work because
393 393 # fctx.parents() != fctx.changectx.parents() for example.
394 394 'ctx': ctx,
395 'revcache': {},
396 395 'rev': ctx.rev(),
397 396 'node': hex(node),
398 397 'author': ctx.user(),
399 398 'desc': ctx.description(),
400 399 'date': ctx.date(),
401 400 'extra': ctx.extra(),
402 401 'phase': ctx.phasestr(),
403 402 'obsolete': ctx.obsolete(),
404 403 'succsandmarkers': succsandmarkers,
405 404 'instabilities': [{"instability": i} for i in ctx.instabilities()],
406 405 'whyunstable': whyunstable,
407 406 'branch': nodebranchnodefault(ctx),
408 407 'inbranch': nodeinbranch(repo, ctx),
409 408 'branches': nodebranchdict(repo, ctx),
410 409 'tags': nodetagsdict(repo, node),
411 410 'bookmarks': nodebookmarksdict(repo, node),
412 411 'parent': lambda **x: parents(ctx),
413 412 'child': lambda **x: children(ctx),
414 413 }
415 414
416 415 def changelistentry(web, ctx):
417 416 '''Obtain a dictionary to be used for entries in a changelist.
418 417
419 418 This function is called when producing items for the "entries" list passed
420 419 to the "shortlog" and "changelog" templates.
421 420 '''
422 421 repo = web.repo
423 422 rev = ctx.rev()
424 423 n = ctx.node()
425 424 showtags = showtag(repo, web.tmpl, 'changelogtag', n)
426 425 files = listfilediffs(web.tmpl, ctx.files(), n, web.maxfiles)
427 426
428 427 entry = commonentry(repo, ctx)
429 428 entry.update(
430 429 allparents=lambda **x: parents(ctx),
431 430 parent=lambda **x: parents(ctx, rev - 1),
432 431 child=lambda **x: children(ctx, rev + 1),
433 432 changelogtag=showtags,
434 433 files=files,
435 434 )
436 435 return entry
437 436
438 437 def symrevorshortnode(req, ctx):
439 438 if 'node' in req.qsparams:
440 439 return templatefilters.revescape(req.qsparams['node'])
441 440 else:
442 441 return short(ctx.node())
443 442
444 443 def changesetentry(web, ctx):
445 444 '''Obtain a dictionary to be used to render the "changeset" template.'''
446 445
447 446 showtags = showtag(web.repo, web.tmpl, 'changesettag', ctx.node())
448 447 showbookmarks = showbookmark(web.repo, web.tmpl, 'changesetbookmark',
449 448 ctx.node())
450 449 showbranch = nodebranchnodefault(ctx)
451 450
452 451 files = []
453 452 parity = paritygen(web.stripecount)
454 453 for blockno, f in enumerate(ctx.files()):
455 454 template = 'filenodelink' if f in ctx else 'filenolink'
456 455 files.append(web.tmpl.generate(template, {
457 456 'node': ctx.hex(),
458 457 'file': f,
459 458 'blockno': blockno + 1,
460 459 'parity': next(parity),
461 460 }))
462 461
463 462 basectx = basechangectx(web.repo, web.req)
464 463 if basectx is None:
465 464 basectx = ctx.p1()
466 465
467 466 style = web.config('web', 'style')
468 467 if 'style' in web.req.qsparams:
469 468 style = web.req.qsparams['style']
470 469
471 470 diff = diffs(web, ctx, basectx, None, style)
472 471
473 472 parity = paritygen(web.stripecount)
474 473 diffstatsgen = diffstatgen(ctx, basectx)
475 474 diffstats = diffstat(web.tmpl, ctx, diffstatsgen, parity)
476 475
477 476 return dict(
478 477 diff=diff,
479 478 symrev=symrevorshortnode(web.req, ctx),
480 479 basenode=basectx.hex(),
481 480 changesettag=showtags,
482 481 changesetbookmark=showbookmarks,
483 482 changesetbranch=showbranch,
484 483 files=files,
485 484 diffsummary=lambda **x: diffsummary(diffstatsgen),
486 485 diffstat=diffstats,
487 486 archives=web.archivelist(ctx.hex()),
488 487 **pycompat.strkwargs(commonentry(web.repo, ctx)))
489 488
490 489 def listfilediffs(tmpl, files, node, max):
491 490 for f in files[:max]:
492 491 yield tmpl.generate('filedifflink', {'node': hex(node), 'file': f})
493 492 if len(files) > max:
494 493 yield tmpl.generate('fileellipses', {})
495 494
496 495 def diffs(web, ctx, basectx, files, style, linerange=None,
497 496 lineidprefix=''):
498 497
499 498 def prettyprintlines(lines, blockno):
500 499 for lineno, l in enumerate(lines, 1):
501 500 difflineno = "%d.%d" % (blockno, lineno)
502 501 if l.startswith('+'):
503 502 ltype = "difflineplus"
504 503 elif l.startswith('-'):
505 504 ltype = "difflineminus"
506 505 elif l.startswith('@'):
507 506 ltype = "difflineat"
508 507 else:
509 508 ltype = "diffline"
510 509 yield web.tmpl.generate(ltype, {
511 510 'line': l,
512 511 'lineno': lineno,
513 512 'lineid': lineidprefix + "l%s" % difflineno,
514 513 'linenumber': "% 8s" % difflineno,
515 514 })
516 515
517 516 repo = web.repo
518 517 if files:
519 518 m = match.exact(repo.root, repo.getcwd(), files)
520 519 else:
521 520 m = match.always(repo.root, repo.getcwd())
522 521
523 522 diffopts = patch.diffopts(repo.ui, untrusted=True)
524 523 node1 = basectx.node()
525 524 node2 = ctx.node()
526 525 parity = paritygen(web.stripecount)
527 526
528 527 diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts)
529 528 for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
530 529 if style != 'raw':
531 530 header = header[1:]
532 531 lines = [h + '\n' for h in header]
533 532 for hunkrange, hunklines in hunks:
534 533 if linerange is not None and hunkrange is not None:
535 534 s1, l1, s2, l2 = hunkrange
536 535 if not mdiff.hunkinrange((s2, l2), linerange):
537 536 continue
538 537 lines.extend(hunklines)
539 538 if lines:
540 539 yield web.tmpl.generate('diffblock', {
541 540 'parity': next(parity),
542 541 'blockno': blockno,
543 542 'lines': prettyprintlines(lines, blockno),
544 543 })
545 544
546 545 def compare(tmpl, context, leftlines, rightlines):
547 546 '''Generator function that provides side-by-side comparison data.'''
548 547
549 548 def compline(type, leftlineno, leftline, rightlineno, rightline):
550 549 lineid = leftlineno and ("l%d" % leftlineno) or ''
551 550 lineid += rightlineno and ("r%d" % rightlineno) or ''
552 551 llno = '%d' % leftlineno if leftlineno else ''
553 552 rlno = '%d' % rightlineno if rightlineno else ''
554 553 return tmpl.generate('comparisonline', {
555 554 'type': type,
556 555 'lineid': lineid,
557 556 'leftlineno': leftlineno,
558 557 'leftlinenumber': "% 6s" % llno,
559 558 'leftline': leftline or '',
560 559 'rightlineno': rightlineno,
561 560 'rightlinenumber': "% 6s" % rlno,
562 561 'rightline': rightline or '',
563 562 })
564 563
565 564 def getblock(opcodes):
566 565 for type, llo, lhi, rlo, rhi in opcodes:
567 566 len1 = lhi - llo
568 567 len2 = rhi - rlo
569 568 count = min(len1, len2)
570 569 for i in xrange(count):
571 570 yield compline(type=type,
572 571 leftlineno=llo + i + 1,
573 572 leftline=leftlines[llo + i],
574 573 rightlineno=rlo + i + 1,
575 574 rightline=rightlines[rlo + i])
576 575 if len1 > len2:
577 576 for i in xrange(llo + count, lhi):
578 577 yield compline(type=type,
579 578 leftlineno=i + 1,
580 579 leftline=leftlines[i],
581 580 rightlineno=None,
582 581 rightline=None)
583 582 elif len2 > len1:
584 583 for i in xrange(rlo + count, rhi):
585 584 yield compline(type=type,
586 585 leftlineno=None,
587 586 leftline=None,
588 587 rightlineno=i + 1,
589 588 rightline=rightlines[i])
590 589
591 590 s = difflib.SequenceMatcher(None, leftlines, rightlines)
592 591 if context < 0:
593 592 yield tmpl.generate('comparisonblock',
594 593 {'lines': getblock(s.get_opcodes())})
595 594 else:
596 595 for oc in s.get_grouped_opcodes(n=context):
597 596 yield tmpl.generate('comparisonblock', {'lines': getblock(oc)})
598 597
599 598 def diffstatgen(ctx, basectx):
600 599 '''Generator function that provides the diffstat data.'''
601 600
602 601 stats = patch.diffstatdata(
603 602 util.iterlines(ctx.diff(basectx, noprefix=False)))
604 603 maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
605 604 while True:
606 605 yield stats, maxname, maxtotal, addtotal, removetotal, binary
607 606
608 607 def diffsummary(statgen):
609 608 '''Return a short summary of the diff.'''
610 609
611 610 stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
612 611 return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
613 612 len(stats), addtotal, removetotal)
614 613
615 614 def diffstat(tmpl, ctx, statgen, parity):
616 615 '''Return a diffstat template for each file in the diff.'''
617 616
618 617 stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
619 618 files = ctx.files()
620 619
621 620 def pct(i):
622 621 if maxtotal == 0:
623 622 return 0
624 623 return (float(i) / maxtotal) * 100
625 624
626 625 fileno = 0
627 626 for filename, adds, removes, isbinary in stats:
628 627 template = 'diffstatlink' if filename in files else 'diffstatnolink'
629 628 total = adds + removes
630 629 fileno += 1
631 630 yield tmpl.generate(template, {
632 631 'node': ctx.hex(),
633 632 'file': filename,
634 633 'fileno': fileno,
635 634 'total': total,
636 635 'addpct': pct(adds),
637 636 'removepct': pct(removes),
638 637 'parity': next(parity),
639 638 })
640 639
641 640 class sessionvars(object):
642 641 def __init__(self, vars, start='?'):
643 642 self.start = start
644 643 self.vars = vars
645 644 def __getitem__(self, key):
646 645 return self.vars[key]
647 646 def __setitem__(self, key, value):
648 647 self.vars[key] = value
649 648 def __copy__(self):
650 649 return sessionvars(copy.copy(self.vars), self.start)
651 650 def __iter__(self):
652 651 separator = self.start
653 652 for key, value in sorted(self.vars.iteritems()):
654 653 yield {'name': key,
655 654 'value': pycompat.bytestr(value),
656 655 'separator': separator,
657 656 }
658 657 separator = '&'
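# Editor's note: illustrative sketch, not part of this change. Iterating a
# sessionvars instance yields one dict per query-string variable with the
# appropriate separator, e.g. (shown doctest-style, Python 2 semantics):
#
#   >>> v = sessionvars({'revcount': 20, 'style': 'gitweb'})
#   >>> [(d['separator'], d['name'], d['value']) for d in v]
#   [('?', 'revcount', '20'), ('&', 'style', 'gitweb')]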
659 658
660 659 class wsgiui(uimod.ui):
661 660 # default termwidth breaks under mod_wsgi
662 661 def termwidth(self):
663 662 return 80
664 663
665 664 def getwebsubs(repo):
666 665 websubtable = []
667 666 websubdefs = repo.ui.configitems('websub')
668 667 # we must maintain interhg backwards compatibility
669 668 websubdefs += repo.ui.configitems('interhg')
670 669 for key, pattern in websubdefs:
671 670 # grab the delimiter from the character after the "s"
672 671 unesc = pattern[1:2]
673 672 delim = re.escape(unesc)
674 673
675 674 # identify portions of the pattern, taking care to avoid escaped
676 675 # delimiters. the replace format and flags are optional, but
677 676 # delimiters are required.
678 677 match = re.match(
679 678 br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
680 679 % (delim, delim, delim), pattern)
681 680 if not match:
682 681 repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
683 682 % (key, pattern))
684 683 continue
685 684
686 685 # we need to unescape the delimiter for regexp and format
687 686 delim_re = re.compile(br'(?<!\\)\\%s' % delim)
688 687 regexp = delim_re.sub(unesc, match.group(1))
689 688 format = delim_re.sub(unesc, match.group(2))
690 689
691 690 # the pattern allows for 6 regexp flags, so set them if necessary
692 691 flagin = match.group(3)
693 692 flags = 0
694 693 if flagin:
695 694 for flag in flagin.upper():
696 695 flags |= re.__dict__[flag]
697 696
698 697 try:
699 698 regexp = re.compile(regexp, flags)
700 699 websubtable.append((regexp, format))
701 700 except re.error:
702 701 repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
703 702 % (key, regexp))
704 703 return websubtable
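# Editor's note: illustrative sketch, not part of this change. A [websub]
# entry in the s/pattern/replacement/flags form parsed above might look
# like this in hgrc (the bug-tracker URL is hypothetical):
#
#   [websub]
#   issues = s|issue(\d+)|<a href="https://bts.example.org/issue\1">issue\1</a>|i
#
# getwebsubs() takes '|' as the delimiter, compiles the regexp with re.I,
# and pairs it with the replacement format string.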
@@ -1,941 +1,941
1 1 # logcmdutil.py - utility for log-like commands
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import itertools
11 11 import os
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 )
18 18
19 19 from . import (
20 20 dagop,
21 21 encoding,
22 22 error,
23 23 formatter,
24 24 graphmod,
25 25 match as matchmod,
26 26 mdiff,
27 27 patch,
28 28 pathutil,
29 29 pycompat,
30 30 revset,
31 31 revsetlang,
32 32 scmutil,
33 33 smartset,
34 34 templatekw,
35 35 templater,
36 36 util,
37 37 )
38 38 from .utils import (
39 39 dateutil,
40 40 stringutil,
41 41 )
42 42
43 43 def getlimit(opts):
44 44 """get the log limit according to option -l/--limit"""
45 45 limit = opts.get('limit')
46 46 if limit:
47 47 try:
48 48 limit = int(limit)
49 49 except ValueError:
50 50 raise error.Abort(_('limit must be a positive integer'))
51 51 if limit <= 0:
52 52 raise error.Abort(_('limit must be positive'))
53 53 else:
54 54 limit = None
55 55 return limit
56 56
57 57 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
58 58 changes=None, stat=False, fp=None, prefix='',
59 59 root='', listsubrepos=False, hunksfilterfn=None):
60 60 '''show diff or diffstat.'''
61 61 if root:
62 62 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
63 63 else:
64 64 relroot = ''
65 65 if relroot != '':
66 66 # XXX relative roots currently don't work if the root is within a
67 67 # subrepo
68 68 uirelroot = match.uipath(relroot)
69 69 relroot += '/'
70 70 for matchroot in match.files():
71 71 if not matchroot.startswith(relroot):
72 72 ui.warn(_('warning: %s not inside relative root %s\n') % (
73 73 match.uipath(matchroot), uirelroot))
74 74
75 75 if stat:
76 76 diffopts = diffopts.copy(context=0, noprefix=False)
77 77 width = 80
78 78 if not ui.plain():
79 79 width = ui.termwidth()
80 80
81 81 chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
82 82 prefix=prefix, relroot=relroot,
83 83 hunksfilterfn=hunksfilterfn)
84 84
85 85 if fp is not None or ui.canwritewithoutlabels():
86 86 out = fp or ui
87 87 if stat:
88 88 chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
89 89 for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
90 90 out.write(chunk)
91 91 else:
92 92 if stat:
93 93 chunks = patch.diffstatui(util.iterlines(chunks), width=width)
94 94 else:
95 95 chunks = patch.difflabel(lambda chunks, **kwargs: chunks, chunks,
96 96 opts=diffopts)
97 97 if ui.canbatchlabeledwrites():
98 98 def gen():
99 99 for chunk, label in chunks:
100 100 yield ui.label(chunk, label=label)
101 101 for chunk in util.filechunkiter(util.chunkbuffer(gen())):
102 102 ui.write(chunk)
103 103 else:
104 104 for chunk, label in chunks:
105 105 ui.write(chunk, label=label)
106 106
107 107 if listsubrepos:
108 108 ctx1 = repo[node1]
109 109 ctx2 = repo[node2]
110 110 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
111 111 tempnode2 = node2
112 112 try:
113 113 if node2 is not None:
114 114 tempnode2 = ctx2.substate[subpath][1]
115 115 except KeyError:
116 116 # A subrepo that existed in node1 was deleted between node1 and
117 117 # node2 (inclusive). Thus, ctx2's substate won't contain that
118 118 # subpath. The best we can do is to ignore it.
119 119 tempnode2 = None
120 120 submatch = matchmod.subdirmatcher(subpath, match)
121 121 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
122 122 stat=stat, fp=fp, prefix=prefix)
123 123
124 124 class changesetdiffer(object):
125 125 """Generate diff of changeset with pre-configured filtering functions"""
126 126
127 127 def _makefilematcher(self, ctx):
128 128 return scmutil.matchall(ctx.repo())
129 129
130 130 def _makehunksfilter(self, ctx):
131 131 return None
132 132
133 133 def showdiff(self, ui, ctx, diffopts, stat=False):
134 134 repo = ctx.repo()
135 135 node = ctx.node()
136 136 prev = ctx.p1().node()
137 137 diffordiffstat(ui, repo, diffopts, prev, node,
138 138 match=self._makefilematcher(ctx), stat=stat,
139 139 hunksfilterfn=self._makehunksfilter(ctx))
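# Editor's note: illustrative sketch, not part of this change. Callers
# customize the diff output by overriding the factory methods, roughly:
#
#   class _filtereddiffer(changesetdiffer):
#       def _makefilematcher(self, ctx):
#           # limit the diff to files given on a hypothetical command line
#           return scmutil.match(ctx, ['path/to/interesting-file'], {})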
140 140
141 141 def changesetlabels(ctx):
142 142 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
143 143 if ctx.obsolete():
144 144 labels.append('changeset.obsolete')
145 145 if ctx.isunstable():
146 146 labels.append('changeset.unstable')
147 147 for instability in ctx.instabilities():
148 148 labels.append('instability.%s' % instability)
149 149 return ' '.join(labels)
150 150
151 151 class changesetprinter(object):
152 152 '''show changeset information when templating is not requested.'''
153 153
154 154 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
155 155 self.ui = ui
156 156 self.repo = repo
157 157 self.buffered = buffered
158 158 self._differ = differ or changesetdiffer()
159 159 self.diffopts = diffopts or {}
160 160 self.header = {}
161 161 self.hunk = {}
162 162 self.lastheader = None
163 163 self.footer = None
164 164 self._columns = templatekw.getlogcolumns()
165 165
166 166 def flush(self, ctx):
167 167 rev = ctx.rev()
168 168 if rev in self.header:
169 169 h = self.header[rev]
170 170 if h != self.lastheader:
171 171 self.lastheader = h
172 172 self.ui.write(h)
173 173 del self.header[rev]
174 174 if rev in self.hunk:
175 175 self.ui.write(self.hunk[rev])
176 176 del self.hunk[rev]
177 177
178 178 def close(self):
179 179 if self.footer:
180 180 self.ui.write(self.footer)
181 181
182 182 def show(self, ctx, copies=None, **props):
183 183 props = pycompat.byteskwargs(props)
184 184 if self.buffered:
185 185 self.ui.pushbuffer(labeled=True)
186 186 self._show(ctx, copies, props)
187 187 self.hunk[ctx.rev()] = self.ui.popbuffer()
188 188 else:
189 189 self._show(ctx, copies, props)
190 190
191 191 def _show(self, ctx, copies, props):
192 192 '''show a single changeset or file revision'''
193 193 changenode = ctx.node()
194 194 rev = ctx.rev()
195 195
196 196 if self.ui.quiet:
197 197 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
198 198 label='log.node')
199 199 return
200 200
201 201 columns = self._columns
202 202 self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
203 203 label=changesetlabels(ctx))
204 204
205 205 # branches are shown before any other names for backwards
206 206 # compatibility
207 207 branch = ctx.branch()
208 208 # don't show the default branch name
209 209 if branch != 'default':
210 210 self.ui.write(columns['branch'] % branch, label='log.branch')
211 211
212 212 for nsname, ns in self.repo.names.iteritems():
213 213 # the 'branches' namespace has special logic handled above, so we just
214 214 # skip it here
215 215 if nsname == 'branches':
216 216 continue
217 217 # we will use the templatename as the color name since those two
218 218 # should be the same
219 219 for name in ns.names(self.repo, changenode):
220 220 self.ui.write(ns.logfmt % name,
221 221 label='log.%s' % ns.colorname)
222 222 if self.ui.debugflag:
223 223 self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
224 224 for pctx in scmutil.meaningfulparents(self.repo, ctx):
225 225 label = 'log.parent changeset.%s' % pctx.phasestr()
226 226 self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
227 227 label=label)
228 228
229 229 if self.ui.debugflag and rev is not None:
230 230 mnode = ctx.manifestnode()
231 231 mrev = self.repo.manifestlog._revlog.rev(mnode)
232 232 self.ui.write(columns['manifest']
233 233 % scmutil.formatrevnode(self.ui, mrev, mnode),
234 234 label='ui.debug log.manifest')
235 235 self.ui.write(columns['user'] % ctx.user(), label='log.user')
236 236 self.ui.write(columns['date'] % dateutil.datestr(ctx.date()),
237 237 label='log.date')
238 238
239 239 if ctx.isunstable():
240 240 instabilities = ctx.instabilities()
241 241 self.ui.write(columns['instability'] % ', '.join(instabilities),
242 242 label='log.instability')
243 243
244 244 elif ctx.obsolete():
245 245 self._showobsfate(ctx)
246 246
247 247 self._exthook(ctx)
248 248
249 249 if self.ui.debugflag:
250 250 files = ctx.p1().status(ctx)[:3]
251 251 for key, value in zip(['files', 'files+', 'files-'], files):
252 252 if value:
253 253 self.ui.write(columns[key] % " ".join(value),
254 254 label='ui.debug log.files')
255 255 elif ctx.files() and self.ui.verbose:
256 256 self.ui.write(columns['files'] % " ".join(ctx.files()),
257 257 label='ui.note log.files')
258 258 if copies and self.ui.verbose:
259 259 copies = ['%s (%s)' % c for c in copies]
260 260 self.ui.write(columns['copies'] % ' '.join(copies),
261 261 label='ui.note log.copies')
262 262
263 263 extra = ctx.extra()
264 264 if extra and self.ui.debugflag:
265 265 for key, value in sorted(extra.items()):
266 266 self.ui.write(columns['extra']
267 267 % (key, stringutil.escapestr(value)),
268 268 label='ui.debug log.extra')
269 269
270 270 description = ctx.description().strip()
271 271 if description:
272 272 if self.ui.verbose:
273 273 self.ui.write(_("description:\n"),
274 274 label='ui.note log.description')
275 275 self.ui.write(description,
276 276 label='ui.note log.description')
277 277 self.ui.write("\n\n")
278 278 else:
279 279 self.ui.write(columns['summary'] % description.splitlines()[0],
280 280 label='log.summary')
281 281 self.ui.write("\n")
282 282
283 283 self._showpatch(ctx)
284 284
285 285 def _showobsfate(self, ctx):
286 286 # TODO: do not depend on templater
287 287 tres = formatter.templateresources(self.repo.ui, self.repo)
288 288 t = formatter.maketemplater(self.repo.ui, '{join(obsfate, "\n")}',
289 289 defaults=templatekw.keywords,
290 290 resources=tres)
291 obsfate = t.renderdefault({'ctx': ctx, 'revcache': {}}).splitlines()
291 obsfate = t.renderdefault({'ctx': ctx}).splitlines()
292 292
293 293 if obsfate:
294 294 for obsfateline in obsfate:
295 295 self.ui.write(self._columns['obsolete'] % obsfateline,
296 296 label='log.obsfate')
297 297
298 298 def _exthook(self, ctx):
299 299 '''empty method used by extension as a hook point
300 300 '''
301 301
302 302 def _showpatch(self, ctx):
303 303 stat = self.diffopts.get('stat')
304 304 diff = self.diffopts.get('patch')
305 305 diffopts = patch.diffallopts(self.ui, self.diffopts)
306 306 if stat:
307 307 self._differ.showdiff(self.ui, ctx, diffopts, stat=True)
308 308 if stat and diff:
309 309 self.ui.write("\n")
310 310 if diff:
311 311 self._differ.showdiff(self.ui, ctx, diffopts, stat=False)
312 312 if stat or diff:
313 313 self.ui.write("\n")
314 314
315 315 class jsonchangeset(changesetprinter):
316 316 '''format changeset information.'''
317 317
318 318 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
319 319 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
320 320 self.cache = {}
321 321 self._first = True
322 322
323 323 def close(self):
324 324 if not self._first:
325 325 self.ui.write("\n]\n")
326 326 else:
327 327 self.ui.write("[]\n")
328 328
329 329 def _show(self, ctx, copies, props):
330 330 '''show a single changeset or file revision'''
331 331 rev = ctx.rev()
332 332 if rev is None:
333 333 jrev = jnode = 'null'
334 334 else:
335 335 jrev = '%d' % rev
336 336 jnode = '"%s"' % hex(ctx.node())
337 337 j = encoding.jsonescape
338 338
339 339 if self._first:
340 340 self.ui.write("[\n {")
341 341 self._first = False
342 342 else:
343 343 self.ui.write(",\n {")
344 344
345 345 if self.ui.quiet:
346 346 self.ui.write(('\n "rev": %s') % jrev)
347 347 self.ui.write((',\n "node": %s') % jnode)
348 348 self.ui.write('\n }')
349 349 return
350 350
351 351 self.ui.write(('\n "rev": %s') % jrev)
352 352 self.ui.write((',\n "node": %s') % jnode)
353 353 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
354 354 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
355 355 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
356 356 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
357 357 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
358 358
359 359 self.ui.write((',\n "bookmarks": [%s]') %
360 360 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
361 361 self.ui.write((',\n "tags": [%s]') %
362 362 ", ".join('"%s"' % j(t) for t in ctx.tags()))
363 363 self.ui.write((',\n "parents": [%s]') %
364 364 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
365 365
366 366 if self.ui.debugflag:
367 367 if rev is None:
368 368 jmanifestnode = 'null'
369 369 else:
370 370 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
371 371 self.ui.write((',\n "manifest": %s') % jmanifestnode)
372 372
373 373 self.ui.write((',\n "extra": {%s}') %
374 374 ", ".join('"%s": "%s"' % (j(k), j(v))
375 375 for k, v in ctx.extra().items()))
376 376
377 377 files = ctx.p1().status(ctx)
378 378 self.ui.write((',\n "modified": [%s]') %
379 379 ", ".join('"%s"' % j(f) for f in files[0]))
380 380 self.ui.write((',\n "added": [%s]') %
381 381 ", ".join('"%s"' % j(f) for f in files[1]))
382 382 self.ui.write((',\n "removed": [%s]') %
383 383 ", ".join('"%s"' % j(f) for f in files[2]))
384 384
385 385 elif self.ui.verbose:
386 386 self.ui.write((',\n "files": [%s]') %
387 387 ", ".join('"%s"' % j(f) for f in ctx.files()))
388 388
389 389 if copies:
390 390 self.ui.write((',\n "copies": {%s}') %
391 391 ", ".join('"%s": "%s"' % (j(k), j(v))
392 392 for k, v in copies))
393 393
394 394 stat = self.diffopts.get('stat')
395 395 diff = self.diffopts.get('patch')
396 396 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
397 397 if stat:
398 398 self.ui.pushbuffer()
399 399 self._differ.showdiff(self.ui, ctx, diffopts, stat=True)
400 400 self.ui.write((',\n "diffstat": "%s"')
401 401 % j(self.ui.popbuffer()))
402 402 if diff:
403 403 self.ui.pushbuffer()
404 404 self._differ.showdiff(self.ui, ctx, diffopts, stat=False)
405 405 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
406 406
407 407 self.ui.write("\n }")
408 408
409 409 class changesettemplater(changesetprinter):
410 410 '''format changeset information.
411 411
412 412 Note: there are a variety of convenience functions to build a
413 413 changesettemplater for common cases. See functions such as:
414 414 maketemplater, changesetdisplayer, buildcommittemplate, or other
415 415 functions that use changesettemplater.
416 416 '''
417 417
418 418 # Arguments before "buffered" used to be positional. Consider not
419 419 # adding/removing arguments before "buffered" to not break callers.
420 420 def __init__(self, ui, repo, tmplspec, differ=None, diffopts=None,
421 421 buffered=False):
422 422 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
423 423 # tres is shared with _graphnodeformatter()
424 424 self._tresources = tres = formatter.templateresources(ui, repo)
425 425 self.t = formatter.loadtemplater(ui, tmplspec,
426 426 defaults=templatekw.keywords,
427 427 resources=tres,
428 428 cache=templatekw.defaulttempl)
429 429 self._counter = itertools.count()
430 430
431 431 self._tref = tmplspec.ref
432 432 self._parts = {'header': '', 'footer': '',
433 433 tmplspec.ref: tmplspec.ref,
434 434 'docheader': '', 'docfooter': '',
435 435 'separator': ''}
436 436 if tmplspec.mapfile:
437 437 # find correct templates for current mode, for backward
438 438 # compatibility with 'log -v/-q/--debug' using a mapfile
439 439 tmplmodes = [
440 440 (True, ''),
441 441 (self.ui.verbose, '_verbose'),
442 442 (self.ui.quiet, '_quiet'),
443 443 (self.ui.debugflag, '_debug'),
444 444 ]
445 445 for mode, postfix in tmplmodes:
446 446 for t in self._parts:
447 447 cur = t + postfix
448 448 if mode and cur in self.t:
449 449 self._parts[t] = cur
450 450 else:
451 451 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
452 452 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
453 453 self._parts.update(m)
454 454
455 455 if self._parts['docheader']:
456 456 self.ui.write(self.t.render(self._parts['docheader'], {}))
457 457
458 458 def close(self):
459 459 if self._parts['docfooter']:
460 460 if not self.footer:
461 461 self.footer = ""
462 462 self.footer += self.t.render(self._parts['docfooter'], {})
463 463 return super(changesettemplater, self).close()
464 464
465 465 def _show(self, ctx, copies, props):
466 466 '''show a single changeset or file revision'''
467 467 props = props.copy()
468 468 props['ctx'] = ctx
469 469 props['index'] = index = next(self._counter)
470 470 props['revcache'] = {'copies': copies}
471 471
472 472 # write separator, which wouldn't work well with the header part below
473 473 # since there's inherently a conflict between header (across items) and
474 474 # separator (per item)
475 475 if self._parts['separator'] and index > 0:
476 476 self.ui.write(self.t.render(self._parts['separator'], {}))
477 477
478 478 # write header
479 479 if self._parts['header']:
480 480 h = self.t.render(self._parts['header'], props)
481 481 if self.buffered:
482 482 self.header[ctx.rev()] = h
483 483 else:
484 484 if self.lastheader != h:
485 485 self.lastheader = h
486 486 self.ui.write(h)
487 487
488 488 # write changeset metadata, then patch if requested
489 489 key = self._parts[self._tref]
490 490 self.ui.write(self.t.render(key, props))
491 491 self._showpatch(ctx)
492 492
493 493 if self._parts['footer']:
494 494 if not self.footer:
495 495 self.footer = self.t.render(self._parts['footer'], props)
496 496
497 497 def templatespec(tmpl, mapfile):
498 498 if mapfile:
499 499 return formatter.templatespec('changeset', tmpl, mapfile)
500 500 else:
501 501 return formatter.templatespec('', tmpl, None)
502 502
503 503 def _lookuptemplate(ui, tmpl, style):
504 504 """Find the template matching the given template spec or style
505 505
506 506 See formatter.lookuptemplate() for details.
507 507 """
508 508
509 509 # ui settings
510 510 if not tmpl and not style: # template is stronger than style
511 511 tmpl = ui.config('ui', 'logtemplate')
512 512 if tmpl:
513 513 return templatespec(templater.unquotestring(tmpl), None)
514 514 else:
515 515 style = util.expandpath(ui.config('ui', 'style'))
516 516
517 517 if not tmpl and style:
518 518 mapfile = style
519 519 if not os.path.split(mapfile)[0]:
520 520 mapname = (templater.templatepath('map-cmdline.' + mapfile)
521 521 or templater.templatepath(mapfile))
522 522 if mapname:
523 523 mapfile = mapname
524 524 return templatespec(None, mapfile)
525 525
526 526 if not tmpl:
527 527 return templatespec(None, None)
528 528
529 529 return formatter.lookuptemplate(ui, 'changeset', tmpl)
530 530
531 531 def maketemplater(ui, repo, tmpl, buffered=False):
532 532 """Create a changesettemplater from a literal template 'tmpl'
533 533 byte-string."""
534 534 spec = templatespec(tmpl, None)
535 535 return changesettemplater(ui, repo, spec, buffered=buffered)
536 536
537 537 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
538 538 """show one changeset using template or regular display.
539 539
540 540 Display format will be the first non-empty hit of:
541 541 1. option 'template'
542 542 2. option 'style'
543 543 3. [ui] setting 'logtemplate'
544 544 4. [ui] setting 'style'
545 545 If all of these values are either unset or the empty string,
546 546 regular display via changesetprinter() is done.
547 547 """
548 548 postargs = (differ, opts, buffered)
549 549 if opts.get('template') == 'json':
550 550 return jsonchangeset(ui, repo, *postargs)
551 551
552 552 spec = _lookuptemplate(ui, opts.get('template'), opts.get('style'))
553 553
554 554 if not spec.ref and not spec.tmpl and not spec.mapfile:
555 555 return changesetprinter(ui, repo, *postargs)
556 556
557 557 return changesettemplater(ui, repo, spec, *postargs)
558 558
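# Editor's note: a minimal usage sketch, not part of this changeset. It shows
# one way a caller could go through the resolution order documented in
# changesetdisplayer() above. The helper name and the literal template are
# hypothetical; 'ui' and 'repo' are assumed to come from a command function.
def _exampledisplaylast(ui, repo, n=3):
    # 'template' wins over 'style' and over the [ui] settings
    opts = {'template': '{rev}:{node|short} {desc|firstline}\n'}
    displayer = changesetdisplayer(ui, repo, opts)
    for rev in repo.revs('last(all(), %d)', n):
        displayer.show(repo[rev])
        displayer.flush(repo[rev])
    displayer.close()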
559 559 def _makematcher(repo, revs, pats, opts):
560 560 """Build matcher and expanded patterns from log options
561 561
562 562 If --follow, revs are the revisions to follow from.
563 563
564 564 Returns (match, pats, slowpath) where
565 565 - match: a matcher built from the given pats and -I/-X opts
566 566 - pats: patterns used (globs are expanded on Windows)
567 567 - slowpath: True if patterns aren't as simple as scanning filelogs
568 568 """
569 569 # pats/include/exclude are passed to match.match() directly in
570 570 # _matchfiles() revset but walkchangerevs() builds its matcher with
571 571 # scmutil.match(). The difference is input pats are globbed on
572 572 # platforms without shell expansion (windows).
573 573 wctx = repo[None]
574 574 match, pats = scmutil.matchandpats(wctx, pats, opts)
575 575 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
576 576 if not slowpath:
577 577 follow = opts.get('follow') or opts.get('follow_first')
578 578 startctxs = []
579 579 if follow and opts.get('rev'):
580 580 startctxs = [repo[r] for r in revs]
581 581 for f in match.files():
582 582 if follow and startctxs:
583 583 # No idea if the path was a directory at that revision, so
584 584 # take the slow path.
585 585 if any(f not in c for c in startctxs):
586 586 slowpath = True
587 587 continue
588 588 elif follow and f not in wctx:
589 589 # If the file exists, it may be a directory, so let it
590 590 # take the slow path.
591 591 if os.path.exists(repo.wjoin(f)):
592 592 slowpath = True
593 593 continue
594 594 else:
595 595 raise error.Abort(_('cannot follow file not in parent '
596 596 'revision: "%s"') % f)
597 597 filelog = repo.file(f)
598 598 if not filelog:
599 599 # A zero count may be a directory or deleted file, so
600 600 # try to find matching entries on the slow path.
601 601 if follow:
602 602 raise error.Abort(
603 603 _('cannot follow nonexistent file: "%s"') % f)
604 604 slowpath = True
605 605
606 606 # We decided to fall back to the slowpath because at least one
607 607 # of the paths was not a file. Check to see if at least one of them
608 608 # existed in history - in that case, we'll continue down the
609 609 # slowpath; otherwise, we can turn off the slowpath
610 610 if slowpath:
611 611 for path in match.files():
612 612 if path == '.' or path in repo.store:
613 613 break
614 614 else:
615 615 slowpath = False
616 616
617 617 return match, pats, slowpath
618 618
619 619 def _fileancestors(repo, revs, match, followfirst):
620 620 fctxs = []
621 621 for r in revs:
622 622 ctx = repo[r]
623 623 fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
624 624
625 625 # When displaying a revision with --patch --follow FILE, we have
626 626 # to know which file of the revision must be diffed. With
627 627 # --follow, we want the names of the ancestors of FILE in the
628 628 # revision, stored in "fcache". "fcache" is populated as a side effect
629 629 # of the graph traversal.
630 630 fcache = {}
631 631 def filematcher(ctx):
632 632 return scmutil.matchfiles(repo, fcache.get(ctx.rev(), []))
633 633
634 634 def revgen():
635 635 for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
636 636 fcache[rev] = [c.path() for c in cs]
637 637 yield rev
638 638 return smartset.generatorset(revgen(), iterasc=False), filematcher
639 639
640 640 def _makenofollowfilematcher(repo, pats, opts):
641 641 '''hook for extensions to override the filematcher for non-follow cases'''
642 642 return None
643 643
644 644 _opt2logrevset = {
645 645 'no_merges': ('not merge()', None),
646 646 'only_merges': ('merge()', None),
647 647 '_matchfiles': (None, '_matchfiles(%ps)'),
648 648 'date': ('date(%s)', None),
649 649 'branch': ('branch(%s)', '%lr'),
650 650 '_patslog': ('filelog(%s)', '%lr'),
651 651 'keyword': ('keyword(%s)', '%lr'),
652 652 'prune': ('ancestors(%s)', 'not %lr'),
653 653 'user': ('user(%s)', '%lr'),
654 654 }
655 655
656 656 def _makerevset(repo, match, pats, slowpath, opts):
657 657 """Return a revset string built from log options and file patterns"""
658 658 opts = dict(opts)
659 659 # follow or not follow?
660 660 follow = opts.get('follow') or opts.get('follow_first')
661 661
662 662 # branch and only_branch are really aliases and must be handled at
663 663 # the same time
664 664 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
665 665 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
666 666
667 667 if slowpath:
668 668 # See walkchangerevs() slow path.
669 669 #
670 670 # pats/include/exclude cannot be represented as separate
671 671 # revset expressions as their filtering logic applies at file
672 672 # level. For instance "-I a -X b" matches a revision touching
673 673 # "a" and "b" while "file(a) and not file(b)" does
674 674 # not. Besides, filesets are evaluated against the working
675 675 # directory.
676 676 matchargs = ['r:', 'd:relpath']
677 677 for p in pats:
678 678 matchargs.append('p:' + p)
679 679 for p in opts.get('include', []):
680 680 matchargs.append('i:' + p)
681 681 for p in opts.get('exclude', []):
682 682 matchargs.append('x:' + p)
683 683 opts['_matchfiles'] = matchargs
684 684 elif not follow:
685 685 opts['_patslog'] = list(pats)
686 686
687 687 expr = []
688 688 for op, val in sorted(opts.iteritems()):
689 689 if not val:
690 690 continue
691 691 if op not in _opt2logrevset:
692 692 continue
693 693 revop, listop = _opt2logrevset[op]
694 694 if revop and '%' not in revop:
695 695 expr.append(revop)
696 696 elif not listop:
697 697 expr.append(revsetlang.formatspec(revop, val))
698 698 else:
699 699 if revop:
700 700 val = [revsetlang.formatspec(revop, v) for v in val]
701 701 expr.append(revsetlang.formatspec(listop, val))
702 702
703 703 if expr:
704 704 expr = '(' + ' and '.join(expr) + ')'
705 705 else:
706 706 expr = None
707 707 return expr
708 708
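# Editor's note: a worked example, not part of the original. With no file
# patterns and opts = {'user': ['alice'], 'keyword': ['bug'], 'no_merges': True},
# the loop above emits one clause per recognized option and joins them with
# 'and', so _makerevset() returns roughly
# "(keyword('bug') and not merge() and user('alice'))"; when no recognized
# option is set it returns None and the caller skips revset filtering.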
709 709 def _initialrevs(repo, opts):
710 710 """Return the initial set of revisions to be filtered or followed"""
711 711 follow = opts.get('follow') or opts.get('follow_first')
712 712 if opts.get('rev'):
713 713 revs = scmutil.revrange(repo, opts['rev'])
714 714 elif follow and repo.dirstate.p1() == nullid:
715 715 revs = smartset.baseset()
716 716 elif follow:
717 717 revs = repo.revs('.')
718 718 else:
719 719 revs = smartset.spanset(repo)
720 720 revs.reverse()
721 721 return revs
722 722
723 723 def getrevs(repo, pats, opts):
724 724 """Return (revs, differ) where revs is a smartset
725 725
726 726 differ is a changesetdiffer with pre-configured file matcher.
727 727 """
728 728 follow = opts.get('follow') or opts.get('follow_first')
729 729 followfirst = opts.get('follow_first')
730 730 limit = getlimit(opts)
731 731 revs = _initialrevs(repo, opts)
732 732 if not revs:
733 733 return smartset.baseset(), None
734 734 match, pats, slowpath = _makematcher(repo, revs, pats, opts)
735 735 filematcher = None
736 736 if follow:
737 737 if slowpath or match.always():
738 738 revs = dagop.revancestors(repo, revs, followfirst=followfirst)
739 739 else:
740 740 revs, filematcher = _fileancestors(repo, revs, match, followfirst)
741 741 revs.reverse()
742 742 if filematcher is None:
743 743 filematcher = _makenofollowfilematcher(repo, pats, opts)
744 744 if filematcher is None:
745 745 def filematcher(ctx):
746 746 return match
747 747
748 748 expr = _makerevset(repo, match, pats, slowpath, opts)
749 749 if opts.get('graph') and opts.get('rev'):
750 750 # User-specified revs might be unsorted, but don't sort before
751 751 # _makerevset because it might depend on the order of revs
752 752 if not (revs.isdescending() or revs.istopo()):
753 753 revs.sort(reverse=True)
754 754 if expr:
755 755 matcher = revset.match(None, expr)
756 756 revs = matcher(repo, revs)
757 757 if limit is not None:
758 758 revs = revs.slice(0, limit)
759 759
760 760 differ = changesetdiffer()
761 761 differ._makefilematcher = filematcher
762 762 return revs, differ
763 763
764 764 def _parselinerangeopt(repo, opts):
765 765 """Parse --line-range log option and return a list of tuples (filename,
766 766 (fromline, toline)).
767 767 """
768 768 linerangebyfname = []
769 769 for pat in opts.get('line_range', []):
770 770 try:
771 771 pat, linerange = pat.rsplit(',', 1)
772 772 except ValueError:
773 773 raise error.Abort(_('malformatted line-range pattern %s') % pat)
774 774 try:
775 775 fromline, toline = map(int, linerange.split(':'))
776 776 except ValueError:
777 777 raise error.Abort(_("invalid line range for %s") % pat)
778 778 msg = _("line range pattern '%s' must match exactly one file") % pat
779 779 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
780 780 linerangebyfname.append(
781 781 (fname, util.processlinerange(fromline, toline)))
782 782 return linerangebyfname
783 783
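# Editor's note (illustrative, not part of the patch): an option such as
# --line-range foo.py,10:25 is split on its last comma and on the colon above,
# adding one ('foo.py', <range>) entry where <range> is whatever
# util.processlinerange() makes of (10, 25); a value without the
# ",fromline:toline" suffix aborts with "malformatted line-range pattern".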
784 784 def getlinerangerevs(repo, userrevs, opts):
785 785 """Return (revs, differ).
786 786
787 787 "revs" are revisions obtained by processing "line-range" log options and
788 788 walking block ancestors of each specified file/line-range.
789 789
790 790 "differ" is a changesetdiffer with pre-configured file matcher and hunks
791 791 filter.
792 792 """
793 793 wctx = repo[None]
794 794
795 795 # Two-levels map of "rev -> file ctx -> [line range]".
796 796 linerangesbyrev = {}
797 797 for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
798 798 if fname not in wctx:
799 799 raise error.Abort(_('cannot follow file not in parent '
800 800 'revision: "%s"') % fname)
801 801 fctx = wctx.filectx(fname)
802 802 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
803 803 rev = fctx.introrev()
804 804 if rev not in userrevs:
805 805 continue
806 806 linerangesbyrev.setdefault(
807 807 rev, {}).setdefault(
808 808 fctx.path(), []).append(linerange)
809 809
810 810 def nofilterhunksfn(fctx, hunks):
811 811 return hunks
812 812
813 813 def hunksfilter(ctx):
814 814 fctxlineranges = linerangesbyrev.get(ctx.rev())
815 815 if fctxlineranges is None:
816 816 return nofilterhunksfn
817 817
818 818 def filterfn(fctx, hunks):
819 819 lineranges = fctxlineranges.get(fctx.path())
820 820 if lineranges is not None:
821 821 for hr, lines in hunks:
822 822 if hr is None: # binary
823 823 yield hr, lines
824 824 continue
825 825 if any(mdiff.hunkinrange(hr[2:], lr)
826 826 for lr in lineranges):
827 827 yield hr, lines
828 828 else:
829 829 for hunk in hunks:
830 830 yield hunk
831 831
832 832 return filterfn
833 833
834 834 def filematcher(ctx):
835 835 files = list(linerangesbyrev.get(ctx.rev(), []))
836 836 return scmutil.matchfiles(repo, files)
837 837
838 838 revs = sorted(linerangesbyrev, reverse=True)
839 839
840 840 differ = changesetdiffer()
841 841 differ._makefilematcher = filematcher
842 842 differ._makehunksfilter = hunksfilter
843 843 return revs, differ
844 844
845 845 def _graphnodeformatter(ui, displayer):
846 846 spec = ui.config('ui', 'graphnodetemplate')
847 847 if not spec:
848 848 return templatekw.getgraphnode # fast path for "{graphnode}"
849 849
850 850 spec = templater.unquotestring(spec)
851 851 if isinstance(displayer, changesettemplater):
852 852 # reuse cache of slow templates
853 853 tres = displayer._tresources
854 854 else:
855 855 tres = formatter.templateresources(ui)
856 856 templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords,
857 857 resources=tres)
858 858 def formatnode(repo, ctx):
859 props = {'ctx': ctx, 'repo': repo, 'revcache': {}}
859 props = {'ctx': ctx, 'repo': repo}
860 860 return templ.renderdefault(props)
861 861 return formatnode
862 862
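# Editor's note: illustrative configuration, not part of this changeset. With
# something like the following in an hgrc, formatnode() above renders each
# graph node from the template instead of the default characters picked by
# getgraphnode() ('@', 'o', 'x', ...); the template shown is just an example:
#
#   [ui]
#   graphnodetemplate = {ifeq(obsolete, 'obsolete', 'x', graphnode)}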
863 863 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, props=None):
864 864 props = props or {}
865 865 formatnode = _graphnodeformatter(ui, displayer)
866 866 state = graphmod.asciistate()
867 867 styles = state['styles']
868 868
869 869 # only set graph styling if HGPLAIN is not set.
870 870 if ui.plain('graph'):
871 871 # set all edge styles to |, the default pre-3.8 behaviour
872 872 styles.update(dict.fromkeys(styles, '|'))
873 873 else:
874 874 edgetypes = {
875 875 'parent': graphmod.PARENT,
876 876 'grandparent': graphmod.GRANDPARENT,
877 877 'missing': graphmod.MISSINGPARENT
878 878 }
879 879 for name, key in edgetypes.items():
880 880 # experimental config: experimental.graphstyle.*
881 881 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
882 882 styles[key])
883 883 if not styles[key]:
884 884 styles[key] = None
885 885
886 886 # experimental config: experimental.graphshorten
887 887 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
888 888
889 889 for rev, type, ctx, parents in dag:
890 890 char = formatnode(repo, ctx)
891 891 copies = None
892 892 if getrenamed and ctx.rev():
893 893 copies = []
894 894 for fn in ctx.files():
895 895 rename = getrenamed(fn, ctx.rev())
896 896 if rename:
897 897 copies.append((fn, rename[0]))
898 898 edges = edgefn(type, char, state, rev, parents)
899 899 firstedge = next(edges)
900 900 width = firstedge[2]
901 901 displayer.show(ctx, copies=copies,
902 902 graphwidth=width, **pycompat.strkwargs(props))
903 903 lines = displayer.hunk.pop(rev).split('\n')
904 904 if not lines[-1]:
905 905 del lines[-1]
906 906 displayer.flush(ctx)
907 907 for type, char, width, coldata in itertools.chain([firstedge], edges):
908 908 graphmod.ascii(ui, state, type, char, lines, coldata)
909 909 lines = []
910 910 displayer.close()
911 911
912 912 def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
913 913 revdag = graphmod.dagwalker(repo, revs)
914 914 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed)
915 915
916 916 def displayrevs(ui, repo, revs, displayer, getrenamed):
917 917 for rev in revs:
918 918 ctx = repo[rev]
919 919 copies = None
920 920 if getrenamed is not None and rev:
921 921 copies = []
922 922 for fn in ctx.files():
923 923 rename = getrenamed(fn, rev)
924 924 if rename:
925 925 copies.append((fn, rename[0]))
926 926 displayer.show(ctx, copies=copies)
927 927 displayer.flush(ctx)
928 928 displayer.close()
929 929
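# Editor's note: a minimal sketch, not part of the original, tying the helpers
# above together roughly the way 'hg log' does: getrevs() yields the revisions
# and a pre-configured differ, changesetdisplayer() picks the output format,
# and displayrevs() renders everything. The wrapper name and the option values
# are hypothetical.
def _examplelogpipeline(ui, repo):
    opts = {'user': ['alice'], 'patch': True}
    revs, differ = getrevs(repo, [], opts)
    displayer = changesetdisplayer(ui, repo, opts, differ)
    displayrevs(ui, repo, revs, displayer, getrenamed=None)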
930 930 def checkunsupportedgraphflags(pats, opts):
931 931 for op in ["newest_first"]:
932 932 if op in opts and opts[op]:
933 933 raise error.Abort(_("-G/--graph option is incompatible with --%s")
934 934 % op.replace("_", "-"))
935 935
936 936 def graphrevs(repo, nodes, opts):
937 937 limit = getlimit(opts)
938 938 nodes.reverse()
939 939 if limit is not None:
940 940 nodes = nodes[:limit]
941 941 return graphmod.nodes(repo, nodes)
@@ -1,804 +1,804
1 1 # templatekw.py - common changeset template keywords
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from .node import (
12 12 hex,
13 13 nullid,
14 14 )
15 15
16 16 from . import (
17 17 encoding,
18 18 error,
19 19 hbisect,
20 20 i18n,
21 21 obsutil,
22 22 patch,
23 23 pycompat,
24 24 registrar,
25 25 scmutil,
26 26 templateutil,
27 27 util,
28 28 )
29 29 from .utils import (
30 30 stringutil,
31 31 )
32 32
33 33 _hybrid = templateutil.hybrid
34 34 _mappable = templateutil.mappable
35 35 hybriddict = templateutil.hybriddict
36 36 hybridlist = templateutil.hybridlist
37 37 compatdict = templateutil.compatdict
38 38 compatlist = templateutil.compatlist
39 39 _showcompatlist = templateutil._showcompatlist
40 40
41 41 def _showlist(name, values, templ, mapping, plural=None, separator=' '):
42 42 ui = mapping.get('ui')
43 43 if ui:
44 44 ui.deprecwarn("templatekw._showlist() is deprecated, use "
45 45 "templateutil._showcompatlist()", '4.6')
46 46 context = templ # this is actually a template context, not a templater
47 47 return _showcompatlist(context, mapping, name, values, plural, separator)
48 48
49 49 def showdict(name, data, mapping, plural=None, key='key', value='value',
50 50 fmt=None, separator=' '):
51 51 ui = mapping.get('ui')
52 52 if ui:
53 53 ui.deprecwarn("templatekw.showdict() is deprecated, use "
54 54 "templateutil.compatdict()", '4.6')
55 55 c = [{key: k, value: v} for k, v in data.iteritems()]
56 56 f = _showlist(name, c, mapping['templ'], mapping, plural, separator)
57 57 return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
58 58
59 59 def showlist(name, values, mapping, plural=None, element=None, separator=' '):
60 60 ui = mapping.get('ui')
61 61 if ui:
62 62 ui.deprecwarn("templatekw.showlist() is deprecated, use "
63 63 "templateutil.compatlist()", '4.6')
64 64 if not element:
65 65 element = name
66 66 f = _showlist(name, values, mapping['templ'], mapping, plural, separator)
67 67 return hybridlist(values, name=element, gen=f)
68 68
69 69 def getlatesttags(context, mapping, pattern=None):
70 70 '''return date, distance and name for the latest tag of rev'''
71 71 repo = context.resource(mapping, 'repo')
72 72 ctx = context.resource(mapping, 'ctx')
73 73 cache = context.resource(mapping, 'cache')
74 74
75 75 cachename = 'latesttags'
76 76 if pattern is not None:
77 77 cachename += '-' + pattern
78 78 match = stringutil.stringmatcher(pattern)[2]
79 79 else:
80 80 match = util.always
81 81
82 82 if cachename not in cache:
83 83 # Cache mapping from rev to a tuple with tag date, tag
84 84 # distance and tag name
85 85 cache[cachename] = {-1: (0, 0, ['null'])}
86 86 latesttags = cache[cachename]
87 87
88 88 rev = ctx.rev()
89 89 todo = [rev]
90 90 while todo:
91 91 rev = todo.pop()
92 92 if rev in latesttags:
93 93 continue
94 94 ctx = repo[rev]
95 95 tags = [t for t in ctx.tags()
96 96 if (repo.tagtype(t) and repo.tagtype(t) != 'local'
97 97 and match(t))]
98 98 if tags:
99 99 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
100 100 continue
101 101 try:
102 102 ptags = [latesttags[p.rev()] for p in ctx.parents()]
103 103 if len(ptags) > 1:
104 104 if ptags[0][2] == ptags[1][2]:
105 105 # The tuples are laid out so the right one can be found by
106 106 # comparison in this case.
107 107 pdate, pdist, ptag = max(ptags)
108 108 else:
109 109 def key(x):
110 110 changessincetag = len(repo.revs('only(%d, %s)',
111 111 ctx.rev(), x[2][0]))
112 112 # Smallest number of changes since tag wins. Date is
113 113 # used as tiebreaker.
114 114 return [-changessincetag, x[0]]
115 115 pdate, pdist, ptag = max(ptags, key=key)
116 116 else:
117 117 pdate, pdist, ptag = ptags[0]
118 118 except KeyError:
119 119 # Cache miss - recurse
120 120 todo.append(rev)
121 121 todo.extend(p.rev() for p in ctx.parents())
122 122 continue
123 123 latesttags[rev] = pdate, pdist + 1, ptag
124 124 return latesttags[rev]
125 125
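# Editor's note: a hypothetical walk-through, not part of the original. If
# rev 5 carries the only global tag '1.0' and rev 7 is an untagged grandchild
# of it, the loop above caches (date(5), 0, ['1.0']) for rev 5 and derives
# (date(5), 2, ['1.0']) for rev 7, so {latesttag} renders '1.0' and
# {latesttagdistance} renders 2 for rev 7.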
126 126 def getrenamedfn(repo, endrev=None):
127 127 rcache = {}
128 128 if endrev is None:
129 129 endrev = len(repo)
130 130
131 131 def getrenamed(fn, rev):
132 132 '''looks up all renames for a file (up to endrev) the first
133 133 time the file is given. It indexes on the changerev and only
134 134 parses the manifest if linkrev != changerev.
135 135 Returns rename info for fn at changerev rev.'''
136 136 if fn not in rcache:
137 137 rcache[fn] = {}
138 138 fl = repo.file(fn)
139 139 for i in fl:
140 140 lr = fl.linkrev(i)
141 141 renamed = fl.renamed(fl.node(i))
142 142 rcache[fn][lr] = renamed
143 143 if lr >= endrev:
144 144 break
145 145 if rev in rcache[fn]:
146 146 return rcache[fn][rev]
147 147
148 148 # If linkrev != rev (i.e. rev not found in rcache) fallback to
149 149 # filectx logic.
150 150 try:
151 151 return repo[rev][fn].renamed()
152 152 except error.LookupError:
153 153 return None
154 154
155 155 return getrenamed
156 156
157 157 def getlogcolumns():
158 158 """Return a dict of log column labels"""
159 159 _ = pycompat.identity # temporarily disable gettext
160 160 # i18n: column positioning for "hg log"
161 161 columns = _('bookmark: %s\n'
162 162 'branch: %s\n'
163 163 'changeset: %s\n'
164 164 'copies: %s\n'
165 165 'date: %s\n'
166 166 'extra: %s=%s\n'
167 167 'files+: %s\n'
168 168 'files-: %s\n'
169 169 'files: %s\n'
170 170 'instability: %s\n'
171 171 'manifest: %s\n'
172 172 'obsolete: %s\n'
173 173 'parent: %s\n'
174 174 'phase: %s\n'
175 175 'summary: %s\n'
176 176 'tag: %s\n'
177 177 'user: %s\n')
178 178 return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
179 179 i18n._(columns).splitlines(True)))
180 180
181 181 # default templates internally used for rendering of lists
182 182 defaulttempl = {
183 183 'parent': '{rev}:{node|formatnode} ',
184 184 'manifest': '{rev}:{node|formatnode}',
185 185 'file_copy': '{name} ({source})',
186 186 'envvar': '{key}={value}',
187 187 'extra': '{key}={value|stringescape}'
188 188 }
189 189 # filecopy is preserved for compatibility reasons
190 190 defaulttempl['filecopy'] = defaulttempl['file_copy']
191 191
192 192 # keywords are callables (see registrar.templatekeyword for details)
193 193 keywords = {}
194 194 templatekeyword = registrar.templatekeyword(keywords)
195 195
196 196 @templatekeyword('author', requires={'ctx'})
197 197 def showauthor(context, mapping):
198 198 """String. The unmodified author of the changeset."""
199 199 ctx = context.resource(mapping, 'ctx')
200 200 return ctx.user()
201 201
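# Editor's note: a minimal sketch, not part of this changeset, of how an
# extension registers its own keyword with this API; the keyword name
# 'firstline' and its behaviour are hypothetical. The 'requires' set declares
# which resources (ctx, repo, revcache, ...) the keyword pulls out of the
# mapping via context.resource().
#
#   templatekeyword = registrar.templatekeyword()
#
#   @templatekeyword('firstline', requires={'ctx'})
#   def showfirstline(context, mapping):
#       """String. First line of the changeset description."""
#       ctx = context.resource(mapping, 'ctx')
#       lines = ctx.description().splitlines()
#       return lines[0] if lines else ''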
202 202 @templatekeyword('bisect', requires={'repo', 'ctx'})
203 203 def showbisect(context, mapping):
204 204 """String. The changeset bisection status."""
205 205 repo = context.resource(mapping, 'repo')
206 206 ctx = context.resource(mapping, 'ctx')
207 207 return hbisect.label(repo, ctx.node())
208 208
209 209 @templatekeyword('branch', requires={'ctx'})
210 210 def showbranch(context, mapping):
211 211 """String. The name of the branch on which the changeset was
212 212 committed.
213 213 """
214 214 ctx = context.resource(mapping, 'ctx')
215 215 return ctx.branch()
216 216
217 217 @templatekeyword('branches', requires={'ctx'})
218 218 def showbranches(context, mapping):
219 219 """List of strings. The name of the branch on which the
220 220 changeset was committed. Will be empty if the branch name was
221 221 default. (DEPRECATED)
222 222 """
223 223 ctx = context.resource(mapping, 'ctx')
224 224 branch = ctx.branch()
225 225 if branch != 'default':
226 226 return compatlist(context, mapping, 'branch', [branch],
227 227 plural='branches')
228 228 return compatlist(context, mapping, 'branch', [], plural='branches')
229 229
230 230 @templatekeyword('bookmarks', requires={'repo', 'ctx'})
231 231 def showbookmarks(context, mapping):
232 232 """List of strings. Any bookmarks associated with the
233 233 changeset. Also sets 'active', the name of the active bookmark.
234 234 """
235 235 repo = context.resource(mapping, 'repo')
236 236 ctx = context.resource(mapping, 'ctx')
237 237 bookmarks = ctx.bookmarks()
238 238 active = repo._activebookmark
239 239 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
240 240 f = _showcompatlist(context, mapping, 'bookmark', bookmarks)
241 241 return _hybrid(f, bookmarks, makemap, pycompat.identity)
242 242
243 243 @templatekeyword('children', requires={'ctx'})
244 244 def showchildren(context, mapping):
245 245 """List of strings. The children of the changeset."""
246 246 ctx = context.resource(mapping, 'ctx')
247 247 childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
248 248 return compatlist(context, mapping, 'children', childrevs, element='child')
249 249
250 250 # Deprecated, but kept alive for help generation purposes.
251 251 @templatekeyword('currentbookmark', requires={'repo', 'ctx'})
252 252 def showcurrentbookmark(context, mapping):
253 253 """String. The active bookmark, if it is associated with the changeset.
254 254 (DEPRECATED)"""
255 255 return showactivebookmark(context, mapping)
256 256
257 257 @templatekeyword('activebookmark', requires={'repo', 'ctx'})
258 258 def showactivebookmark(context, mapping):
259 259 """String. The active bookmark, if it is associated with the changeset."""
260 260 repo = context.resource(mapping, 'repo')
261 261 ctx = context.resource(mapping, 'ctx')
262 262 active = repo._activebookmark
263 263 if active and active in ctx.bookmarks():
264 264 return active
265 265 return ''
266 266
267 267 @templatekeyword('date', requires={'ctx'})
268 268 def showdate(context, mapping):
269 269 """Date information. The date when the changeset was committed."""
270 270 ctx = context.resource(mapping, 'ctx')
271 271 return ctx.date()
272 272
273 273 @templatekeyword('desc', requires={'ctx'})
274 274 def showdescription(context, mapping):
275 275 """String. The text of the changeset description."""
276 276 ctx = context.resource(mapping, 'ctx')
277 277 s = ctx.description()
278 278 if isinstance(s, encoding.localstr):
279 279 # try hard to preserve utf-8 bytes
280 280 return encoding.tolocal(encoding.fromlocal(s).strip())
281 281 else:
282 282 return s.strip()
283 283
284 284 @templatekeyword('diffstat', requires={'ctx'})
285 285 def showdiffstat(context, mapping):
286 286 """String. Statistics of changes with the following format:
287 287 "modified files: +added/-removed lines"
288 288 """
289 289 ctx = context.resource(mapping, 'ctx')
290 290 stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False)))
291 291 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
292 292 return '%d: +%d/-%d' % (len(stats), adds, removes)
293 293
294 294 @templatekeyword('envvars', requires={'ui'})
295 295 def showenvvars(context, mapping):
296 296 """A dictionary of environment variables. (EXPERIMENTAL)"""
297 297 ui = context.resource(mapping, 'ui')
298 298 env = ui.exportableenviron()
299 299 env = util.sortdict((k, env[k]) for k in sorted(env))
300 300 return compatdict(context, mapping, 'envvar', env, plural='envvars')
301 301
302 302 @templatekeyword('extras', requires={'ctx'})
303 303 def showextras(context, mapping):
304 304 """List of dicts with key, value entries of the 'extras'
305 305 field of this changeset."""
306 306 ctx = context.resource(mapping, 'ctx')
307 307 extras = ctx.extra()
308 308 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
309 309 makemap = lambda k: {'key': k, 'value': extras[k]}
310 310 c = [makemap(k) for k in extras]
311 311 f = _showcompatlist(context, mapping, 'extra', c, plural='extras')
312 312 return _hybrid(f, extras, makemap,
313 313 lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])))
314 314
315 315 def _showfilesbystat(context, mapping, name, index):
316 316 repo = context.resource(mapping, 'repo')
317 317 ctx = context.resource(mapping, 'ctx')
318 318 revcache = context.resource(mapping, 'revcache')
319 319 if 'files' not in revcache:
320 320 revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
321 321 files = revcache['files'][index]
322 322 return compatlist(context, mapping, name, files, element='file')
323 323
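# Editor's note (not part of the patch): because _showfilesbystat() memoizes
# the status triple in the per-changeset 'revcache' resource, a template such
# as "{file_mods} {file_adds} {file_dels}\n" runs repo.status(ctx.p1(), ctx)
# only once per changeset; the three keywords below just pick different
# indexes out of the cached triple.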
324 324 @templatekeyword('file_adds', requires={'repo', 'ctx', 'revcache'})
325 325 def showfileadds(context, mapping):
326 326 """List of strings. Files added by this changeset."""
327 327 return _showfilesbystat(context, mapping, 'file_add', 1)
328 328
329 329 @templatekeyword('file_copies',
330 330 requires={'repo', 'ctx', 'cache', 'revcache'})
331 331 def showfilecopies(context, mapping):
332 332 """List of strings. Files copied in this changeset with
333 333 their sources.
334 334 """
335 335 repo = context.resource(mapping, 'repo')
336 336 ctx = context.resource(mapping, 'ctx')
337 337 cache = context.resource(mapping, 'cache')
338 338 copies = context.resource(mapping, 'revcache').get('copies')
339 339 if copies is None:
340 340 if 'getrenamed' not in cache:
341 341 cache['getrenamed'] = getrenamedfn(repo)
342 342 copies = []
343 343 getrenamed = cache['getrenamed']
344 344 for fn in ctx.files():
345 345 rename = getrenamed(fn, ctx.rev())
346 346 if rename:
347 347 copies.append((fn, rename[0]))
348 348
349 349 copies = util.sortdict(copies)
350 350 return compatdict(context, mapping, 'file_copy', copies,
351 351 key='name', value='source', fmt='%s (%s)',
352 352 plural='file_copies')
353 353
354 354 # showfilecopiesswitch() displays file copies only if copy records are
355 355 # provided before calling the templater, usually with a --copies
356 356 # command line switch.
357 357 @templatekeyword('file_copies_switch', requires={'revcache'})
358 358 def showfilecopiesswitch(context, mapping):
359 359 """List of strings. Like "file_copies" but displayed
360 360 only if the --copies switch is set.
361 361 """
362 362 copies = context.resource(mapping, 'revcache').get('copies') or []
363 363 copies = util.sortdict(copies)
364 364 return compatdict(context, mapping, 'file_copy', copies,
365 365 key='name', value='source', fmt='%s (%s)',
366 366 plural='file_copies')
367 367
368 368 @templatekeyword('file_dels', requires={'repo', 'ctx', 'revcache'})
369 369 def showfiledels(context, mapping):
370 370 """List of strings. Files removed by this changeset."""
371 371 return _showfilesbystat(context, mapping, 'file_del', 2)
372 372
373 373 @templatekeyword('file_mods', requires={'repo', 'ctx', 'revcache'})
374 374 def showfilemods(context, mapping):
375 375 """List of strings. Files modified by this changeset."""
376 376 return _showfilesbystat(context, mapping, 'file_mod', 0)
377 377
378 378 @templatekeyword('files', requires={'ctx'})
379 379 def showfiles(context, mapping):
380 380 """List of strings. All files modified, added, or removed by this
381 381 changeset.
382 382 """
383 383 ctx = context.resource(mapping, 'ctx')
384 384 return compatlist(context, mapping, 'file', ctx.files())
385 385
386 386 @templatekeyword('graphnode', requires={'repo', 'ctx'})
387 387 def showgraphnode(context, mapping):
388 388 """String. The character representing the changeset node in an ASCII
389 389 revision graph."""
390 390 repo = context.resource(mapping, 'repo')
391 391 ctx = context.resource(mapping, 'ctx')
392 392 return getgraphnode(repo, ctx)
393 393
394 394 def getgraphnode(repo, ctx):
395 395 wpnodes = repo.dirstate.parents()
396 396 if wpnodes[1] == nullid:
397 397 wpnodes = wpnodes[:1]
398 398 if ctx.node() in wpnodes:
399 399 return '@'
400 400 elif ctx.obsolete():
401 401 return 'x'
402 402 elif ctx.isunstable():
403 403 return '*'
404 404 elif ctx.closesbranch():
405 405 return '_'
406 406 else:
407 407 return 'o'
408 408
409 409 @templatekeyword('graphwidth', requires=())
410 410 def showgraphwidth(context, mapping):
411 411 """Integer. The width of the graph drawn by 'log --graph' or zero."""
412 412 # just hosts documentation; should be overridden by template mapping
413 413 return 0
414 414
415 415 @templatekeyword('index', requires=())
416 416 def showindex(context, mapping):
417 417 """Integer. The current iteration of the loop. (0 indexed)"""
418 418 # just hosts documentation; should be overridden by template mapping
419 419 raise error.Abort(_("can't use index in this context"))
420 420
421 421 @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache'})
422 422 def showlatesttag(context, mapping):
423 423 """List of strings. The global tags on the most recent globally
424 424 tagged ancestor of this changeset. If no such tags exist, the list
425 425 consists of the single string "null".
426 426 """
427 427 return showlatesttags(context, mapping, None)
428 428
429 429 def showlatesttags(context, mapping, pattern):
430 430 """helper method for the latesttag keyword and function"""
431 431 latesttags = getlatesttags(context, mapping, pattern)
432 432
433 433 # latesttag[0] is an implementation detail for sorting csets on different
434 434 # branches in a stable manner; it is the date the tagged cset was created,
435 435 # not the date the tag was created. Therefore it isn't made visible here.
436 436 makemap = lambda v: {
437 437 'changes': _showchangessincetag,
438 438 'distance': latesttags[1],
439 439 'latesttag': v, # BC with {latesttag % '{latesttag}'}
440 440 'tag': v
441 441 }
442 442
443 443 tags = latesttags[2]
444 444 f = _showcompatlist(context, mapping, 'latesttag', tags, separator=':')
445 445 return _hybrid(f, tags, makemap, pycompat.identity)
446 446
447 447 @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'})
448 448 def showlatesttagdistance(context, mapping):
449 449 """Integer. Longest path to the latest tag."""
450 450 return getlatesttags(context, mapping)[1]
451 451
452 452 @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'})
453 453 def showchangessincelatesttag(context, mapping):
454 454 """Integer. All ancestors not in the latest tag."""
455 455 tag = getlatesttags(context, mapping)[2][0]
456 456 mapping = context.overlaymap(mapping, {'tag': tag})
457 457 return _showchangessincetag(context, mapping)
458 458
459 459 def _showchangessincetag(context, mapping):
460 460 repo = context.resource(mapping, 'repo')
461 461 ctx = context.resource(mapping, 'ctx')
462 462 offset = 0
463 463 revs = [ctx.rev()]
464 464 tag = context.symbol(mapping, 'tag')
465 465
466 466 # The only() revset doesn't currently support wdir()
467 467 if ctx.rev() is None:
468 468 offset = 1
469 469 revs = [p.rev() for p in ctx.parents()]
470 470
471 471 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
472 472
473 473 # teach the templater that latesttags.changes has switched to the (context, mapping) API
474 474 _showchangessincetag._requires = {'repo', 'ctx'}
475 475
476 476 @templatekeyword('manifest', requires={'repo', 'ctx'})
477 477 def showmanifest(context, mapping):
478 478 repo = context.resource(mapping, 'repo')
479 479 ctx = context.resource(mapping, 'ctx')
480 480 mnode = ctx.manifestnode()
481 481 if mnode is None:
482 482 # just avoid crash, we might want to use the 'ff...' hash in future
483 483 return
484 484 mrev = repo.manifestlog._revlog.rev(mnode)
485 485 mhex = hex(mnode)
486 486 mapping = context.overlaymap(mapping, {'rev': mrev, 'node': mhex})
487 487 f = context.process('manifest', mapping)
488 488 # TODO: perhaps 'ctx' should be dropped from mapping because manifest
489 489 # rev and node are completely different from changeset's.
490 490 return _mappable(f, None, f, lambda x: {'rev': mrev, 'node': mhex})
491 491
492 492 @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx'})
493 493 def showobsfate(context, mapping):
494 494 # this function returns a list containing pre-formatted obsfate strings.
495 495 #
496 496 # This function will be replaced by template fragments once the
497 497 # verbosity templatekw is available.
498 498 succsandmarkers = showsuccsandmarkers(context, mapping)
499 499
500 500 ui = context.resource(mapping, 'ui')
501 501 values = []
502 502
503 503 for x in succsandmarkers:
504 504 values.append(obsutil.obsfateprinter(x['successors'], x['markers'], ui))
505 505
506 506 return compatlist(context, mapping, "fate", values)
507 507
508 508 def shownames(context, mapping, namespace):
509 509 """helper method to generate a template keyword for a namespace"""
510 510 repo = context.resource(mapping, 'repo')
511 511 ctx = context.resource(mapping, 'ctx')
512 512 ns = repo.names[namespace]
513 513 names = ns.names(repo, ctx.node())
514 514 return compatlist(context, mapping, ns.templatename, names,
515 515 plural=namespace)
516 516
517 517 @templatekeyword('namespaces', requires={'repo', 'ctx'})
518 518 def shownamespaces(context, mapping):
519 519 """Dict of lists. Names attached to this changeset per
520 520 namespace."""
521 521 repo = context.resource(mapping, 'repo')
522 522 ctx = context.resource(mapping, 'ctx')
523 523
524 524 namespaces = util.sortdict()
525 525 def makensmapfn(ns):
526 526 # 'name' for iterating over namespaces, templatename for local reference
527 527 return lambda v: {'name': v, ns.templatename: v}
528 528
529 529 for k, ns in repo.names.iteritems():
530 530 names = ns.names(repo, ctx.node())
531 531 f = _showcompatlist(context, mapping, 'name', names)
532 532 namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity)
533 533
534 534 f = _showcompatlist(context, mapping, 'namespace', list(namespaces))
535 535
536 536 def makemap(ns):
537 537 return {
538 538 'namespace': ns,
539 539 'names': namespaces[ns],
540 540 'builtin': repo.names[ns].builtin,
541 541 'colorname': repo.names[ns].colorname,
542 542 }
543 543
544 544 return _hybrid(f, namespaces, makemap, pycompat.identity)
545 545
546 546 @templatekeyword('node', requires={'ctx'})
547 547 def shownode(context, mapping):
548 548 """String. The changeset identification hash, as a 40 hexadecimal
549 549 digit string.
550 550 """
551 551 ctx = context.resource(mapping, 'ctx')
552 552 return ctx.hex()
553 553
554 554 @templatekeyword('obsolete', requires={'ctx'})
555 555 def showobsolete(context, mapping):
556 556 """String. Whether the changeset is obsolete. (EXPERIMENTAL)"""
557 557 ctx = context.resource(mapping, 'ctx')
558 558 if ctx.obsolete():
559 559 return 'obsolete'
560 560 return ''
561 561
562 562 @templatekeyword('peerurls', requires={'repo'})
563 563 def showpeerurls(context, mapping):
564 564 """A dictionary of repository locations defined in the [paths] section
565 565 of your configuration file."""
566 566 repo = context.resource(mapping, 'repo')
567 567 # see commands.paths() for naming of dictionary keys
568 568 paths = repo.ui.paths
569 569 urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems()))
570 570 def makemap(k):
571 571 p = paths[k]
572 572 d = {'name': k, 'url': p.rawloc}
573 573 d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
574 574 return d
575 575 return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k]))
576 576
577 577 @templatekeyword("predecessors", requires={'repo', 'ctx'})
578 578 def showpredecessors(context, mapping):
579 579 """Returns the list if the closest visible successors. (EXPERIMENTAL)"""
580 580 repo = context.resource(mapping, 'repo')
581 581 ctx = context.resource(mapping, 'ctx')
582 582 predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
583 583 predecessors = map(hex, predecessors)
584 584
585 585 return _hybrid(None, predecessors,
586 lambda x: {'ctx': repo[x], 'revcache': {}},
586 lambda x: {'ctx': repo[x]},
587 587 lambda x: scmutil.formatchangeid(repo[x]))
588 588
589 589 @templatekeyword('reporoot', requires={'repo'})
590 590 def showreporoot(context, mapping):
591 591 """String. The root directory of the current repository."""
592 592 repo = context.resource(mapping, 'repo')
593 593 return repo.root
594 594
595 595 @templatekeyword("successorssets", requires={'repo', 'ctx'})
596 596 def showsuccessorssets(context, mapping):
597 597 """Returns a string of sets of successors for a changectx. Format used
598 598 is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2
599 599 while also diverged into ctx3. (EXPERIMENTAL)"""
600 600 repo = context.resource(mapping, 'repo')
601 601 ctx = context.resource(mapping, 'ctx')
602 602 if not ctx.obsolete():
603 603 return ''
604 604
605 605 ssets = obsutil.successorssets(repo, ctx.node(), closest=True)
606 606 ssets = [[hex(n) for n in ss] for ss in ssets]
607 607
608 608 data = []
609 609 for ss in ssets:
610 h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}},
610 h = _hybrid(None, ss, lambda x: {'ctx': repo[x]},
611 611 lambda x: scmutil.formatchangeid(repo[x]))
612 612 data.append(h)
613 613
614 614 # Format the successorssets
615 615 def render(d):
616 616 t = []
617 617 for i in d.gen():
618 618 t.append(i)
619 619 return "".join(t)
620 620
621 621 def gen(data):
622 622 yield "; ".join(render(d) for d in data)
623 623
624 624 return _hybrid(gen(data), data, lambda x: {'successorset': x},
625 625 pycompat.identity)
626 626
627 627 @templatekeyword("succsandmarkers", requires={'repo', 'ctx'})
628 628 def showsuccsandmarkers(context, mapping):
629 629 """Returns a list of dict for each final successor of ctx. The dict
630 630 contains successors node id in "successors" keys and the list of
631 631 obs-markers from ctx to the set of successors in "markers".
632 632 (EXPERIMENTAL)
633 633 """
634 634 repo = context.resource(mapping, 'repo')
635 635 ctx = context.resource(mapping, 'ctx')
636 636
637 637 values = obsutil.successorsandmarkers(repo, ctx)
638 638
639 639 if values is None:
640 640 values = []
641 641
642 642 # Format successors and markers to avoid exposing binary to templates
643 643 data = []
644 644 for i in values:
645 645 # Format successors
646 646 successors = i['successors']
647 647
648 648 successors = [hex(n) for n in successors]
649 649 successors = _hybrid(None, successors,
650 lambda x: {'ctx': repo[x], 'revcache': {}},
650 lambda x: {'ctx': repo[x]},
651 651 lambda x: scmutil.formatchangeid(repo[x]))
652 652
653 653 # Format markers
654 654 finalmarkers = []
655 655 for m in i['markers']:
656 656 hexprec = hex(m[0])
657 657 hexsucs = tuple(hex(n) for n in m[1])
658 658 hexparents = None
659 659 if m[5] is not None:
660 660 hexparents = tuple(hex(n) for n in m[5])
661 661 newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:]
662 662 finalmarkers.append(newmarker)
663 663
664 664 data.append({'successors': successors, 'markers': finalmarkers})
665 665
666 666 f = _showcompatlist(context, mapping, 'succsandmarkers', data)
667 667 return _hybrid(f, data, lambda x: x, pycompat.identity)
668 668
669 669 @templatekeyword('p1rev', requires={'ctx'})
670 670 def showp1rev(context, mapping):
671 671 """Integer. The repository-local revision number of the changeset's
672 672 first parent, or -1 if the changeset has no parents."""
673 673 ctx = context.resource(mapping, 'ctx')
674 674 return ctx.p1().rev()
675 675
676 676 @templatekeyword('p2rev', requires={'ctx'})
677 677 def showp2rev(context, mapping):
678 678 """Integer. The repository-local revision number of the changeset's
679 679 second parent, or -1 if the changeset has no second parent."""
680 680 ctx = context.resource(mapping, 'ctx')
681 681 return ctx.p2().rev()
682 682
683 683 @templatekeyword('p1node', requires={'ctx'})
684 684 def showp1node(context, mapping):
685 685 """String. The identification hash of the changeset's first parent,
686 686 as a 40 digit hexadecimal string. If the changeset has no parents, all
687 687 digits are 0."""
688 688 ctx = context.resource(mapping, 'ctx')
689 689 return ctx.p1().hex()
690 690
691 691 @templatekeyword('p2node', requires={'ctx'})
692 692 def showp2node(context, mapping):
693 693 """String. The identification hash of the changeset's second
694 694 parent, as a 40 digit hexadecimal string. If the changeset has no second
695 695 parent, all digits are 0."""
696 696 ctx = context.resource(mapping, 'ctx')
697 697 return ctx.p2().hex()
698 698
699 699 @templatekeyword('parents', requires={'repo', 'ctx'})
700 700 def showparents(context, mapping):
701 701 """List of strings. The parents of the changeset in "rev:node"
702 702 format. If the changeset has only one "natural" parent (the predecessor
703 703 revision) nothing is shown."""
704 704 repo = context.resource(mapping, 'repo')
705 705 ctx = context.resource(mapping, 'ctx')
706 706 pctxs = scmutil.meaningfulparents(repo, ctx)
707 707 prevs = [p.rev() for p in pctxs]
708 708 parents = [[('rev', p.rev()),
709 709 ('node', p.hex()),
710 710 ('phase', p.phasestr())]
711 711 for p in pctxs]
712 712 f = _showcompatlist(context, mapping, 'parent', parents)
713 return _hybrid(f, prevs, lambda x: {'ctx': repo[x], 'revcache': {}},
713 return _hybrid(f, prevs, lambda x: {'ctx': repo[x]},
714 714 lambda x: scmutil.formatchangeid(repo[x]), keytype=int)
715 715
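The makemap callbacks touched by this changeset (here, in succsandmarkers above and in showrevslist below) no longer seed 'revcache' with an empty dict; going by the commit message, the templater now switches 'revcache' based on the new mapping items itself. A minimal sketch of the callback shape, with a plain dictionary standing in for a real repo object:

    fakerepo = {0: 'ctx-for-rev-0', 1: 'ctx-for-rev-1'}   # stand-in for repo[...]

    def makemap_old(x):
        return {'ctx': fakerepo[x], 'revcache': {}}       # before this change

    def makemap_new(x):
        return {'ctx': fakerepo[x]}                       # after this change

    assert makemap_new(1) == {'ctx': 'ctx-for-rev-1'}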
716 716 @templatekeyword('phase', requires={'ctx'})
717 717 def showphase(context, mapping):
718 718 """String. The changeset phase name."""
719 719 ctx = context.resource(mapping, 'ctx')
720 720 return ctx.phasestr()
721 721
722 722 @templatekeyword('phaseidx', requires={'ctx'})
723 723 def showphaseidx(context, mapping):
724 724 """Integer. The changeset phase index. (ADVANCED)"""
725 725 ctx = context.resource(mapping, 'ctx')
726 726 return ctx.phase()
727 727
728 728 @templatekeyword('rev', requires={'ctx'})
729 729 def showrev(context, mapping):
730 730 """Integer. The repository-local changeset revision number."""
731 731 ctx = context.resource(mapping, 'ctx')
732 732 return scmutil.intrev(ctx)
733 733
734 734 def showrevslist(context, mapping, name, revs):
735 735 """helper to generate a list of revisions in which a mapped template will
736 736 be evaluated"""
737 737 repo = context.resource(mapping, 'repo')
738 738 f = _showcompatlist(context, mapping, name, ['%d' % r for r in revs])
739 739 return _hybrid(f, revs,
740 lambda x: {name: x, 'ctx': repo[x], 'revcache': {}},
740 lambda x: {name: x, 'ctx': repo[x]},
741 741 pycompat.identity, keytype=int)
742 742
743 743 @templatekeyword('subrepos', requires={'ctx'})
744 744 def showsubrepos(context, mapping):
745 745 """List of strings. Updated subrepositories in the changeset."""
746 746 ctx = context.resource(mapping, 'ctx')
747 747 substate = ctx.substate
748 748 if not substate:
749 749 return compatlist(context, mapping, 'subrepo', [])
750 750 psubstate = ctx.parents()[0].substate or {}
751 751 subrepos = []
752 752 for sub in substate:
753 753 if sub not in psubstate or substate[sub] != psubstate[sub]:
754 754 subrepos.append(sub) # modified or newly added in ctx
755 755 for sub in psubstate:
756 756 if sub not in substate:
757 757 subrepos.append(sub) # removed in ctx
758 758 return compatlist(context, mapping, 'subrepo', sorted(subrepos))
759 759
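A standalone sketch of the comparison in showsubrepos above, with made-up subrepo names and states: a subrepo is reported when it is added or modified relative to the first parent, or when it was removed in ctx.

    substate = {'libs/foo': 'aaa0', 'libs/bar': 'bbb0'}    # state in ctx
    psubstate = {'libs/foo': 'aaa0', 'libs/old': 'ccc0'}   # state in first parent

    changed = [s for s in substate
               if s not in psubstate or substate[s] != psubstate[s]]
    removed = [s for s in psubstate if s not in substate]

    assert sorted(changed + removed) == ['libs/bar', 'libs/old']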
760 760 # don't remove "showtags" definition, even though namespaces will put
761 761 # a helper function for "tags" keyword into "keywords" map automatically,
762 762 # because online help text is built without namespaces initialization
763 763 @templatekeyword('tags', requires={'repo', 'ctx'})
764 764 def showtags(context, mapping):
765 765 """List of strings. Any tags associated with the changeset."""
766 766 return shownames(context, mapping, 'tags')
767 767
768 768 @templatekeyword('termwidth', requires={'ui'})
769 769 def showtermwidth(context, mapping):
770 770 """Integer. The width of the current terminal."""
771 771 ui = context.resource(mapping, 'ui')
772 772 return ui.termwidth()
773 773
774 774 @templatekeyword('instabilities', requires={'ctx'})
775 775 def showinstabilities(context, mapping):
776 776 """List of strings. Evolution instabilities affecting the changeset.
777 777 (EXPERIMENTAL)
778 778 """
779 779 ctx = context.resource(mapping, 'ctx')
780 780 return compatlist(context, mapping, 'instability', ctx.instabilities(),
781 781 plural='instabilities')
782 782
783 783 @templatekeyword('verbosity', requires={'ui'})
784 784 def showverbosity(context, mapping):
785 785 """String. The current output verbosity in 'debug', 'quiet', 'verbose',
786 786 or ''."""
787 787 ui = context.resource(mapping, 'ui')
788 788 # see logcmdutil.changesettemplater for priority of these flags
789 789 if ui.debugflag:
790 790 return 'debug'
791 791 elif ui.quiet:
792 792 return 'quiet'
793 793 elif ui.verbose:
794 794 return 'verbose'
795 795 return ''
796 796
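A tiny worked example of the flag precedence implemented above; the boolean arguments merely stand in for the corresponding ui attributes, so this is an illustration rather than the real ui object:

    def verbosity(debugflag, quiet, verbose):
        # debug wins over quiet, which wins over verbose
        if debugflag:
            return 'debug'
        elif quiet:
            return 'quiet'
        elif verbose:
            return 'verbose'
        return ''

    assert verbosity(True, True, True) == 'debug'
    assert verbosity(False, True, True) == 'quiet'
    assert verbosity(False, False, True) == 'verbose'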
797 797 def loadkeyword(ui, extname, registrarobj):
798 798 """Load template keywords from the specified registrarobj
799 799 """
800 800 for name, func in registrarobj._table.iteritems():
801 801 keywords[name] = func
802 802
803 803 # tell hggettext to extract docstrings from these functions:
804 804 i18nfunctions = keywords.values()
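For context on loadkeyword() above: a hedged sketch of the extension-side registration that such loading is meant to pick up, using the same requires= style seen throughout this file. The keyword name 'shortphase' and the module name are made up for illustration and are not part of this changeset.

    # myext.py -- hypothetical third-party extension adding one template keyword
    from mercurial import registrar

    templatekeyword = registrar.templatekeyword()

    @templatekeyword('shortphase', requires={'ctx'})
    def showshortphase(context, mapping):
        """String. First letter of the changeset phase name. (hypothetical)"""
        ctx = context.resource(mapping, 'ctx')
        return ctx.phasestr()[:1]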