tersestatus: make methods part of the dirnode class...
Denis Laxalde
r34684:3d6d4b12 default
@@ -1,3872 +1,3877 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 dirstateguard,
30 30 encoding,
31 31 error,
32 32 formatter,
33 33 graphmod,
34 34 match as matchmod,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 pycompat,
39 39 registrar,
40 40 revlog,
41 41 revset,
42 42 scmutil,
43 43 smartset,
44 44 templatekw,
45 45 templater,
46 46 util,
47 47 vfs as vfsmod,
48 48 )
49 49 stringio = util.stringio
50 50
51 51 # templates of common command options
52 52
53 53 dryrunopts = [
54 54 ('n', 'dry-run', None,
55 55 _('do not perform actions, just print output')),
56 56 ]
57 57
58 58 remoteopts = [
59 59 ('e', 'ssh', '',
60 60 _('specify ssh command to use'), _('CMD')),
61 61 ('', 'remotecmd', '',
62 62 _('specify hg command to run on the remote side'), _('CMD')),
63 63 ('', 'insecure', None,
64 64 _('do not verify server certificate (ignoring web.cacerts config)')),
65 65 ]
66 66
67 67 walkopts = [
68 68 ('I', 'include', [],
69 69 _('include names matching the given patterns'), _('PATTERN')),
70 70 ('X', 'exclude', [],
71 71 _('exclude names matching the given patterns'), _('PATTERN')),
72 72 ]
73 73
74 74 commitopts = [
75 75 ('m', 'message', '',
76 76 _('use text as commit message'), _('TEXT')),
77 77 ('l', 'logfile', '',
78 78 _('read commit message from file'), _('FILE')),
79 79 ]
80 80
81 81 commitopts2 = [
82 82 ('d', 'date', '',
83 83 _('record the specified date as commit date'), _('DATE')),
84 84 ('u', 'user', '',
85 85 _('record the specified user as committer'), _('USER')),
86 86 ]
87 87
88 88 # hidden for now
89 89 formatteropts = [
90 90 ('T', 'template', '',
91 91 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
92 92 ]
93 93
94 94 templateopts = [
95 95 ('', 'style', '',
96 96 _('display using template map file (DEPRECATED)'), _('STYLE')),
97 97 ('T', 'template', '',
98 98 _('display with template'), _('TEMPLATE')),
99 99 ]
100 100
101 101 logopts = [
102 102 ('p', 'patch', None, _('show patch')),
103 103 ('g', 'git', None, _('use git extended diff format')),
104 104 ('l', 'limit', '',
105 105 _('limit number of changes displayed'), _('NUM')),
106 106 ('M', 'no-merges', None, _('do not show merges')),
107 107 ('', 'stat', None, _('output diffstat-style summary of changes')),
108 108 ('G', 'graph', None, _("show the revision DAG")),
109 109 ] + templateopts
110 110
111 111 diffopts = [
112 112 ('a', 'text', None, _('treat all files as text')),
113 113 ('g', 'git', None, _('use git extended diff format')),
114 114 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
115 115 ('', 'nodates', None, _('omit dates from diff headers'))
116 116 ]
117 117
118 118 diffwsopts = [
119 119 ('w', 'ignore-all-space', None,
120 120 _('ignore white space when comparing lines')),
121 121 ('b', 'ignore-space-change', None,
122 122 _('ignore changes in the amount of white space')),
123 123 ('B', 'ignore-blank-lines', None,
124 124 _('ignore changes whose lines are all blank')),
125 125 ('Z', 'ignore-space-at-eol', None,
126 126 _('ignore changes in whitespace at EOL')),
127 127 ]
128 128
129 129 diffopts2 = [
130 130 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
131 131 ('p', 'show-function', None, _('show which function each change is in')),
132 132 ('', 'reverse', None, _('produce a diff that undoes the changes')),
133 133 ] + diffwsopts + [
134 134 ('U', 'unified', '',
135 135 _('number of lines of context to show'), _('NUM')),
136 136 ('', 'stat', None, _('output diffstat-style summary of changes')),
137 137 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
138 138 ]
139 139
140 140 mergetoolopts = [
141 141 ('t', 'tool', '', _('specify merge tool')),
142 142 ]
143 143
144 144 similarityopts = [
145 145 ('s', 'similarity', '',
146 146 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
147 147 ]
148 148
149 149 subrepoopts = [
150 150 ('S', 'subrepos', None,
151 151 _('recurse into subrepositories'))
152 152 ]
153 153
154 154 debugrevlogopts = [
155 155 ('c', 'changelog', False, _('open changelog')),
156 156 ('m', 'manifest', False, _('open manifest')),
157 157 ('', 'dir', '', _('open directory manifest')),
158 158 ]
159 159
160 160 # special string such that everything below this line will be ignored in the
161 161 # editor text
162 162 _linebelow = "^HG: ------------------------ >8 ------------------------$"
163 163
164 164 def ishunk(x):
165 165 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
166 166 return isinstance(x, hunkclasses)
167 167
168 168 def newandmodified(chunks, originalchunks):
169 169 newlyaddedandmodifiedfiles = set()
170 170 for chunk in chunks:
171 171 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
172 172 originalchunks:
173 173 newlyaddedandmodifiedfiles.add(chunk.header.filename())
174 174 return newlyaddedandmodifiedfiles
175 175
176 176 def parsealiases(cmd):
177 177 return cmd.lstrip("^").split("|")
178 178
179 179 def setupwrapcolorwrite(ui):
180 180 # wrap ui.write so diff output can be labeled/colorized
181 181 def wrapwrite(orig, *args, **kw):
182 182 label = kw.pop('label', '')
183 183 for chunk, l in patch.difflabel(lambda: args):
184 184 orig(chunk, label=label + l)
185 185
186 186 oldwrite = ui.write
187 187 def wrap(*args, **kwargs):
188 188 return wrapwrite(oldwrite, *args, **kwargs)
189 189 setattr(ui, 'write', wrap)
190 190 return oldwrite
191 191
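As an aside for readers new to the wrap/restore idiom above, here is a minimal standalone sketch of temporarily replacing a write method and restoring it afterwards, in the spirit of setupwrapcolorwrite and the `ui.write = oldwrite` cleanup in recordfilter below; the Fakeui class and the label value are made up, not Mercurial API.

class Fakeui(object):
    def write(self, s, label=''):
        print('[%s] %s' % (label, s))

def setupwrap(ui):
    oldwrite = ui.write
    def wrap(s, **kwargs):
        # decorate every chunk with a label before delegating to the original
        kwargs.setdefault('label', 'diff.inserted')
        return oldwrite(s, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

ui = Fakeui()
oldwrite = setupwrap(ui)
try:
    ui.write('+added line')   # routed through the wrapper
finally:
    ui.write = oldwrite       # restore the original bound method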
192 192 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
193 193 if usecurses:
194 194 if testfile:
195 195 recordfn = crecordmod.testdecorator(testfile,
196 196 crecordmod.testchunkselector)
197 197 else:
198 198 recordfn = crecordmod.chunkselector
199 199
200 200 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
201 201
202 202 else:
203 203 return patch.filterpatch(ui, originalhunks, operation)
204 204
205 205 def recordfilter(ui, originalhunks, operation=None):
206 206 """ Prompts the user to filter the originalhunks and return a list of
207 207 selected hunks.
208 208 *operation* is used to build ui messages to indicate to the user what
209 209 kind of filtering they are doing: reverting, committing, shelving, etc.
210 210 (see patch.filterpatch).
211 211 """
212 212 usecurses = crecordmod.checkcurses(ui)
213 213 testfile = ui.config('experimental', 'crecordtest')
214 214 oldwrite = setupwrapcolorwrite(ui)
215 215 try:
216 216 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
217 217 testfile, operation)
218 218 finally:
219 219 ui.write = oldwrite
220 220 return newchunks, newopts
221 221
222 222 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
223 223 filterfn, *pats, **opts):
224 224 from . import merge as mergemod
225 225 opts = pycompat.byteskwargs(opts)
226 226 if not ui.interactive():
227 227 if cmdsuggest:
228 228 msg = _('running non-interactively, use %s instead') % cmdsuggest
229 229 else:
230 230 msg = _('running non-interactively')
231 231 raise error.Abort(msg)
232 232
233 233 # make sure username is set before going interactive
234 234 if not opts.get('user'):
235 235 ui.username() # raise exception, username not provided
236 236
237 237 def recordfunc(ui, repo, message, match, opts):
238 238 """This is generic record driver.
239 239
240 240 Its job is to interactively filter local changes, and
241 241 accordingly prepare the working directory into a state in which the
242 242 job can be delegated to a non-interactive commit command such as
243 243 'commit' or 'qrefresh'.
244 244
245 245 After the actual job is done by the non-interactive command, the
246 246 working directory is restored to its original state.
247 247
248 248 In the end we'll record interesting changes, and everything else
249 249 will be left in place, so the user can continue working.
250 250 """
251 251
252 252 checkunfinished(repo, commit=True)
253 253 wctx = repo[None]
254 254 merge = len(wctx.parents()) > 1
255 255 if merge:
256 256 raise error.Abort(_('cannot partially commit a merge '
257 257 '(use "hg commit" instead)'))
258 258
259 259 def fail(f, msg):
260 260 raise error.Abort('%s: %s' % (f, msg))
261 261
262 262 force = opts.get('force')
263 263 if not force:
264 264 vdirs = []
265 265 match.explicitdir = vdirs.append
266 266 match.bad = fail
267 267
268 268 status = repo.status(match=match)
269 269 if not force:
270 270 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
271 271 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
272 272 diffopts.nodates = True
273 273 diffopts.git = True
274 274 diffopts.showfunc = True
275 275 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
276 276 originalchunks = patch.parsepatch(originaldiff)
277 277
278 278 # 1. filter patch, since we are intending to apply a subset of it
279 279 try:
280 280 chunks, newopts = filterfn(ui, originalchunks)
281 281 except error.PatchError as err:
282 282 raise error.Abort(_('error parsing patch: %s') % err)
283 283 opts.update(newopts)
284 284
285 285 # We need to keep a backup of files that have been newly added and
286 286 # modified during the recording process because there is a previous
287 287 # version without the edit in the workdir
288 288 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
289 289 contenders = set()
290 290 for h in chunks:
291 291 try:
292 292 contenders.update(set(h.files()))
293 293 except AttributeError:
294 294 pass
295 295
296 296 changed = status.modified + status.added + status.removed
297 297 newfiles = [f for f in changed if f in contenders]
298 298 if not newfiles:
299 299 ui.status(_('no changes to record\n'))
300 300 return 0
301 301
302 302 modified = set(status.modified)
303 303
304 304 # 2. backup changed files, so we can restore them in the end
305 305
306 306 if backupall:
307 307 tobackup = changed
308 308 else:
309 309 tobackup = [f for f in newfiles if f in modified or f in \
310 310 newlyaddedandmodifiedfiles]
311 311 backups = {}
312 312 if tobackup:
313 313 backupdir = repo.vfs.join('record-backups')
314 314 try:
315 315 os.mkdir(backupdir)
316 316 except OSError as err:
317 317 if err.errno != errno.EEXIST:
318 318 raise
319 319 try:
320 320 # backup continues
321 321 for f in tobackup:
322 322 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
323 323 dir=backupdir)
324 324 os.close(fd)
325 325 ui.debug('backup %r as %r\n' % (f, tmpname))
326 326 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
327 327 backups[f] = tmpname
328 328
329 329 fp = stringio()
330 330 for c in chunks:
331 331 fname = c.filename()
332 332 if fname in backups:
333 333 c.write(fp)
334 334 dopatch = fp.tell()
335 335 fp.seek(0)
336 336
337 337 # 2.5 optionally review / modify patch in text editor
338 338 if opts.get('review', False):
339 339 patchtext = (crecordmod.diffhelptext
340 340 + crecordmod.patchhelptext
341 341 + fp.read())
342 342 reviewedpatch = ui.edit(patchtext, "",
343 343 action="diff",
344 344 repopath=repo.path)
345 345 fp.truncate(0)
346 346 fp.write(reviewedpatch)
347 347 fp.seek(0)
348 348
349 349 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
350 350 # 3a. apply filtered patch to clean repo (clean)
351 351 if backups:
352 352 # Equivalent to hg.revert
353 353 m = scmutil.matchfiles(repo, backups.keys())
354 354 mergemod.update(repo, repo.dirstate.p1(),
355 355 False, True, matcher=m)
356 356
357 357 # 3b. (apply)
358 358 if dopatch:
359 359 try:
360 360 ui.debug('applying patch\n')
361 361 ui.debug(fp.getvalue())
362 362 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
363 363 except error.PatchError as err:
364 364 raise error.Abort(str(err))
365 365 del fp
366 366
367 367 # 4. We prepared working directory according to filtered
368 368 # patch. Now is the time to delegate the job to
369 369 # commit/qrefresh or the like!
370 370
371 371 # Make all of the pathnames absolute.
372 372 newfiles = [repo.wjoin(nf) for nf in newfiles]
373 373 return commitfunc(ui, repo, *newfiles, **opts)
374 374 finally:
375 375 # 5. finally restore backed-up files
376 376 try:
377 377 dirstate = repo.dirstate
378 378 for realname, tmpname in backups.iteritems():
379 379 ui.debug('restoring %r to %r\n' % (tmpname, realname))
380 380
381 381 if dirstate[realname] == 'n':
382 382 # without normallookup, restoring timestamp
383 383 # may cause partially committed files
384 384 # to be treated as unmodified
385 385 dirstate.normallookup(realname)
386 386
387 387 # copystat=True here and above are a hack to trick any
388 388 # editors that have f open into thinking that we haven't modified them.
389 389 #
390 390 # Also note that this is racy as an editor could notice the
391 391 # file's mtime before we've finished writing it.
392 392 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
393 393 os.unlink(tmpname)
394 394 if tobackup:
395 395 os.rmdir(backupdir)
396 396 except OSError:
397 397 pass
398 398
399 399 def recordinwlock(ui, repo, message, match, opts):
400 400 with repo.wlock():
401 401 return recordfunc(ui, repo, message, match, opts)
402 402
403 403 return commit(ui, repo, recordinwlock, pats, opts)
404 404
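The backup/apply/restore dance in recordfunc (steps 2 through 5 above) follows a generic pattern. A simplified, self-contained sketch of that pattern, with no Mercurial involvement and a hypothetical with_backups helper, might look like this:

import os
import shutil
import tempfile

def with_backups(paths, operation):
    """Back up the given files, run operation(), then restore the originals."""
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backups = {}
    try:
        for p in paths:
            fd, tmpname = tempfile.mkstemp(
                prefix=os.path.basename(p) + '.', dir=backupdir)
            os.close(fd)
            shutil.copy2(p, tmpname)         # like copystat=True: keep mtime
            backups[p] = tmpname
        return operation()                   # e.g. apply a partial patch
    finally:
        for realname, tmpname in backups.items():
            shutil.copy2(tmpname, realname)  # put the original content back
            os.unlink(tmpname)
        os.rmdir(backupdir)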
405 405
406 406 # extracted at module level as it's required each time a file will be added
407 407 # to a dirnode object below
408 408 pathsep = pycompat.ossep
409 409
410 410 class dirnode(object):
411 411 """
412 412 represents a directory in user working copy
413 413
414 414 stores information which is required for the purpose of tersing the status
415 415
416 416 path is the path to the directory
417 417
418 418 statuses is a set of statuses of all files in this directory (this includes
419 419 all the files in all the subdirectories too)
420 420
421 421 files is a list of files which are direct children of this directory
422 422
423 423 subdirs is a dictionary with the sub-directory name as the key and its own
424 424 dirnode object as the value
425 425 """
426 426
427 427 def __init__(self, dirpath):
428 428 self.path = dirpath
429 429 self.statuses = set([])
430 430 self.files = []
431 431 self.subdirs = {}
432 432
433 433 def _addfileindir(self, filename, status):
434 434 """ adds a file in this directory as the direct child """
435 435 self.files.append((filename, status))
436 436
437 437 def addfile(self, filename, status):
438 438 """
439 439 adds a file which is present in this directory to its direct parent
440 440 dirnode object
441 441
442 442 if the file is not a direct child of this directory, we traverse to the
443 443 directory of which this file is a direct child and add the file there
444 444 """
445 445
446 446 # if the filename contains a path separator, it means it's not a direct
447 447 # child of this directory
448 448 if pathsep in filename:
449 449 subdir, filep = filename.split(pathsep, 1)
450 450
451 451 # does the dirnode object for subdir exist
452 452 if subdir not in self.subdirs:
453 453 subdirpath = os.path.join(self.path, subdir)
454 454 self.subdirs[subdir] = dirnode(subdirpath)
455 455
456 456 # try adding the file in subdir
457 457 self.subdirs[subdir].addfile(filep, status)
458 458
459 459 else:
460 460 self._addfileindir(filename, status)
461 461
462 462 if status not in self.statuses:
463 463 self.statuses.add(status)
464 464
465 def _addfilestotersed(path, files, tersedict):
466 """ adds files to the their respective status list in the final tersed list
465 def _addfilestotersed(self, tersedict):
466 """
467 adds files to their respective status lists in the final tersed list
467 468
468 469 path is the path of the parent directory of the file
469 470 files is a list of tuples where each tuple is (filename, status)
470 471 tersedict is a dictionary which contains each status abbreviation as a key and
471 472 a list of files and tersed dirs in that status as the value
472 473 """
473 for f, st in files:
474 tersedict[st].append(os.path.join(path, f))
475
476 def _processtersestatus(subdir, tersedict, terseargs):
477 """a recursive function which process status for a certain directory.
478
479 subdir is an oject of dirnode class defined below. each object of dirnode
480 class has a set of statuses which files in that directory has. This ease our
481 check whether we can terse that directory or not.
482
483 tersedict is a dictonary which contains each status abbreviation as key and
484 list of files and tersed dirs in that status as value. In each function call
485 we are passing the same dict and adding files and dirs to it.
486
487 terseargs is the string of arguments passed by the user with `--terse` flag.
474 for f, st in self.files:
475 tersedict[st].append(os.path.join(self.path, f))
476
477 def _processtersestatus(self, tersedict, terseargs):
478 """
479 a recursive function which processes the status for a certain directory.
480
481 self is an object of the dirnode class. each object of the dirnode
482 class has a set of statuses of the files in that directory. This eases
483 our check of whether we can terse that directory or not.
484
485 tersedict is a dictionary which contains each status abbreviation as a key
486 and a list of files and tersed dirs in that status as the value. In each
487 function call we are passing the same dict and adding files and dirs
488 to it.
489
490 terseargs is the string of arguments passed by the user with `--terse`
491 flag.
488 492
489 493 The following cases can happen:
490 494
491 495 1) All the files in the directory (including all the files in its
492 496 subdirectories) share the same status and the user has asked us to terse
493 497 that status. -> we add the directory name to the status list and return
494 498
495 499 2) If '1)' does not happen, we do the following:
496 500
497 501 a) Add all the files which are in this directory (only the ones in
498 502 this directory, not the subdirs) to their respective status list
499 503
500 b) Recurse the function on all the subdirectories of this directory
504 b) Recurse the function on all the subdirectories of this
505 directory
501 506 """
502 507
503 if len(subdir.statuses) == 1:
504 onlyst = subdir.statuses.pop()
505
506 # Making sure we terse only when the status abbreviation is passed as
507 # terse argument
508 if len(self.statuses) == 1:
509 onlyst = self.statuses.pop()
510
511 # Making sure we terse only when the status abbreviation is
512 # passed as a terse argument
508 513 if onlyst in terseargs:
509 tersedict[onlyst].append(subdir.path + pycompat.ossep)
514 tersedict[onlyst].append(self.path + pycompat.ossep)
510 515 return
511 516
512 517 # add the files to status list
513 _addfilestotersed(subdir.path, subdir.files, tersedict)
518 self._addfilestotersed(tersedict)
514 519
515 520 # recurse on the subdirs
516 for dirobj in subdir.subdirs.values():
517 _processtersestatus(dirobj, tersedict, terseargs)
521 for dirobj in self.subdirs.values():
522 dirobj._processtersestatus(tersedict, terseargs)
518 523
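To make the tersing machinery easier to follow outside Mercurial, here is a stripped-down, standalone sketch of the same tree structure; MiniDirnode is a made-up name, it uses plain '/' separators and no Mercurial imports, and it only mirrors the shape of addfile and _processtersestatus rather than reproducing them exactly.

class MiniDirnode(object):
    """Simplified stand-in for dirnode; '' represents the repository root."""
    def __init__(self, path):
        self.path = path
        self.statuses = set()   # one-letter statuses seen below this dir
        self.files = []         # (filename, status) of direct children
        self.subdirs = {}       # name -> MiniDirnode

    def addfile(self, filename, status):
        if '/' in filename:
            # not a direct child: recurse into (or create) the subdirectory
            subdir, rest = filename.split('/', 1)
            if subdir not in self.subdirs:
                subpath = self.path + '/' + subdir if self.path else subdir
                self.subdirs[subdir] = MiniDirnode(subpath)
            self.subdirs[subdir].addfile(rest, status)
        else:
            self.files.append((filename, status))
        self.statuses.add(status)

    def terse(self, tersedict, terseargs):
        # one shared status that the user asked to terse: emit the directory
        if len(self.statuses) == 1:
            onlyst = next(iter(self.statuses))
            if onlyst in terseargs:
                tersedict[onlyst].append(self.path + '/')
                return
        for f, st in self.files:
            tersedict[st].append(self.path + '/' + f if self.path else f)
        for sub in self.subdirs.values():
            sub.terse(tersedict, terseargs)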
519 524 def tersedir(statuslist, terseargs):
520 525 """
521 526 terses the status if all the files in a directory share the same status
522 527
523 528 statuslist is scmutil.status() object which contains a list of files for
524 529 each status.
525 530 terseargs is the string which is passed by the user as the argument to the `--terse`
526 531 flag.
527 532
528 533 The function makes a tree of objects of dirnode class, and at each node it
529 534 stores the information required to know whether we can terse a certain
530 535 directory or not.
531 536
532 537 tersedict (defined in the function) is a dictionary which has a one-word key
533 538 for each status and a list of files and dirs in that status as the respective
534 539 value. The dictionary is passed to other helper functions which build it.
535 540 """
536 541 # the order matters here as that is used to produce final list
537 542 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
538 543
539 544 # checking the argument validity
540 545 for s in terseargs:
541 546 if s not in allst:
542 547 raise error.Abort(_("'%s' not recognized") % s)
543 548
544 549 # creating a dirnode object for the root of the repo
545 550 rootobj = dirnode('')
546 551 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
547 552 'ignored', 'removed')
548 553
549 554 tersedict = {}
550 555 for attrname in pstatus:
551 556 for f in getattr(statuslist, attrname):
552 557 rootobj.addfile(f, attrname[0])
553 558 tersedict[attrname[0]] = []
554 559
555 560 # we won't be tersing the root dir, so add files in it
556 _addfilestotersed(rootobj.path, rootobj.files, tersedict)
561 rootobj._addfilestotersed(tersedict)
557 562
558 563 # process each sub-directory and build tersedict
559 564 for subdir in rootobj.subdirs.values():
560 _processtersestatus(subdir, tersedict, terseargs)
565 subdir._processtersestatus(tersedict, terseargs)
561 566
562 567 tersedlist = []
563 568 for st in allst:
564 569 tersedict[st].sort()
565 570 tersedlist.append(tersedict[st])
566 571
567 572 return tersedlist
568 573
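Using the MiniDirnode sketch above, a rough equivalent of tersedir behaves like this (the file names, statuses and output below are purely illustrative):

root = MiniDirnode('')
for f in ('docs/a.txt', 'docs/api/b.txt', 'src/main.py'):
    root.addfile(f, 'u')                  # 'u' -> unknown
root.addfile('README', 'm')               # 'm' -> modified

tersedict = {'m': [], 'u': []}
for f, st in root.files:                  # the root itself is never tersed
    tersedict[st].append(f)
for sub in root.subdirs.values():
    sub.terse(tersedict, terseargs='u')   # terse only the 'u' status
for st in ('m', 'u'):
    tersedict[st].sort()
print(tersedict)   # {'m': ['README'], 'u': ['docs/', 'src/']}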
569 574 def _commentlines(raw):
570 575 '''Surround lines with a comment char and a new line'''
571 576 lines = raw.splitlines()
572 577 commentedlines = ['# %s' % line for line in lines]
573 578 return '\n'.join(commentedlines) + '\n'
574 579
575 580 def _conflictsmsg(repo):
576 581 # avoid merge cycle
577 582 from . import merge as mergemod
578 583 mergestate = mergemod.mergestate.read(repo)
579 584 if not mergestate.active():
580 585 return
581 586
582 587 m = scmutil.match(repo[None])
583 588 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
584 589 if unresolvedlist:
585 590 mergeliststr = '\n'.join(
586 591 [' %s' % os.path.relpath(
587 592 os.path.join(repo.root, path),
588 593 pycompat.getcwd()) for path in unresolvedlist])
589 594 msg = _('''Unresolved merge conflicts:
590 595
591 596 %s
592 597
593 598 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
594 599 else:
595 600 msg = _('No unresolved merge conflicts.')
596 601
597 602 return _commentlines(msg)
598 603
599 604 def _helpmessage(continuecmd, abortcmd):
600 605 msg = _('To continue: %s\n'
601 606 'To abort: %s') % (continuecmd, abortcmd)
602 607 return _commentlines(msg)
603 608
604 609 def _rebasemsg():
605 610 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
606 611
607 612 def _histeditmsg():
608 613 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
609 614
610 615 def _unshelvemsg():
611 616 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
612 617
613 618 def _updatecleanmsg(dest=None):
614 619 warning = _('warning: this will discard uncommitted changes')
615 620 return 'hg update --clean %s (%s)' % (dest or '.', warning)
616 621
617 622 def _graftmsg():
618 623 # tweakdefaults requires `update` to have a rev hence the `.`
619 624 return _helpmessage('hg graft --continue', _updatecleanmsg())
620 625
621 626 def _mergemsg():
622 627 # tweakdefaults requires `update` to have a rev hence the `.`
623 628 return _helpmessage('hg commit', _updatecleanmsg())
624 629
625 630 def _bisectmsg():
626 631 msg = _('To mark the changeset good: hg bisect --good\n'
627 632 'To mark the changeset bad: hg bisect --bad\n'
628 633 'To abort: hg bisect --reset\n')
629 634 return _commentlines(msg)
630 635
631 636 def fileexistspredicate(filename):
632 637 return lambda repo: repo.vfs.exists(filename)
633 638
634 639 def _mergepredicate(repo):
635 640 return len(repo[None].parents()) > 1
636 641
637 642 STATES = (
638 643 # (state, predicate to detect states, helpful message function)
639 644 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
640 645 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
641 646 ('graft', fileexistspredicate('graftstate'), _graftmsg),
642 647 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
643 648 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
644 649 # The merge state is part of a list that will be iterated over.
645 650 # They need to be last because some of the other unfinished states may also
646 651 # be in a merge or update state (eg. rebase, histedit, graft, etc).
647 652 # We want those to have priority.
648 653 ('merge', _mergepredicate, _mergemsg),
649 654 )
650 655
651 656 def _getrepostate(repo):
652 657 # experimental config: commands.status.skipstates
653 658 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
654 659 for state, statedetectionpredicate, msgfn in STATES:
655 660 if state in skip:
656 661 continue
657 662 if statedetectionpredicate(repo):
658 663 return (state, statedetectionpredicate, msgfn)
659 664
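The STATES tuple above is a small data-driven dispatch table: the first entry whose predicate matches wins, with merge deliberately last. A minimal standalone analogue, with made-up state names and a plain dict standing in for the repository:

def exists(flagname):
    # stand-in for fileexistspredicate: look the flag up in a plain dict
    return lambda repo: repo.get(flagname, False)

FAKESTATES = (
    ('rebase', exists('rebasestate'), lambda: 'hg rebase --continue'),
    ('merge', lambda repo: repo.get('parents', 1) > 1, lambda: 'hg commit'),
)

def getstate(repo, skip=()):
    for state, predicate, msgfn in FAKESTATES:
        if state in skip:
            continue
        if predicate(repo):
            return state, msgfn()

print(getstate({'rebasestate': True}))            # ('rebase', 'hg rebase --continue')
print(getstate({'parents': 2}, skip=('merge',)))  # None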
660 665 def morestatus(repo, fm):
661 666 statetuple = _getrepostate(repo)
662 667 label = 'status.morestatus'
663 668 if statetuple:
664 669 fm.startitem()
665 670 state, statedetectionpredicate, helpfulmsg = statetuple
666 671 statemsg = _('The repository is in an unfinished *%s* state.') % state
667 672 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
668 673 conmsg = _conflictsmsg(repo)
669 674 if conmsg:
670 675 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
671 676 if helpfulmsg:
672 677 helpmsg = helpfulmsg()
673 678 fm.write('helpmsg', '%s\n', helpmsg, label=label)
674 679
675 680 def findpossible(cmd, table, strict=False):
676 681 """
677 682 Return cmd -> (aliases, command table entry)
678 683 for each matching command.
679 684 Return debug commands (or their aliases) only if no normal command matches.
680 685 """
681 686 choice = {}
682 687 debugchoice = {}
683 688
684 689 if cmd in table:
685 690 # short-circuit exact matches, "log" alias beats "^log|history"
686 691 keys = [cmd]
687 692 else:
688 693 keys = table.keys()
689 694
690 695 allcmds = []
691 696 for e in keys:
692 697 aliases = parsealiases(e)
693 698 allcmds.extend(aliases)
694 699 found = None
695 700 if cmd in aliases:
696 701 found = cmd
697 702 elif not strict:
698 703 for a in aliases:
699 704 if a.startswith(cmd):
700 705 found = a
701 706 break
702 707 if found is not None:
703 708 if aliases[0].startswith("debug") or found.startswith("debug"):
704 709 debugchoice[found] = (aliases, table[e])
705 710 else:
706 711 choice[found] = (aliases, table[e])
707 712
708 713 if not choice and debugchoice:
709 714 choice = debugchoice
710 715
711 716 return choice, allcmds
712 717
713 718 def findcmd(cmd, table, strict=True):
714 719 """Return (aliases, command table entry) for command string."""
715 720 choice, allcmds = findpossible(cmd, table, strict)
716 721
717 722 if cmd in choice:
718 723 return choice[cmd]
719 724
720 725 if len(choice) > 1:
721 726 clist = sorted(choice)
722 727 raise error.AmbiguousCommand(cmd, clist)
723 728
724 729 if choice:
725 730 return list(choice.values())[0]
726 731
727 732 raise error.UnknownCommand(cmd, allcmds)
728 733
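The alias and prefix matching in findpossible/findcmd can be illustrated with a tiny standalone table; the entries and return values below are made up, and the debug/exact-match handling is simplified:

table = {
    '^log|history': 'log entry',
    'debuglog': 'debuglog entry',
    'pull': 'pull entry',
}

def parsealiases(cmd):
    return cmd.lstrip('^').split('|')

def findpossible(cmd, table, strict=False):
    choice, debugchoice = {}, {}
    for entry in table:
        aliases = parsealiases(entry)
        found = cmd if cmd in aliases else None
        if found is None and not strict:
            # fall back to prefix matching
            found = next((a for a in aliases if a.startswith(cmd)), None)
        if found is not None:
            target = debugchoice if found.startswith('debug') else choice
            target[found] = (aliases, table[entry])
    return choice or debugchoice   # debug commands only if nothing else matched

print(findpossible('hist', table))   # {'history': (['log', 'history'], 'log entry')}
print(findpossible('lo', table))     # matches 'log' by prefix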
729 734 def findrepo(p):
730 735 while not os.path.isdir(os.path.join(p, ".hg")):
731 736 oldp, p = p, os.path.dirname(p)
732 737 if p == oldp:
733 738 return None
734 739
735 740 return p
736 741
737 742 def bailifchanged(repo, merge=True, hint=None):
738 743 """ enforce the precondition that working directory must be clean.
739 744
740 745 'merge' can be set to false if a pending uncommitted merge should be
741 746 ignored (such as when 'update --check' runs).
742 747
743 748 'hint' is the usual hint given to Abort exception.
744 749 """
745 750
746 751 if merge and repo.dirstate.p2() != nullid:
747 752 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
748 753 modified, added, removed, deleted = repo.status()[:4]
749 754 if modified or added or removed or deleted:
750 755 raise error.Abort(_('uncommitted changes'), hint=hint)
751 756 ctx = repo[None]
752 757 for s in sorted(ctx.substate):
753 758 ctx.sub(s).bailifchanged(hint=hint)
754 759
755 760 def logmessage(ui, opts):
756 761 """ get the log message according to -m and -l option """
757 762 message = opts.get('message')
758 763 logfile = opts.get('logfile')
759 764
760 765 if message and logfile:
761 766 raise error.Abort(_('options --message and --logfile are mutually '
762 767 'exclusive'))
763 768 if not message and logfile:
764 769 try:
765 770 if isstdiofilename(logfile):
766 771 message = ui.fin.read()
767 772 else:
768 773 message = '\n'.join(util.readfile(logfile).splitlines())
769 774 except IOError as inst:
770 775 raise error.Abort(_("can't read commit message '%s': %s") %
771 776 (logfile, encoding.strtolocal(inst.strerror)))
772 777 return message
773 778
774 779 def mergeeditform(ctxorbool, baseformname):
775 780 """return appropriate editform name (referencing a committemplate)
776 781
777 782 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
778 783 merging is committed.
779 784
780 785 This returns baseformname with '.merge' appended if it is a merge,
781 786 otherwise '.normal' is appended.
782 787 """
783 788 if isinstance(ctxorbool, bool):
784 789 if ctxorbool:
785 790 return baseformname + ".merge"
786 791 elif 1 < len(ctxorbool.parents()):
787 792 return baseformname + ".merge"
788 793
789 794 return baseformname + ".normal"
790 795
791 796 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
792 797 editform='', **opts):
793 798 """get appropriate commit message editor according to '--edit' option
794 799
795 800 'finishdesc' is a function to be called with edited commit message
796 801 (= 'description' of the new changeset) just after editing, but
797 802 before checking empty-ness. It should return actual text to be
798 803 stored into history. This allows changing the description before
799 804 storing.
800 805
801 806 'extramsg' is an extra message to be shown in the editor instead of
802 807 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
803 808 is automatically added.
804 809
805 810 'editform' is a dot-separated list of names, to distinguish
806 811 the purpose of commit text editing.
807 812
808 813 'getcommiteditor' returns 'commitforceeditor' regardless of
809 814 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
810 815 they are specific for usage in MQ.
811 816 """
812 817 if edit or finishdesc or extramsg:
813 818 return lambda r, c, s: commitforceeditor(r, c, s,
814 819 finishdesc=finishdesc,
815 820 extramsg=extramsg,
816 821 editform=editform)
817 822 elif editform:
818 823 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
819 824 else:
820 825 return commiteditor
821 826
822 827 def loglimit(opts):
823 828 """get the log limit according to option -l/--limit"""
824 829 limit = opts.get('limit')
825 830 if limit:
826 831 try:
827 832 limit = int(limit)
828 833 except ValueError:
829 834 raise error.Abort(_('limit must be a positive integer'))
830 835 if limit <= 0:
831 836 raise error.Abort(_('limit must be positive'))
832 837 else:
833 838 limit = None
834 839 return limit
835 840
836 841 def makefilename(repo, pat, node, desc=None,
837 842 total=None, seqno=None, revwidth=None, pathname=None):
838 843 node_expander = {
839 844 'H': lambda: hex(node),
840 845 'R': lambda: str(repo.changelog.rev(node)),
841 846 'h': lambda: short(node),
842 847 'm': lambda: re.sub('[^\w]', '_', str(desc))
843 848 }
844 849 expander = {
845 850 '%': lambda: '%',
846 851 'b': lambda: os.path.basename(repo.root),
847 852 }
848 853
849 854 try:
850 855 if node:
851 856 expander.update(node_expander)
852 857 if node:
853 858 expander['r'] = (lambda:
854 859 str(repo.changelog.rev(node)).zfill(revwidth or 0))
855 860 if total is not None:
856 861 expander['N'] = lambda: str(total)
857 862 if seqno is not None:
858 863 expander['n'] = lambda: str(seqno)
859 864 if total is not None and seqno is not None:
860 865 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
861 866 if pathname is not None:
862 867 expander['s'] = lambda: os.path.basename(pathname)
863 868 expander['d'] = lambda: os.path.dirname(pathname) or '.'
864 869 expander['p'] = lambda: pathname
865 870
866 871 newname = []
867 872 patlen = len(pat)
868 873 i = 0
869 874 while i < patlen:
870 875 c = pat[i:i + 1]
871 876 if c == '%':
872 877 i += 1
873 878 c = pat[i:i + 1]
874 879 c = expander[c]()
875 880 newname.append(c)
876 881 i += 1
877 882 return ''.join(newname)
878 883 except KeyError as inst:
879 884 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
880 885 inst.args[0])
881 886
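The %-expansion loop in makefilename is a small hand-rolled formatter; stripped of the repository-specific expanders, the scanning logic looks like this (the expander values are made up):

def expandname(pat, expander):
    newname = []
    i, patlen = 0, len(pat)
    while i < patlen:
        c = pat[i]
        if c == '%':                      # '%X' -> expander['X']()
            i += 1
            c = expander[pat[i]]()
        newname.append(c)
        i += 1
    return ''.join(newname)

expander = {
    '%': lambda: '%',
    'h': lambda: 'a1b2c3d4e5f6',          # short node, invented here
    'n': lambda: '03',                    # zero-padded sequence number
}
print(expandname('hg-%h-%n.patch', expander))   # hg-a1b2c3d4e5f6-03.patch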
882 887 def isstdiofilename(pat):
883 888 """True if the given pat looks like a filename denoting stdin/stdout"""
884 889 return not pat or pat == '-'
885 890
886 891 class _unclosablefile(object):
887 892 def __init__(self, fp):
888 893 self._fp = fp
889 894
890 895 def close(self):
891 896 pass
892 897
893 898 def __iter__(self):
894 899 return iter(self._fp)
895 900
896 901 def __getattr__(self, attr):
897 902 return getattr(self._fp, attr)
898 903
899 904 def __enter__(self):
900 905 return self
901 906
902 907 def __exit__(self, exc_type, exc_value, exc_tb):
903 908 pass
904 909
905 910 def makefileobj(repo, pat, node=None, desc=None, total=None,
906 911 seqno=None, revwidth=None, mode='wb', modemap=None,
907 912 pathname=None):
908 913
909 914 writable = mode not in ('r', 'rb')
910 915
911 916 if isstdiofilename(pat):
912 917 if writable:
913 918 fp = repo.ui.fout
914 919 else:
915 920 fp = repo.ui.fin
916 921 return _unclosablefile(fp)
917 922 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
918 923 if modemap is not None:
919 924 mode = modemap.get(fn, mode)
920 925 if mode == 'wb':
921 926 modemap[fn] = 'ab'
922 927 return open(fn, mode)
923 928
924 929 def openrevlog(repo, cmd, file_, opts):
925 930 """opens the changelog, manifest, a filelog or a given revlog"""
926 931 cl = opts['changelog']
927 932 mf = opts['manifest']
928 933 dir = opts['dir']
929 934 msg = None
930 935 if cl and mf:
931 936 msg = _('cannot specify --changelog and --manifest at the same time')
932 937 elif cl and dir:
933 938 msg = _('cannot specify --changelog and --dir at the same time')
934 939 elif cl or mf or dir:
935 940 if file_:
936 941 msg = _('cannot specify filename with --changelog or --manifest')
937 942 elif not repo:
938 943 msg = _('cannot specify --changelog or --manifest or --dir '
939 944 'without a repository')
940 945 if msg:
941 946 raise error.Abort(msg)
942 947
943 948 r = None
944 949 if repo:
945 950 if cl:
946 951 r = repo.unfiltered().changelog
947 952 elif dir:
948 953 if 'treemanifest' not in repo.requirements:
949 954 raise error.Abort(_("--dir can only be used on repos with "
950 955 "treemanifest enabled"))
951 956 dirlog = repo.manifestlog._revlog.dirlog(dir)
952 957 if len(dirlog):
953 958 r = dirlog
954 959 elif mf:
955 960 r = repo.manifestlog._revlog
956 961 elif file_:
957 962 filelog = repo.file(file_)
958 963 if len(filelog):
959 964 r = filelog
960 965 if not r:
961 966 if not file_:
962 967 raise error.CommandError(cmd, _('invalid arguments'))
963 968 if not os.path.isfile(file_):
964 969 raise error.Abort(_("revlog '%s' not found") % file_)
965 970 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
966 971 file_[:-2] + ".i")
967 972 return r
968 973
969 974 def copy(ui, repo, pats, opts, rename=False):
970 975 # called with the repo lock held
971 976 #
972 977 # hgsep => pathname that uses "/" to separate directories
973 978 # ossep => pathname that uses os.sep to separate directories
974 979 cwd = repo.getcwd()
975 980 targets = {}
976 981 after = opts.get("after")
977 982 dryrun = opts.get("dry_run")
978 983 wctx = repo[None]
979 984
980 985 def walkpat(pat):
981 986 srcs = []
982 987 if after:
983 988 badstates = '?'
984 989 else:
985 990 badstates = '?r'
986 991 m = scmutil.match(wctx, [pat], opts, globbed=True)
987 992 for abs in wctx.walk(m):
988 993 state = repo.dirstate[abs]
989 994 rel = m.rel(abs)
990 995 exact = m.exact(abs)
991 996 if state in badstates:
992 997 if exact and state == '?':
993 998 ui.warn(_('%s: not copying - file is not managed\n') % rel)
994 999 if exact and state == 'r':
995 1000 ui.warn(_('%s: not copying - file has been marked for'
996 1001 ' remove\n') % rel)
997 1002 continue
998 1003 # abs: hgsep
999 1004 # rel: ossep
1000 1005 srcs.append((abs, rel, exact))
1001 1006 return srcs
1002 1007
1003 1008 # abssrc: hgsep
1004 1009 # relsrc: ossep
1005 1010 # otarget: ossep
1006 1011 def copyfile(abssrc, relsrc, otarget, exact):
1007 1012 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1008 1013 if '/' in abstarget:
1009 1014 # We cannot normalize abstarget itself, this would prevent
1010 1015 # case-only renames, like a => A.
1011 1016 abspath, absname = abstarget.rsplit('/', 1)
1012 1017 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1013 1018 reltarget = repo.pathto(abstarget, cwd)
1014 1019 target = repo.wjoin(abstarget)
1015 1020 src = repo.wjoin(abssrc)
1016 1021 state = repo.dirstate[abstarget]
1017 1022
1018 1023 scmutil.checkportable(ui, abstarget)
1019 1024
1020 1025 # check for collisions
1021 1026 prevsrc = targets.get(abstarget)
1022 1027 if prevsrc is not None:
1023 1028 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1024 1029 (reltarget, repo.pathto(abssrc, cwd),
1025 1030 repo.pathto(prevsrc, cwd)))
1026 1031 return
1027 1032
1028 1033 # check for overwrites
1029 1034 exists = os.path.lexists(target)
1030 1035 samefile = False
1031 1036 if exists and abssrc != abstarget:
1032 1037 if (repo.dirstate.normalize(abssrc) ==
1033 1038 repo.dirstate.normalize(abstarget)):
1034 1039 if not rename:
1035 1040 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1036 1041 return
1037 1042 exists = False
1038 1043 samefile = True
1039 1044
1040 1045 if not after and exists or after and state in 'mn':
1041 1046 if not opts['force']:
1042 1047 if state in 'mn':
1043 1048 msg = _('%s: not overwriting - file already committed\n')
1044 1049 if after:
1045 1050 flags = '--after --force'
1046 1051 else:
1047 1052 flags = '--force'
1048 1053 if rename:
1049 1054 hint = _('(hg rename %s to replace the file by '
1050 1055 'recording a rename)\n') % flags
1051 1056 else:
1052 1057 hint = _('(hg copy %s to replace the file by '
1053 1058 'recording a copy)\n') % flags
1054 1059 else:
1055 1060 msg = _('%s: not overwriting - file exists\n')
1056 1061 if rename:
1057 1062 hint = _('(hg rename --after to record the rename)\n')
1058 1063 else:
1059 1064 hint = _('(hg copy --after to record the copy)\n')
1060 1065 ui.warn(msg % reltarget)
1061 1066 ui.warn(hint)
1062 1067 return
1063 1068
1064 1069 if after:
1065 1070 if not exists:
1066 1071 if rename:
1067 1072 ui.warn(_('%s: not recording move - %s does not exist\n') %
1068 1073 (relsrc, reltarget))
1069 1074 else:
1070 1075 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1071 1076 (relsrc, reltarget))
1072 1077 return
1073 1078 elif not dryrun:
1074 1079 try:
1075 1080 if exists:
1076 1081 os.unlink(target)
1077 1082 targetdir = os.path.dirname(target) or '.'
1078 1083 if not os.path.isdir(targetdir):
1079 1084 os.makedirs(targetdir)
1080 1085 if samefile:
1081 1086 tmp = target + "~hgrename"
1082 1087 os.rename(src, tmp)
1083 1088 os.rename(tmp, target)
1084 1089 else:
1085 1090 util.copyfile(src, target)
1086 1091 srcexists = True
1087 1092 except IOError as inst:
1088 1093 if inst.errno == errno.ENOENT:
1089 1094 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1090 1095 srcexists = False
1091 1096 else:
1092 1097 ui.warn(_('%s: cannot copy - %s\n') %
1093 1098 (relsrc, encoding.strtolocal(inst.strerror)))
1094 1099 return True # report a failure
1095 1100
1096 1101 if ui.verbose or not exact:
1097 1102 if rename:
1098 1103 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1099 1104 else:
1100 1105 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1101 1106
1102 1107 targets[abstarget] = abssrc
1103 1108
1104 1109 # fix up dirstate
1105 1110 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1106 1111 dryrun=dryrun, cwd=cwd)
1107 1112 if rename and not dryrun:
1108 1113 if not after and srcexists and not samefile:
1109 1114 repo.wvfs.unlinkpath(abssrc)
1110 1115 wctx.forget([abssrc])
1111 1116
1112 1117 # pat: ossep
1113 1118 # dest ossep
1114 1119 # srcs: list of (hgsep, hgsep, ossep, bool)
1115 1120 # return: function that takes hgsep and returns ossep
1116 1121 def targetpathfn(pat, dest, srcs):
1117 1122 if os.path.isdir(pat):
1118 1123 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1119 1124 abspfx = util.localpath(abspfx)
1120 1125 if destdirexists:
1121 1126 striplen = len(os.path.split(abspfx)[0])
1122 1127 else:
1123 1128 striplen = len(abspfx)
1124 1129 if striplen:
1125 1130 striplen += len(pycompat.ossep)
1126 1131 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1127 1132 elif destdirexists:
1128 1133 res = lambda p: os.path.join(dest,
1129 1134 os.path.basename(util.localpath(p)))
1130 1135 else:
1131 1136 res = lambda p: dest
1132 1137 return res
1133 1138
1134 1139 # pat: ossep
1135 1140 # dest ossep
1136 1141 # srcs: list of (hgsep, hgsep, ossep, bool)
1137 1142 # return: function that takes hgsep and returns ossep
1138 1143 def targetpathafterfn(pat, dest, srcs):
1139 1144 if matchmod.patkind(pat):
1140 1145 # a mercurial pattern
1141 1146 res = lambda p: os.path.join(dest,
1142 1147 os.path.basename(util.localpath(p)))
1143 1148 else:
1144 1149 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1145 1150 if len(abspfx) < len(srcs[0][0]):
1146 1151 # A directory. Either the target path contains the last
1147 1152 # component of the source path or it does not.
1148 1153 def evalpath(striplen):
1149 1154 score = 0
1150 1155 for s in srcs:
1151 1156 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1152 1157 if os.path.lexists(t):
1153 1158 score += 1
1154 1159 return score
1155 1160
1156 1161 abspfx = util.localpath(abspfx)
1157 1162 striplen = len(abspfx)
1158 1163 if striplen:
1159 1164 striplen += len(pycompat.ossep)
1160 1165 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1161 1166 score = evalpath(striplen)
1162 1167 striplen1 = len(os.path.split(abspfx)[0])
1163 1168 if striplen1:
1164 1169 striplen1 += len(pycompat.ossep)
1165 1170 if evalpath(striplen1) > score:
1166 1171 striplen = striplen1
1167 1172 res = lambda p: os.path.join(dest,
1168 1173 util.localpath(p)[striplen:])
1169 1174 else:
1170 1175 # a file
1171 1176 if destdirexists:
1172 1177 res = lambda p: os.path.join(dest,
1173 1178 os.path.basename(util.localpath(p)))
1174 1179 else:
1175 1180 res = lambda p: dest
1176 1181 return res
1177 1182
1178 1183 pats = scmutil.expandpats(pats)
1179 1184 if not pats:
1180 1185 raise error.Abort(_('no source or destination specified'))
1181 1186 if len(pats) == 1:
1182 1187 raise error.Abort(_('no destination specified'))
1183 1188 dest = pats.pop()
1184 1189 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1185 1190 if not destdirexists:
1186 1191 if len(pats) > 1 or matchmod.patkind(pats[0]):
1187 1192 raise error.Abort(_('with multiple sources, destination must be an '
1188 1193 'existing directory'))
1189 1194 if util.endswithsep(dest):
1190 1195 raise error.Abort(_('destination %s is not a directory') % dest)
1191 1196
1192 1197 tfn = targetpathfn
1193 1198 if after:
1194 1199 tfn = targetpathafterfn
1195 1200 copylist = []
1196 1201 for pat in pats:
1197 1202 srcs = walkpat(pat)
1198 1203 if not srcs:
1199 1204 continue
1200 1205 copylist.append((tfn(pat, dest, srcs), srcs))
1201 1206 if not copylist:
1202 1207 raise error.Abort(_('no files to copy'))
1203 1208
1204 1209 errors = 0
1205 1210 for targetpath, srcs in copylist:
1206 1211 for abssrc, relsrc, exact in srcs:
1207 1212 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1208 1213 errors += 1
1209 1214
1210 1215 if errors:
1211 1216 ui.warn(_('(consider using --after)\n'))
1212 1217
1213 1218 return errors != 0
1214 1219
1215 1220 ## facility to let extension process additional data into an import patch
1216 1221 # list of identifier to be executed in order
1217 1222 extrapreimport = [] # run before commit
1218 1223 extrapostimport = [] # run after commit
1219 1224 # mapping from identifier to actual import function
1220 1225 #
1221 1226 # 'preimport' are run before the commit is made and are provided the following
1222 1227 # arguments:
1223 1228 # - repo: the localrepository instance,
1224 1229 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1225 1230 # - extra: the future extra dictionary of the changeset, please mutate it,
1226 1231 # - opts: the import options.
1227 1232 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1228 1233 # mutation of in memory commit and more. Feel free to rework the code to get
1229 1234 # there.
1230 1235 extrapreimportmap = {}
1231 1236 # 'postimport' are run after the commit is made and are provided the following
1232 1237 # argument:
1233 1238 # - ctx: the changectx created by import.
1234 1239 extrapostimportmap = {}
1235 1240
1236 1241 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1237 1242 """Utility function used by commands.import to import a single patch
1238 1243
1239 1244 This function is explicitly defined here to help the evolve extension to
1240 1245 wrap this part of the import logic.
1241 1246
1242 1247 The API is currently a bit ugly because it is a simple code translation from
1243 1248 the import command. Feel free to make it better.
1244 1249
1245 1250 :hunk: a patch (as a binary string)
1246 1251 :parents: nodes that will be parent of the created commit
1247 1252 :opts: the full dict of option passed to the import command
1248 1253 :msgs: list to save commit message to.
1249 1254 (used in case we need to save it when failing)
1250 1255 :updatefunc: a function that update a repo to a given node
1251 1256 updatefunc(<repo>, <node>)
1252 1257 """
1253 1258 # avoid cycle context -> subrepo -> cmdutil
1254 1259 from . import context
1255 1260 extractdata = patch.extract(ui, hunk)
1256 1261 tmpname = extractdata.get('filename')
1257 1262 message = extractdata.get('message')
1258 1263 user = opts.get('user') or extractdata.get('user')
1259 1264 date = opts.get('date') or extractdata.get('date')
1260 1265 branch = extractdata.get('branch')
1261 1266 nodeid = extractdata.get('nodeid')
1262 1267 p1 = extractdata.get('p1')
1263 1268 p2 = extractdata.get('p2')
1264 1269
1265 1270 nocommit = opts.get('no_commit')
1266 1271 importbranch = opts.get('import_branch')
1267 1272 update = not opts.get('bypass')
1268 1273 strip = opts["strip"]
1269 1274 prefix = opts["prefix"]
1270 1275 sim = float(opts.get('similarity') or 0)
1271 1276 if not tmpname:
1272 1277 return (None, None, False)
1273 1278
1274 1279 rejects = False
1275 1280
1276 1281 try:
1277 1282 cmdline_message = logmessage(ui, opts)
1278 1283 if cmdline_message:
1279 1284 # pickup the cmdline msg
1280 1285 message = cmdline_message
1281 1286 elif message:
1282 1287 # pickup the patch msg
1283 1288 message = message.strip()
1284 1289 else:
1285 1290 # launch the editor
1286 1291 message = None
1287 1292 ui.debug('message:\n%s\n' % message)
1288 1293
1289 1294 if len(parents) == 1:
1290 1295 parents.append(repo[nullid])
1291 1296 if opts.get('exact'):
1292 1297 if not nodeid or not p1:
1293 1298 raise error.Abort(_('not a Mercurial patch'))
1294 1299 p1 = repo[p1]
1295 1300 p2 = repo[p2 or nullid]
1296 1301 elif p2:
1297 1302 try:
1298 1303 p1 = repo[p1]
1299 1304 p2 = repo[p2]
1300 1305 # Without any options, consider p2 only if the
1301 1306 # patch is being applied on top of the recorded
1302 1307 # first parent.
1303 1308 if p1 != parents[0]:
1304 1309 p1 = parents[0]
1305 1310 p2 = repo[nullid]
1306 1311 except error.RepoError:
1307 1312 p1, p2 = parents
1308 1313 if p2.node() == nullid:
1309 1314 ui.warn(_("warning: import the patch as a normal revision\n"
1310 1315 "(use --exact to import the patch as a merge)\n"))
1311 1316 else:
1312 1317 p1, p2 = parents
1313 1318
1314 1319 n = None
1315 1320 if update:
1316 1321 if p1 != parents[0]:
1317 1322 updatefunc(repo, p1.node())
1318 1323 if p2 != parents[1]:
1319 1324 repo.setparents(p1.node(), p2.node())
1320 1325
1321 1326 if opts.get('exact') or importbranch:
1322 1327 repo.dirstate.setbranch(branch or 'default')
1323 1328
1324 1329 partial = opts.get('partial', False)
1325 1330 files = set()
1326 1331 try:
1327 1332 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1328 1333 files=files, eolmode=None, similarity=sim / 100.0)
1329 1334 except error.PatchError as e:
1330 1335 if not partial:
1331 1336 raise error.Abort(str(e))
1332 1337 if partial:
1333 1338 rejects = True
1334 1339
1335 1340 files = list(files)
1336 1341 if nocommit:
1337 1342 if message:
1338 1343 msgs.append(message)
1339 1344 else:
1340 1345 if opts.get('exact') or p2:
1341 1346 # If you got here, you either use --force and know what
1342 1347 # you are doing or used --exact or a merge patch while
1343 1348 # being updated to its first parent.
1344 1349 m = None
1345 1350 else:
1346 1351 m = scmutil.matchfiles(repo, files or [])
1347 1352 editform = mergeeditform(repo[None], 'import.normal')
1348 1353 if opts.get('exact'):
1349 1354 editor = None
1350 1355 else:
1351 1356 editor = getcommiteditor(editform=editform, **opts)
1352 1357 extra = {}
1353 1358 for idfunc in extrapreimport:
1354 1359 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1355 1360 overrides = {}
1356 1361 if partial:
1357 1362 overrides[('ui', 'allowemptycommit')] = True
1358 1363 with repo.ui.configoverride(overrides, 'import'):
1359 1364 n = repo.commit(message, user,
1360 1365 date, match=m,
1361 1366 editor=editor, extra=extra)
1362 1367 for idfunc in extrapostimport:
1363 1368 extrapostimportmap[idfunc](repo[n])
1364 1369 else:
1365 1370 if opts.get('exact') or importbranch:
1366 1371 branch = branch or 'default'
1367 1372 else:
1368 1373 branch = p1.branch()
1369 1374 store = patch.filestore()
1370 1375 try:
1371 1376 files = set()
1372 1377 try:
1373 1378 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1374 1379 files, eolmode=None)
1375 1380 except error.PatchError as e:
1376 1381 raise error.Abort(str(e))
1377 1382 if opts.get('exact'):
1378 1383 editor = None
1379 1384 else:
1380 1385 editor = getcommiteditor(editform='import.bypass')
1381 1386 memctx = context.memctx(repo, (p1.node(), p2.node()),
1382 1387 message,
1383 1388 files=files,
1384 1389 filectxfn=store,
1385 1390 user=user,
1386 1391 date=date,
1387 1392 branch=branch,
1388 1393 editor=editor)
1389 1394 n = memctx.commit()
1390 1395 finally:
1391 1396 store.close()
1392 1397 if opts.get('exact') and nocommit:
1393 1398 # --exact with --no-commit is still useful in that it does merge
1394 1399 # and branch bits
1395 1400 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1396 1401 elif opts.get('exact') and hex(n) != nodeid:
1397 1402 raise error.Abort(_('patch is damaged or loses information'))
1398 1403 msg = _('applied to working directory')
1399 1404 if n:
1400 1405 # i18n: refers to a short changeset id
1401 1406 msg = _('created %s') % short(n)
1402 1407 return (msg, n, rejects)
1403 1408 finally:
1404 1409 os.unlink(tmpname)
1405 1410
1406 1411 # facility to let extensions include additional data in an exported patch
1407 1412 # list of identifiers to be executed in order
1408 1413 extraexport = []
1409 1414 # mapping from identifier to actual export function
1410 1415 # function has to return a string to be added to the header or None
1411 1416 # it is given two arguments (sequencenumber, changectx)
1412 1417 extraexportmap = {}
1413 1418
1414 1419 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1415 1420 node = scmutil.binnode(ctx)
1416 1421 parents = [p.node() for p in ctx.parents() if p]
1417 1422 branch = ctx.branch()
1418 1423 if switch_parent:
1419 1424 parents.reverse()
1420 1425
1421 1426 if parents:
1422 1427 prev = parents[0]
1423 1428 else:
1424 1429 prev = nullid
1425 1430
1426 1431 write("# HG changeset patch\n")
1427 1432 write("# User %s\n" % ctx.user())
1428 1433 write("# Date %d %d\n" % ctx.date())
1429 1434 write("# %s\n" % util.datestr(ctx.date()))
1430 1435 if branch and branch != 'default':
1431 1436 write("# Branch %s\n" % branch)
1432 1437 write("# Node ID %s\n" % hex(node))
1433 1438 write("# Parent %s\n" % hex(prev))
1434 1439 if len(parents) > 1:
1435 1440 write("# Parent %s\n" % hex(parents[1]))
1436 1441
1437 1442 for headerid in extraexport:
1438 1443 header = extraexportmap[headerid](seqno, ctx)
1439 1444 if header is not None:
1440 1445 write('# %s\n' % header)
1441 1446 write(ctx.description().rstrip())
1442 1447 write("\n\n")
1443 1448
1444 1449 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1445 1450 write(chunk, label=label)
1446 1451
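For reference, the fixed header layout written by _exportsingle can be reproduced in isolation. The sketch below uses made-up values and omits the human-readable date line, the extraexport headers and the diff body that the real function also emits:

def exportheader(user, date, node, parent, branch=None):
    lines = ['# HG changeset patch',
             '# User %s' % user,
             '# Date %d %d' % date]
    if branch and branch != 'default':
        lines.append('# Branch %s' % branch)
    lines.append('# Node ID %s' % node)
    lines.append('# Parent %s' % parent)
    return '\n'.join(lines) + '\n'

print(exportheader('Alice <alice@example.com>', (1509000000, 0),
                   40 * 'f', 40 * '0', branch='stable'))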
1447 1452 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1448 1453 opts=None, match=None):
1449 1454 '''export changesets as hg patches
1450 1455
1451 1456 Args:
1452 1457 repo: The repository from which we're exporting revisions.
1453 1458 revs: A list of revisions to export as revision numbers.
1454 1459 fntemplate: An optional string to use for generating patch file names.
1455 1460 fp: An optional file-like object to which patches should be written.
1456 1461 switch_parent: If True, show diffs against second parent when not nullid.
1457 1462 Default is false, which always shows diff against p1.
1458 1463 opts: diff options to use for generating the patch.
1459 1464 match: If specified, only export changes to files matching this matcher.
1460 1465
1461 1466 Returns:
1462 1467 Nothing.
1463 1468
1464 1469 Side Effect:
1465 1470 "HG Changeset Patch" data is emitted to one of the following
1466 1471 destinations:
1467 1472 fp is specified: All revs are written to the specified
1468 1473 file-like object.
1469 1474 fntemplate specified: Each rev is written to a unique file named using
1470 1475 the given template.
1471 1476 Neither fp nor template specified: All revs written to repo.ui.write()
1472 1477 '''
1473 1478
1474 1479 total = len(revs)
1475 1480 revwidth = max(len(str(rev)) for rev in revs)
1476 1481 filemode = {}
1477 1482
1478 1483 write = None
1479 1484 dest = '<unnamed>'
1480 1485 if fp:
1481 1486 dest = getattr(fp, 'name', dest)
1482 1487 def write(s, **kw):
1483 1488 fp.write(s)
1484 1489 elif not fntemplate:
1485 1490 write = repo.ui.write
1486 1491
1487 1492 for seqno, rev in enumerate(revs, 1):
1488 1493 ctx = repo[rev]
1489 1494 fo = None
1490 1495 if not fp and fntemplate:
1491 1496 desc_lines = ctx.description().rstrip().split('\n')
1492 1497 desc = desc_lines[0] #Commit always has a first line.
1493 1498 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1494 1499 total=total, seqno=seqno, revwidth=revwidth,
1495 1500 mode='wb', modemap=filemode)
1496 1501 dest = fo.name
1497 1502 def write(s, **kw):
1498 1503 fo.write(s)
1499 1504 if not dest.startswith('<'):
1500 1505 repo.ui.note("%s\n" % dest)
1501 1506 _exportsingle(
1502 1507 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1503 1508 if fo is not None:
1504 1509 fo.close()
1505 1510
1506 1511 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1507 1512 changes=None, stat=False, fp=None, prefix='',
1508 1513 root='', listsubrepos=False):
1509 1514 '''show diff or diffstat.'''
1510 1515 if fp is None:
1511 1516 write = ui.write
1512 1517 else:
1513 1518 def write(s, **kw):
1514 1519 fp.write(s)
1515 1520
1516 1521 if root:
1517 1522 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1518 1523 else:
1519 1524 relroot = ''
1520 1525 if relroot != '':
1521 1526 # XXX relative roots currently don't work if the root is within a
1522 1527 # subrepo
1523 1528 uirelroot = match.uipath(relroot)
1524 1529 relroot += '/'
1525 1530 for matchroot in match.files():
1526 1531 if not matchroot.startswith(relroot):
1527 1532 ui.warn(_('warning: %s not inside relative root %s\n') % (
1528 1533 match.uipath(matchroot), uirelroot))
1529 1534
1530 1535 if stat:
1531 1536 diffopts = diffopts.copy(context=0)
1532 1537 width = 80
1533 1538 if not ui.plain():
1534 1539 width = ui.termwidth()
1535 1540 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1536 1541 prefix=prefix, relroot=relroot)
1537 1542 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1538 1543 width=width):
1539 1544 write(chunk, label=label)
1540 1545 else:
1541 1546 for chunk, label in patch.diffui(repo, node1, node2, match,
1542 1547 changes, diffopts, prefix=prefix,
1543 1548 relroot=relroot):
1544 1549 write(chunk, label=label)
1545 1550
1546 1551 if listsubrepos:
1547 1552 ctx1 = repo[node1]
1548 1553 ctx2 = repo[node2]
1549 1554 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1550 1555 tempnode2 = node2
1551 1556 try:
1552 1557 if node2 is not None:
1553 1558 tempnode2 = ctx2.substate[subpath][1]
1554 1559 except KeyError:
1555 1560 # A subrepo that existed in node1 was deleted between node1 and
1556 1561 # node2 (inclusive). Thus, ctx2's substate won't contain that
1557 1562 # subpath. The best we can do is to ignore it.
1558 1563 tempnode2 = None
1559 1564 submatch = matchmod.subdirmatcher(subpath, match)
1560 1565 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1561 1566 stat=stat, fp=fp, prefix=prefix)
1562 1567
1563 1568 def _changesetlabels(ctx):
1564 1569 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1565 1570 if ctx.obsolete():
1566 1571 labels.append('changeset.obsolete')
1567 1572 if ctx.isunstable():
1568 1573 labels.append('changeset.unstable')
1569 1574 for instability in ctx.instabilities():
1570 1575 labels.append('instability.%s' % instability)
1571 1576 return ' '.join(labels)
1572 1577
1573 1578 class changeset_printer(object):
1574 1579 '''show changeset information when templating is not requested.'''
1575 1580
1576 1581 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1577 1582 self.ui = ui
1578 1583 self.repo = repo
1579 1584 self.buffered = buffered
1580 1585 self.matchfn = matchfn
1581 1586 self.diffopts = diffopts
1582 1587 self.header = {}
1583 1588 self.hunk = {}
1584 1589 self.lastheader = None
1585 1590 self.footer = None
1586 1591
1587 1592 def flush(self, ctx):
1588 1593 rev = ctx.rev()
1589 1594 if rev in self.header:
1590 1595 h = self.header[rev]
1591 1596 if h != self.lastheader:
1592 1597 self.lastheader = h
1593 1598 self.ui.write(h)
1594 1599 del self.header[rev]
1595 1600 if rev in self.hunk:
1596 1601 self.ui.write(self.hunk[rev])
1597 1602 del self.hunk[rev]
1598 1603 return 1
1599 1604 return 0
1600 1605
1601 1606 def close(self):
1602 1607 if self.footer:
1603 1608 self.ui.write(self.footer)
1604 1609
1605 1610 def show(self, ctx, copies=None, matchfn=None, **props):
1606 1611 props = pycompat.byteskwargs(props)
1607 1612 if self.buffered:
1608 1613 self.ui.pushbuffer(labeled=True)
1609 1614 self._show(ctx, copies, matchfn, props)
1610 1615 self.hunk[ctx.rev()] = self.ui.popbuffer()
1611 1616 else:
1612 1617 self._show(ctx, copies, matchfn, props)
1613 1618
1614 1619 def _show(self, ctx, copies, matchfn, props):
1615 1620 '''show a single changeset or file revision'''
1616 1621 changenode = ctx.node()
1617 1622 rev = ctx.rev()
1618 1623
1619 1624 if self.ui.quiet:
1620 1625 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
1621 1626 label='log.node')
1622 1627 return
1623 1628
1624 1629 date = util.datestr(ctx.date())
1625 1630
1626 1631 # i18n: column positioning for "hg log"
1627 1632 self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
1628 1633 label=_changesetlabels(ctx))
1629 1634
1630 1635 # branches are shown first before any other names due to backwards
1631 1636 # compatibility
1632 1637 branch = ctx.branch()
1633 1638 # don't show the default branch name
1634 1639 if branch != 'default':
1635 1640 # i18n: column positioning for "hg log"
1636 1641 self.ui.write(_("branch: %s\n") % branch,
1637 1642 label='log.branch')
1638 1643
1639 1644 for nsname, ns in self.repo.names.iteritems():
1640 1645 # branches has special logic already handled above, so here we just
1641 1646 # skip it
1642 1647 if nsname == 'branches':
1643 1648 continue
1644 1649 # we will use the templatename as the color name since those two
1645 1650 # should be the same
1646 1651 for name in ns.names(self.repo, changenode):
1647 1652 self.ui.write(ns.logfmt % name,
1648 1653 label='log.%s' % ns.colorname)
1649 1654 if self.ui.debugflag:
1650 1655 # i18n: column positioning for "hg log"
1651 1656 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1652 1657 label='log.phase')
1653 1658 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1654 1659 label = 'log.parent changeset.%s' % pctx.phasestr()
1655 1660 # i18n: column positioning for "hg log"
1656 1661 self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
1657 1662 label=label)
1658 1663
1659 1664 if self.ui.debugflag and rev is not None:
1660 1665 mnode = ctx.manifestnode()
1661 1666 mrev = self.repo.manifestlog._revlog.rev(mnode)
1662 1667 # i18n: column positioning for "hg log"
1663 1668 self.ui.write(_("manifest: %s\n")
1664 1669 % scmutil.formatrevnode(self.ui, mrev, mnode),
1665 1670 label='ui.debug log.manifest')
1666 1671 # i18n: column positioning for "hg log"
1667 1672 self.ui.write(_("user: %s\n") % ctx.user(),
1668 1673 label='log.user')
1669 1674 # i18n: column positioning for "hg log"
1670 1675 self.ui.write(_("date: %s\n") % date,
1671 1676 label='log.date')
1672 1677
1673 1678 if ctx.isunstable():
1674 1679 # i18n: column positioning for "hg log"
1675 1680 instabilities = ctx.instabilities()
1676 1681 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1677 1682 label='log.instability')
1678 1683
1679 1684 self._exthook(ctx)
1680 1685
1681 1686 if self.ui.debugflag:
1682 1687 files = ctx.p1().status(ctx)[:3]
1683 1688 for key, value in zip([# i18n: column positioning for "hg log"
1684 1689 _("files:"),
1685 1690 # i18n: column positioning for "hg log"
1686 1691 _("files+:"),
1687 1692 # i18n: column positioning for "hg log"
1688 1693 _("files-:")], files):
1689 1694 if value:
1690 1695 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1691 1696 label='ui.debug log.files')
1692 1697 elif ctx.files() and self.ui.verbose:
1693 1698 # i18n: column positioning for "hg log"
1694 1699 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1695 1700 label='ui.note log.files')
1696 1701 if copies and self.ui.verbose:
1697 1702 copies = ['%s (%s)' % c for c in copies]
1698 1703 # i18n: column positioning for "hg log"
1699 1704 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1700 1705 label='ui.note log.copies')
1701 1706
1702 1707 extra = ctx.extra()
1703 1708 if extra and self.ui.debugflag:
1704 1709 for key, value in sorted(extra.items()):
1705 1710 # i18n: column positioning for "hg log"
1706 1711 self.ui.write(_("extra: %s=%s\n")
1707 1712 % (key, util.escapestr(value)),
1708 1713 label='ui.debug log.extra')
1709 1714
1710 1715 description = ctx.description().strip()
1711 1716 if description:
1712 1717 if self.ui.verbose:
1713 1718 self.ui.write(_("description:\n"),
1714 1719 label='ui.note log.description')
1715 1720 self.ui.write(description,
1716 1721 label='ui.note log.description')
1717 1722 self.ui.write("\n\n")
1718 1723 else:
1719 1724 # i18n: column positioning for "hg log"
1720 1725 self.ui.write(_("summary: %s\n") %
1721 1726 description.splitlines()[0],
1722 1727 label='log.summary')
1723 1728 self.ui.write("\n")
1724 1729
1725 1730 self.showpatch(ctx, matchfn)
1726 1731
1727 1732 def _exthook(self, ctx):
1728 1733 '''empty method used by extensions as a hook point
1729 1734 '''
1730 1735
1731 1736 def showpatch(self, ctx, matchfn):
1732 1737 if not matchfn:
1733 1738 matchfn = self.matchfn
1734 1739 if matchfn:
1735 1740 stat = self.diffopts.get('stat')
1736 1741 diff = self.diffopts.get('patch')
1737 1742 diffopts = patch.diffallopts(self.ui, self.diffopts)
1738 1743 node = ctx.node()
1739 1744 prev = ctx.p1().node()
1740 1745 if stat:
1741 1746 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1742 1747 match=matchfn, stat=True)
1743 1748 if diff:
1744 1749 if stat:
1745 1750 self.ui.write("\n")
1746 1751 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1747 1752 match=matchfn, stat=False)
1748 1753 self.ui.write("\n")
1749 1754
1750 1755 class jsonchangeset(changeset_printer):
1751 1756 '''format changeset information as JSON.'''
1752 1757
1753 1758 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1754 1759 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1755 1760 self.cache = {}
1756 1761 self._first = True
1757 1762
1758 1763 def close(self):
1759 1764 if not self._first:
1760 1765 self.ui.write("\n]\n")
1761 1766 else:
1762 1767 self.ui.write("[]\n")
1763 1768
1764 1769 def _show(self, ctx, copies, matchfn, props):
1765 1770 '''show a single changeset or file revision'''
1766 1771 rev = ctx.rev()
1767 1772 if rev is None:
1768 1773 jrev = jnode = 'null'
1769 1774 else:
1770 1775 jrev = '%d' % rev
1771 1776 jnode = '"%s"' % hex(ctx.node())
1772 1777 j = encoding.jsonescape
1773 1778
1774 1779 if self._first:
1775 1780 self.ui.write("[\n {")
1776 1781 self._first = False
1777 1782 else:
1778 1783 self.ui.write(",\n {")
1779 1784
1780 1785 if self.ui.quiet:
1781 1786 self.ui.write(('\n "rev": %s') % jrev)
1782 1787 self.ui.write((',\n "node": %s') % jnode)
1783 1788 self.ui.write('\n }')
1784 1789 return
1785 1790
1786 1791 self.ui.write(('\n "rev": %s') % jrev)
1787 1792 self.ui.write((',\n "node": %s') % jnode)
1788 1793 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1789 1794 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1790 1795 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1791 1796 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1792 1797 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1793 1798
1794 1799 self.ui.write((',\n "bookmarks": [%s]') %
1795 1800 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1796 1801 self.ui.write((',\n "tags": [%s]') %
1797 1802 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1798 1803 self.ui.write((',\n "parents": [%s]') %
1799 1804 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1800 1805
1801 1806 if self.ui.debugflag:
1802 1807 if rev is None:
1803 1808 jmanifestnode = 'null'
1804 1809 else:
1805 1810 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1806 1811 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1807 1812
1808 1813 self.ui.write((',\n "extra": {%s}') %
1809 1814 ", ".join('"%s": "%s"' % (j(k), j(v))
1810 1815 for k, v in ctx.extra().items()))
1811 1816
1812 1817 files = ctx.p1().status(ctx)
1813 1818 self.ui.write((',\n "modified": [%s]') %
1814 1819 ", ".join('"%s"' % j(f) for f in files[0]))
1815 1820 self.ui.write((',\n "added": [%s]') %
1816 1821 ", ".join('"%s"' % j(f) for f in files[1]))
1817 1822 self.ui.write((',\n "removed": [%s]') %
1818 1823 ", ".join('"%s"' % j(f) for f in files[2]))
1819 1824
1820 1825 elif self.ui.verbose:
1821 1826 self.ui.write((',\n "files": [%s]') %
1822 1827 ", ".join('"%s"' % j(f) for f in ctx.files()))
1823 1828
1824 1829 if copies:
1825 1830 self.ui.write((',\n "copies": {%s}') %
1826 1831 ", ".join('"%s": "%s"' % (j(k), j(v))
1827 1832 for k, v in copies))
1828 1833
1829 1834 matchfn = self.matchfn
1830 1835 if matchfn:
1831 1836 stat = self.diffopts.get('stat')
1832 1837 diff = self.diffopts.get('patch')
1833 1838 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1834 1839 node, prev = ctx.node(), ctx.p1().node()
1835 1840 if stat:
1836 1841 self.ui.pushbuffer()
1837 1842 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1838 1843 match=matchfn, stat=True)
1839 1844 self.ui.write((',\n "diffstat": "%s"')
1840 1845 % j(self.ui.popbuffer()))
1841 1846 if diff:
1842 1847 self.ui.pushbuffer()
1843 1848 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1844 1849 match=matchfn, stat=False)
1845 1850 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1846 1851
1847 1852 self.ui.write("\n }")
1848 1853
1849 1854 class changeset_templater(changeset_printer):
1850 1855 '''format changeset information using a template.'''
1851 1856
1852 1857 # Arguments before "buffered" used to be positional. Consider not
1853 1858 # adding/removing arguments before "buffered" to not break callers.
1854 1859 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1855 1860 buffered=False):
1856 1861 diffopts = diffopts or {}
1857 1862
1858 1863 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1859 1864 self.t = formatter.loadtemplater(ui, tmplspec,
1860 1865 cache=templatekw.defaulttempl)
1861 1866 self._counter = itertools.count()
1862 1867 self.cache = {}
1863 1868
1864 1869 self._tref = tmplspec.ref
1865 1870 self._parts = {'header': '', 'footer': '',
1866 1871 tmplspec.ref: tmplspec.ref,
1867 1872 'docheader': '', 'docfooter': '',
1868 1873 'separator': ''}
1869 1874 if tmplspec.mapfile:
1870 1875 # find correct templates for current mode, for backward
1871 1876 # compatibility with 'log -v/-q/--debug' using a mapfile
1872 1877 tmplmodes = [
1873 1878 (True, ''),
1874 1879 (self.ui.verbose, '_verbose'),
1875 1880 (self.ui.quiet, '_quiet'),
1876 1881 (self.ui.debugflag, '_debug'),
1877 1882 ]
1878 1883 for mode, postfix in tmplmodes:
1879 1884 for t in self._parts:
1880 1885 cur = t + postfix
1881 1886 if mode and cur in self.t:
1882 1887 self._parts[t] = cur
1883 1888 else:
1884 1889 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1885 1890 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1886 1891 self._parts.update(m)
1887 1892
1888 1893 if self._parts['docheader']:
1889 1894 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1890 1895
1891 1896 def close(self):
1892 1897 if self._parts['docfooter']:
1893 1898 if not self.footer:
1894 1899 self.footer = ""
1895 1900 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1896 1901 return super(changeset_templater, self).close()
1897 1902
1898 1903 def _show(self, ctx, copies, matchfn, props):
1899 1904 '''show a single changeset or file revision'''
1900 1905 props = props.copy()
1901 1906 props.update(templatekw.keywords)
1902 1907 props['templ'] = self.t
1903 1908 props['ctx'] = ctx
1904 1909 props['repo'] = self.repo
1905 1910 props['ui'] = self.repo.ui
1906 1911 props['index'] = index = next(self._counter)
1907 1912 props['revcache'] = {'copies': copies}
1908 1913 props['cache'] = self.cache
1909 1914 props = pycompat.strkwargs(props)
1910 1915
1911 1916 # write separator, which wouldn't work well with the header part below
1912 1917 # since there's inherently a conflict between header (across items) and
1913 1918 # separator (per item)
1914 1919 if self._parts['separator'] and index > 0:
1915 1920 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1916 1921
1917 1922 # write header
1918 1923 if self._parts['header']:
1919 1924 h = templater.stringify(self.t(self._parts['header'], **props))
1920 1925 if self.buffered:
1921 1926 self.header[ctx.rev()] = h
1922 1927 else:
1923 1928 if self.lastheader != h:
1924 1929 self.lastheader = h
1925 1930 self.ui.write(h)
1926 1931
1927 1932 # write changeset metadata, then patch if requested
1928 1933 key = self._parts[self._tref]
1929 1934 self.ui.write(templater.stringify(self.t(key, **props)))
1930 1935 self.showpatch(ctx, matchfn)
1931 1936
1932 1937 if self._parts['footer']:
1933 1938 if not self.footer:
1934 1939 self.footer = templater.stringify(
1935 1940 self.t(self._parts['footer'], **props))
1936 1941
1937 1942 def logtemplatespec(tmpl, mapfile):
1938 1943 if mapfile:
1939 1944 return formatter.templatespec('changeset', tmpl, mapfile)
1940 1945 else:
1941 1946 return formatter.templatespec('', tmpl, None)
1942 1947
1943 1948 def _lookuplogtemplate(ui, tmpl, style):
1944 1949 """Find the template matching the given template spec or style
1945 1950
1946 1951 See formatter.lookuptemplate() for details.
1947 1952 """
1948 1953
1949 1954 # ui settings
1950 1955 if not tmpl and not style: # templates are stronger than styles
1951 1956 tmpl = ui.config('ui', 'logtemplate')
1952 1957 if tmpl:
1953 1958 return logtemplatespec(templater.unquotestring(tmpl), None)
1954 1959 else:
1955 1960 style = util.expandpath(ui.config('ui', 'style'))
1956 1961
1957 1962 if not tmpl and style:
1958 1963 mapfile = style
1959 1964 if not os.path.split(mapfile)[0]:
1960 1965 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1961 1966 or templater.templatepath(mapfile))
1962 1967 if mapname:
1963 1968 mapfile = mapname
1964 1969 return logtemplatespec(None, mapfile)
1965 1970
1966 1971 if not tmpl:
1967 1972 return logtemplatespec(None, None)
1968 1973
1969 1974 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1970 1975
1971 1976 def makelogtemplater(ui, repo, tmpl, buffered=False):
1972 1977 """Create a changeset_templater from a literal template 'tmpl'"""
1973 1978 spec = logtemplatespec(tmpl, None)
1974 1979 return changeset_templater(ui, repo, spec, buffered=buffered)
1975 1980
1976 1981 def show_changeset(ui, repo, opts, buffered=False):
1977 1982 """show one changeset using template or regular display.
1978 1983
1979 1984 Display format will be the first non-empty hit of:
1980 1985 1. option 'template'
1981 1986 2. option 'style'
1982 1987 3. [ui] setting 'logtemplate'
1983 1988 4. [ui] setting 'style'
1984 1989 If all of these values are either unset or the empty string,
1985 1990 regular display via changeset_printer() is done.
1986 1991 """
1987 1992 # options
1988 1993 match = None
1989 1994 if opts.get('patch') or opts.get('stat'):
1990 1995 match = scmutil.matchall(repo)
1991 1996
1992 1997 if opts.get('template') == 'json':
1993 1998 return jsonchangeset(ui, repo, match, opts, buffered)
1994 1999
1995 2000 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1996 2001
1997 2002 if not spec.ref and not spec.tmpl and not spec.mapfile:
1998 2003 return changeset_printer(ui, repo, match, opts, buffered)
1999 2004
2000 2005 return changeset_templater(ui, repo, spec, match, opts, buffered)
2001 2006
2002 2007 def showmarker(fm, marker, index=None):
2003 2008 """utility function to display obsolescence marker in a readable way
2004 2009
2005 2010 To be used by debug function."""
2006 2011 if index is not None:
2007 2012 fm.write('index', '%i ', index)
2008 2013 fm.write('prednode', '%s ', hex(marker.prednode()))
2009 2014 succs = marker.succnodes()
2010 2015 fm.condwrite(succs, 'succnodes', '%s ',
2011 2016 fm.formatlist(map(hex, succs), name='node'))
2012 2017 fm.write('flag', '%X ', marker.flags())
2013 2018 parents = marker.parentnodes()
2014 2019 if parents is not None:
2015 2020 fm.write('parentnodes', '{%s} ',
2016 2021 fm.formatlist(map(hex, parents), name='node', sep=', '))
2017 2022 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
2018 2023 meta = marker.metadata().copy()
2019 2024 meta.pop('date', None)
2020 2025 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
2021 2026 fm.plain('\n')
2022 2027
2023 2028 def finddate(ui, repo, date):
2024 2029 """Find the tipmost changeset that matches the given date spec"""
2025 2030
2026 2031 df = util.matchdate(date)
2027 2032 m = scmutil.matchall(repo)
2028 2033 results = {}
2029 2034
2030 2035 def prep(ctx, fns):
2031 2036 d = ctx.date()
2032 2037 if df(d[0]):
2033 2038 results[ctx.rev()] = d
2034 2039
2035 2040 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
2036 2041 rev = ctx.rev()
2037 2042 if rev in results:
2038 2043 ui.status(_("found revision %s from %s\n") %
2039 2044 (rev, util.datestr(results[rev])))
2040 2045 return '%d' % rev
2041 2046
2042 2047 raise error.Abort(_("revision matching date not found"))
2043 2048
2044 2049 def increasingwindows(windowsize=8, sizelimit=512):
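# Yields window sizes that double until the size limit is reached, e.g.
# 8, 16, 32, ..., 512, 512, ..., so that history is scanned in progressively
# larger batches without materializing everything at once.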
2045 2050 while True:
2046 2051 yield windowsize
2047 2052 if windowsize < sizelimit:
2048 2053 windowsize *= 2
2049 2054
2050 2055 class FileWalkError(Exception):
2051 2056 pass
2052 2057
2053 2058 def walkfilerevs(repo, match, follow, revs, fncache):
2054 2059 '''Walks the file history for the matched files.
2055 2060
2056 2061 Returns the changeset revs that are involved in the file history.
2057 2062
2058 2063 Throws FileWalkError if the file history can't be walked using
2059 2064 filelogs alone.
2060 2065 '''
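# Besides returning the wanted revision set, this populates 'fncache'
# (rev -> list of matched filenames) as a side effect, so callers can later
# show only the files relevant to each revision.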
2061 2066 wanted = set()
2062 2067 copies = []
2063 2068 minrev, maxrev = min(revs), max(revs)
2064 2069 def filerevgen(filelog, last):
2065 2070 """
2066 2071 Only files, no patterns. Check the history of each file.
2067 2072
2068 2073 Examines filelog entries within the [minrev, maxrev] linkrev range.
2069 2074 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2070 2075 tuples in backwards order
2071 2076 """
2072 2077 cl_count = len(repo)
2073 2078 revs = []
2074 2079 for j in xrange(0, last + 1):
2075 2080 linkrev = filelog.linkrev(j)
2076 2081 if linkrev < minrev:
2077 2082 continue
2079 2084 # only yield revs for which we have the changelog; it can
2079 2084 # happen while doing "hg log" during a pull or commit
2080 2085 if linkrev >= cl_count:
2081 2086 break
2082 2087
2083 2088 parentlinkrevs = []
2084 2089 for p in filelog.parentrevs(j):
2085 2090 if p != nullrev:
2086 2091 parentlinkrevs.append(filelog.linkrev(p))
2087 2092 n = filelog.node(j)
2088 2093 revs.append((linkrev, parentlinkrevs,
2089 2094 follow and filelog.renamed(n)))
2090 2095
2091 2096 return reversed(revs)
2092 2097 def iterfiles():
2093 2098 pctx = repo['.']
2094 2099 for filename in match.files():
2095 2100 if follow:
2096 2101 if filename not in pctx:
2097 2102 raise error.Abort(_('cannot follow file not in parent '
2098 2103 'revision: "%s"') % filename)
2099 2104 yield filename, pctx[filename].filenode()
2100 2105 else:
2101 2106 yield filename, None
2102 2107 for filename_node in copies:
2103 2108 yield filename_node
2104 2109
2105 2110 for file_, node in iterfiles():
2106 2111 filelog = repo.file(file_)
2107 2112 if not len(filelog):
2108 2113 if node is None:
2109 2114 # A zero count may be a directory or deleted file, so
2110 2115 # try to find matching entries on the slow path.
2111 2116 if follow:
2112 2117 raise error.Abort(
2113 2118 _('cannot follow nonexistent file: "%s"') % file_)
2114 2119 raise FileWalkError("Cannot walk via filelog")
2115 2120 else:
2116 2121 continue
2117 2122
2118 2123 if node is None:
2119 2124 last = len(filelog) - 1
2120 2125 else:
2121 2126 last = filelog.rev(node)
2122 2127
2123 2128 # keep track of all ancestors of the file
2124 2129 ancestors = {filelog.linkrev(last)}
2125 2130
2126 2131 # iterate from latest to oldest revision
2127 2132 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2128 2133 if not follow:
2129 2134 if rev > maxrev:
2130 2135 continue
2131 2136 else:
2132 2137 # Note that last might not be the first interesting
2133 2138 # rev to us:
2134 2139 # if the file has been changed after maxrev, we'll
2135 2140 # have linkrev(last) > maxrev, and we still need
2136 2141 # to explore the file graph
2137 2142 if rev not in ancestors:
2138 2143 continue
2139 2144 # XXX insert 1327 fix here
2140 2145 if flparentlinkrevs:
2141 2146 ancestors.update(flparentlinkrevs)
2142 2147
2143 2148 fncache.setdefault(rev, []).append(file_)
2144 2149 wanted.add(rev)
2145 2150 if copied:
2146 2151 copies.append(copied)
2147 2152
2148 2153 return wanted
2149 2154
2150 2155 class _followfilter(object):
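# Incrementally decides whether revisions belong to the followed history:
# when iterating forward it admits descendants of the start revision, and
# when iterating backwards it admits ancestors (first parents only when
# onlyfirst is set).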
2151 2156 def __init__(self, repo, onlyfirst=False):
2152 2157 self.repo = repo
2153 2158 self.startrev = nullrev
2154 2159 self.roots = set()
2155 2160 self.onlyfirst = onlyfirst
2156 2161
2157 2162 def match(self, rev):
2158 2163 def realparents(rev):
2159 2164 if self.onlyfirst:
2160 2165 return self.repo.changelog.parentrevs(rev)[0:1]
2161 2166 else:
2162 2167 return filter(lambda x: x != nullrev,
2163 2168 self.repo.changelog.parentrevs(rev))
2164 2169
2165 2170 if self.startrev == nullrev:
2166 2171 self.startrev = rev
2167 2172 return True
2168 2173
2169 2174 if rev > self.startrev:
2170 2175 # forward: all descendants
2171 2176 if not self.roots:
2172 2177 self.roots.add(self.startrev)
2173 2178 for parent in realparents(rev):
2174 2179 if parent in self.roots:
2175 2180 self.roots.add(rev)
2176 2181 return True
2177 2182 else:
2178 2183 # backwards: all parents
2179 2184 if not self.roots:
2180 2185 self.roots.update(realparents(self.startrev))
2181 2186 if rev in self.roots:
2182 2187 self.roots.remove(rev)
2183 2188 self.roots.update(realparents(rev))
2184 2189 return True
2185 2190
2186 2191 return False
2187 2192
2188 2193 def walkchangerevs(repo, match, opts, prepare):
2189 2194 '''Iterate over files and the revs in which they changed.
2190 2195
2191 2196 Callers most commonly need to iterate backwards over the history
2192 2197 in which they are interested. Doing so has awful (quadratic-looking)
2193 2198 performance, so we use iterators in a "windowed" way.
2194 2199
2195 2200 We walk a window of revisions in the desired order. Within the
2196 2201 window, we first walk forwards to gather data, then in the desired
2197 2202 order (usually backwards) to display it.
2198 2203
2199 2204 This function returns an iterator yielding contexts. Before
2200 2205 yielding each context, the iterator will first call the prepare
2201 2206 function on each context in the window in forward order.'''
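# Rough usage sketch (illustrative only; 'repo' and 'ui' assumed available):
#     m = scmutil.matchall(repo)
#     for ctx in walkchangerevs(repo, m, {'rev': None}, lambda ctx, fns: None):
#         ui.write("%d\n" % ctx.rev())
# visits every revision newest-first, invoking the (here no-op) prepare
# callback on each context before it is yielded.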
2202 2207
2203 2208 follow = opts.get('follow') or opts.get('follow_first')
2204 2209 revs = _logrevs(repo, opts)
2205 2210 if not revs:
2206 2211 return []
2207 2212 wanted = set()
2208 2213 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2209 2214 opts.get('removed'))
2210 2215 fncache = {}
2211 2216 change = repo.changectx
2212 2217
2213 2218 # First step is to fill wanted, the set of revisions that we want to yield.
2214 2219 # When it does not induce extra cost, we also fill fncache for revisions in
2215 2220 # wanted: a cache of filenames that were changed (ctx.files()) and that
2216 2221 # match the file filtering conditions.
2217 2222
2218 2223 if match.always():
2219 2224 # No files, no patterns. Display all revs.
2220 2225 wanted = revs
2221 2226 elif not slowpath:
2222 2227 # We only have to read through the filelog to find wanted revisions
2223 2228
2224 2229 try:
2225 2230 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2226 2231 except FileWalkError:
2227 2232 slowpath = True
2228 2233
2229 2234 # We decided to fall back to the slowpath because at least one
2230 2235 # of the paths was not a file. Check to see if at least one of them
2231 2236 # existed in history, otherwise simply return
2232 2237 for path in match.files():
2233 2238 if path == '.' or path in repo.store:
2234 2239 break
2235 2240 else:
2236 2241 return []
2237 2242
2238 2243 if slowpath:
2239 2244 # We have to read the changelog to match filenames against
2240 2245 # changed files
2241 2246
2242 2247 if follow:
2243 2248 raise error.Abort(_('can only follow copies/renames for explicit '
2244 2249 'filenames'))
2245 2250
2246 2251 # The slow path checks files modified in every changeset.
2247 2252 # This is really slow on large repos, so compute the set lazily.
2248 2253 class lazywantedset(object):
2249 2254 def __init__(self):
2250 2255 self.set = set()
2251 2256 self.revs = set(revs)
2252 2257
2253 2258 # No need to worry about locality here because it will be accessed
2254 2259 # in the same order as the increasing window below.
2255 2260 def __contains__(self, value):
2256 2261 if value in self.set:
2257 2262 return True
2258 2263 elif value not in self.revs:
2259 2264 return False
2260 2265 else:
2261 2266 self.revs.discard(value)
2262 2267 ctx = change(value)
2263 2268 matches = filter(match, ctx.files())
2264 2269 if matches:
2265 2270 fncache[value] = matches
2266 2271 self.set.add(value)
2267 2272 return True
2268 2273 return False
2269 2274
2270 2275 def discard(self, value):
2271 2276 self.revs.discard(value)
2272 2277 self.set.discard(value)
2273 2278
2274 2279 wanted = lazywantedset()
2275 2280
2276 2281 # it might be worthwhile to do this in the iterator if the rev range
2277 2282 # is descending and the prune args are all within that range
2278 2283 for rev in opts.get('prune', ()):
2279 2284 rev = repo[rev].rev()
2280 2285 ff = _followfilter(repo)
2281 2286 stop = min(revs[0], revs[-1])
2282 2287 for x in xrange(rev, stop - 1, -1):
2283 2288 if ff.match(x):
2284 2289 wanted = wanted - [x]
2285 2290
2286 2291 # Now that wanted is correctly initialized, we can iterate over the
2287 2292 # revision range, yielding only revisions in wanted.
2288 2293 def iterate():
2289 2294 if follow and match.always():
2290 2295 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2291 2296 def want(rev):
2292 2297 return ff.match(rev) and rev in wanted
2293 2298 else:
2294 2299 def want(rev):
2295 2300 return rev in wanted
2296 2301
2297 2302 it = iter(revs)
2298 2303 stopiteration = False
2299 2304 for windowsize in increasingwindows():
2300 2305 nrevs = []
2301 2306 for i in xrange(windowsize):
2302 2307 rev = next(it, None)
2303 2308 if rev is None:
2304 2309 stopiteration = True
2305 2310 break
2306 2311 elif want(rev):
2307 2312 nrevs.append(rev)
2308 2313 for rev in sorted(nrevs):
2309 2314 fns = fncache.get(rev)
2310 2315 ctx = change(rev)
2311 2316 if not fns:
2312 2317 def fns_generator():
2313 2318 for f in ctx.files():
2314 2319 if match(f):
2315 2320 yield f
2316 2321 fns = fns_generator()
2317 2322 prepare(ctx, fns)
2318 2323 for rev in nrevs:
2319 2324 yield change(rev)
2320 2325
2321 2326 if stopiteration:
2322 2327 break
2323 2328
2324 2329 return iterate()
2325 2330
2326 2331 def _makefollowlogfilematcher(repo, files, followfirst):
2327 2332 # When displaying a revision with --patch --follow FILE, we have
2328 2333 # to know which file of the revision must be diffed. With
2329 2334 # --follow, we want the names of the ancestors of FILE in the
2330 2335 # revision, stored in "fcache". "fcache" is populated by
2331 2336 # reproducing the graph traversal already done by --follow revset
2332 2337 # and relating revs to file names (which is not "correct" but
2333 2338 # good enough).
2334 2339 fcache = {}
2335 2340 fcacheready = [False]
2336 2341 pctx = repo['.']
2337 2342
2338 2343 def populate():
2339 2344 for fn in files:
2340 2345 fctx = pctx[fn]
2341 2346 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2342 2347 for c in fctx.ancestors(followfirst=followfirst):
2343 2348 fcache.setdefault(c.rev(), set()).add(c.path())
2344 2349
2345 2350 def filematcher(rev):
2346 2351 if not fcacheready[0]:
2347 2352 # Lazy initialization
2348 2353 fcacheready[0] = True
2349 2354 populate()
2350 2355 return scmutil.matchfiles(repo, fcache.get(rev, []))
2351 2356
2352 2357 return filematcher
2353 2358
2354 2359 def _makenofollowlogfilematcher(repo, pats, opts):
2355 2360 '''hook for extensions to override the filematcher for non-follow cases'''
2356 2361 return None
2357 2362
2358 2363 def _makelogrevset(repo, pats, opts, revs):
2359 2364 """Return (expr, filematcher) where expr is a revset string built
2360 2365 from log options and file patterns or None. If --stat or --patch
2361 2366 are not passed, filematcher is None. Otherwise it is a callable
2362 2367 taking a revision number and returning a match object filtering
2363 2368 the files to be detailed when displaying the revision.
2364 2369 """
2365 2370 opt2revset = {
2366 2371 'no_merges': ('not merge()', None),
2367 2372 'only_merges': ('merge()', None),
2368 2373 '_ancestors': ('ancestors(%(val)s)', None),
2369 2374 '_fancestors': ('_firstancestors(%(val)s)', None),
2370 2375 '_descendants': ('descendants(%(val)s)', None),
2371 2376 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2372 2377 '_matchfiles': ('_matchfiles(%(val)s)', None),
2373 2378 'date': ('date(%(val)r)', None),
2374 2379 'branch': ('branch(%(val)r)', ' or '),
2375 2380 '_patslog': ('filelog(%(val)r)', ' or '),
2376 2381 '_patsfollow': ('follow(%(val)r)', ' or '),
2377 2382 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2378 2383 'keyword': ('keyword(%(val)r)', ' or '),
2379 2384 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2380 2385 'user': ('user(%(val)r)', ' or '),
2381 2386 }
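# For example (illustrative only), '--user alice --user bob --no-merges'
# is folded below into roughly
#     (not merge() and (user('alice') or user('bob')))
# before being combined with any file patterns.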
2382 2387
2383 2388 opts = dict(opts)
2384 2389 # follow or not follow?
2385 2390 follow = opts.get('follow') or opts.get('follow_first')
2386 2391 if opts.get('follow_first'):
2387 2392 followfirst = 1
2388 2393 else:
2389 2394 followfirst = 0
2390 2395 # --follow with FILE behavior depends on revs...
2391 2396 it = iter(revs)
2392 2397 startrev = next(it)
2393 2398 followdescendants = startrev < next(it, startrev)
2394 2399
2395 2400 # branch and only_branch are really aliases and must be handled at
2396 2401 # the same time
2397 2402 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2398 2403 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2399 2404 # pats/include/exclude are passed to match.match() directly in
2400 2405 # _matchfiles() revset but walkchangerevs() builds its matcher with
2401 2406 # scmutil.match(). The difference is that input pats are globbed on
2402 2407 # platforms without shell expansion (Windows).
2403 2408 wctx = repo[None]
2404 2409 match, pats = scmutil.matchandpats(wctx, pats, opts)
2405 2410 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2406 2411 opts.get('removed'))
2407 2412 if not slowpath:
2408 2413 for f in match.files():
2409 2414 if follow and f not in wctx:
2410 2415 # If the file exists, it may be a directory, so let it
2411 2416 # take the slow path.
2412 2417 if os.path.exists(repo.wjoin(f)):
2413 2418 slowpath = True
2414 2419 continue
2415 2420 else:
2416 2421 raise error.Abort(_('cannot follow file not in parent '
2417 2422 'revision: "%s"') % f)
2418 2423 filelog = repo.file(f)
2419 2424 if not filelog:
2420 2425 # A zero count may be a directory or deleted file, so
2421 2426 # try to find matching entries on the slow path.
2422 2427 if follow:
2423 2428 raise error.Abort(
2424 2429 _('cannot follow nonexistent file: "%s"') % f)
2425 2430 slowpath = True
2426 2431
2427 2432 # We decided to fall back to the slowpath because at least one
2428 2433 # of the paths was not a file. Check to see if at least one of them
2429 2434 # existed in history - in that case, we'll continue down the
2430 2435 # slowpath; otherwise, we can turn off the slowpath
2431 2436 if slowpath:
2432 2437 for path in match.files():
2433 2438 if path == '.' or path in repo.store:
2434 2439 break
2435 2440 else:
2436 2441 slowpath = False
2437 2442
2438 2443 fpats = ('_patsfollow', '_patsfollowfirst')
2439 2444 fnopats = (('_ancestors', '_fancestors'),
2440 2445 ('_descendants', '_fdescendants'))
2441 2446 if slowpath:
2442 2447 # See walkchangerevs() slow path.
2443 2448 #
2444 2449 # pats/include/exclude cannot be represented as separate
2445 2450 # revset expressions as their filtering logic applies at file
2446 2451 # level. For instance "-I a -X a" matches a revision touching
2447 2452 # "a" and "b" while "file(a) and not file(b)" does
2448 2453 # not. Besides, filesets are evaluated against the working
2449 2454 # directory.
2450 2455 matchargs = ['r:', 'd:relpath']
2451 2456 for p in pats:
2452 2457 matchargs.append('p:' + p)
2453 2458 for p in opts.get('include', []):
2454 2459 matchargs.append('i:' + p)
2455 2460 for p in opts.get('exclude', []):
2456 2461 matchargs.append('x:' + p)
2457 2462 matchargs = ','.join(('%r' % p) for p in matchargs)
2458 2463 opts['_matchfiles'] = matchargs
2459 2464 if follow:
2460 2465 opts[fnopats[0][followfirst]] = '.'
2461 2466 else:
2462 2467 if follow:
2463 2468 if pats:
2464 2469 # follow() revset interprets its file argument as a
2465 2470 # manifest entry, so use match.files(), not pats.
2466 2471 opts[fpats[followfirst]] = list(match.files())
2467 2472 else:
2468 2473 op = fnopats[followdescendants][followfirst]
2469 2474 opts[op] = 'rev(%d)' % startrev
2470 2475 else:
2471 2476 opts['_patslog'] = list(pats)
2472 2477
2473 2478 filematcher = None
2474 2479 if opts.get('patch') or opts.get('stat'):
2475 2480 # When following files, track renames via a special matcher.
2476 2481 # If we're forced to take the slowpath it means we're following
2477 2482 # at least one pattern/directory, so don't bother with rename tracking.
2478 2483 if follow and not match.always() and not slowpath:
2479 2484 # _makefollowlogfilematcher expects its files argument to be
2480 2485 # relative to the repo root, so use match.files(), not pats.
2481 2486 filematcher = _makefollowlogfilematcher(repo, match.files(),
2482 2487 followfirst)
2483 2488 else:
2484 2489 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2485 2490 if filematcher is None:
2486 2491 filematcher = lambda rev: match
2487 2492
2488 2493 expr = []
2489 2494 for op, val in sorted(opts.iteritems()):
2490 2495 if not val:
2491 2496 continue
2492 2497 if op not in opt2revset:
2493 2498 continue
2494 2499 revop, andor = opt2revset[op]
2495 2500 if '%(val)' not in revop:
2496 2501 expr.append(revop)
2497 2502 else:
2498 2503 if not isinstance(val, list):
2499 2504 e = revop % {'val': val}
2500 2505 else:
2501 2506 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2502 2507 expr.append(e)
2503 2508
2504 2509 if expr:
2505 2510 expr = '(' + ' and '.join(expr) + ')'
2506 2511 else:
2507 2512 expr = None
2508 2513 return expr, filematcher
2509 2514
2510 2515 def _logrevs(repo, opts):
2511 2516 # Default --rev value depends on --follow but --follow behavior
2512 2517 # depends on revisions resolved from --rev...
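# In practice: an explicit --rev wins; --follow with a null working-directory
# parent yields an empty set; --follow otherwise walks 'reverse(:.)'; with
# neither option every revision is visited newest-first.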
2513 2518 follow = opts.get('follow') or opts.get('follow_first')
2514 2519 if opts.get('rev'):
2515 2520 revs = scmutil.revrange(repo, opts['rev'])
2516 2521 elif follow and repo.dirstate.p1() == nullid:
2517 2522 revs = smartset.baseset()
2518 2523 elif follow:
2519 2524 revs = repo.revs('reverse(:.)')
2520 2525 else:
2521 2526 revs = smartset.spanset(repo)
2522 2527 revs.reverse()
2523 2528 return revs
2524 2529
2525 2530 def getgraphlogrevs(repo, pats, opts):
2526 2531 """Return (revs, expr, filematcher) where revs is an iterable of
2527 2532 revision numbers, expr is a revset string built from log options
2528 2533 and file patterns or None, and used to filter 'revs'. If --stat or
2529 2534 --patch are not passed, filematcher is None. Otherwise it is a
2530 2535 callable taking a revision number and returning a match object
2531 2536 filtering the files to be detailed when displaying the revision.
2532 2537 """
2533 2538 limit = loglimit(opts)
2534 2539 revs = _logrevs(repo, opts)
2535 2540 if not revs:
2536 2541 return smartset.baseset(), None, None
2537 2542 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2538 2543 if opts.get('rev'):
2539 2544 # User-specified revs might be unsorted, but don't sort before
2540 2545 # _makelogrevset because it might depend on the order of revs
2541 2546 if not (revs.isdescending() or revs.istopo()):
2542 2547 revs.sort(reverse=True)
2543 2548 if expr:
2544 2549 matcher = revset.match(repo.ui, expr)
2545 2550 revs = matcher(repo, revs)
2546 2551 if limit is not None:
2547 2552 limitedrevs = []
2548 2553 for idx, rev in enumerate(revs):
2549 2554 if idx >= limit:
2550 2555 break
2551 2556 limitedrevs.append(rev)
2552 2557 revs = smartset.baseset(limitedrevs)
2553 2558
2554 2559 return revs, expr, filematcher
2555 2560
2556 2561 def getlogrevs(repo, pats, opts):
2557 2562 """Return (revs, expr, filematcher) where revs is an iterable of
2558 2563 revision numbers, expr is a revset string built from log options
2559 2564 and file patterns or None, and used to filter 'revs'. If --stat or
2560 2565 --patch are not passed, filematcher is None. Otherwise it is a
2561 2566 callable taking a revision number and returning a match object
2562 2567 filtering the files to be detailed when displaying the revision.
2563 2568 """
2564 2569 limit = loglimit(opts)
2565 2570 revs = _logrevs(repo, opts)
2566 2571 if not revs:
2567 2572 return smartset.baseset([]), None, None
2568 2573 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2569 2574 if expr:
2570 2575 matcher = revset.match(repo.ui, expr)
2571 2576 revs = matcher(repo, revs)
2572 2577 if limit is not None:
2573 2578 limitedrevs = []
2574 2579 for idx, r in enumerate(revs):
2575 2580 if limit <= idx:
2576 2581 break
2577 2582 limitedrevs.append(r)
2578 2583 revs = smartset.baseset(limitedrevs)
2579 2584
2580 2585 return revs, expr, filematcher
2581 2586
2582 2587 def _graphnodeformatter(ui, displayer):
2583 2588 spec = ui.config('ui', 'graphnodetemplate')
2584 2589 if not spec:
2585 2590 return templatekw.showgraphnode # fast path for "{graphnode}"
2586 2591
2587 2592 spec = templater.unquotestring(spec)
2588 2593 templ = formatter.maketemplater(ui, spec)
2589 2594 cache = {}
2590 2595 if isinstance(displayer, changeset_templater):
2591 2596 cache = displayer.cache # reuse cache of slow templates
2592 2597 props = templatekw.keywords.copy()
2593 2598 props['templ'] = templ
2594 2599 props['cache'] = cache
2595 2600 def formatnode(repo, ctx):
2596 2601 props['ctx'] = ctx
2597 2602 props['repo'] = repo
2598 2603 props['ui'] = repo.ui
2599 2604 props['revcache'] = {}
2600 2605 return templ.render(props)
2601 2606 return formatnode
2602 2607
2603 2608 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2604 2609 filematcher=None, props=None):
2605 2610 props = props or {}
2606 2611 formatnode = _graphnodeformatter(ui, displayer)
2607 2612 state = graphmod.asciistate()
2608 2613 styles = state['styles']
2609 2614
2610 2615 # only set graph styling if HGPLAIN is not set.
2611 2616 if ui.plain('graph'):
2612 2617 # set all edge styles to |, the default pre-3.8 behaviour
2613 2618 styles.update(dict.fromkeys(styles, '|'))
2614 2619 else:
2615 2620 edgetypes = {
2616 2621 'parent': graphmod.PARENT,
2617 2622 'grandparent': graphmod.GRANDPARENT,
2618 2623 'missing': graphmod.MISSINGPARENT
2619 2624 }
2620 2625 for name, key in edgetypes.items():
2621 2626 # experimental config: experimental.graphstyle.*
2622 2627 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2623 2628 styles[key])
2624 2629 if not styles[key]:
2625 2630 styles[key] = None
2626 2631
2627 2632 # experimental config: experimental.graphshorten
2628 2633 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2629 2634
2630 2635 for rev, type, ctx, parents in dag:
2631 2636 char = formatnode(repo, ctx)
2632 2637 copies = None
2633 2638 if getrenamed and ctx.rev():
2634 2639 copies = []
2635 2640 for fn in ctx.files():
2636 2641 rename = getrenamed(fn, ctx.rev())
2637 2642 if rename:
2638 2643 copies.append((fn, rename[0]))
2639 2644 revmatchfn = None
2640 2645 if filematcher is not None:
2641 2646 revmatchfn = filematcher(ctx.rev())
2642 2647 edges = edgefn(type, char, state, rev, parents)
2643 2648 firstedge = next(edges)
2644 2649 width = firstedge[2]
2645 2650 displayer.show(ctx, copies=copies, matchfn=revmatchfn,
2646 2651 _graphwidth=width, **props)
2647 2652 lines = displayer.hunk.pop(rev).split('\n')
2648 2653 if not lines[-1]:
2649 2654 del lines[-1]
2650 2655 displayer.flush(ctx)
2651 2656 for type, char, width, coldata in itertools.chain([firstedge], edges):
2652 2657 graphmod.ascii(ui, state, type, char, lines, coldata)
2653 2658 lines = []
2654 2659 displayer.close()
2655 2660
2656 2661 def graphlog(ui, repo, pats, opts):
2657 2662 # Parameters are identical to log command ones
2658 2663 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2659 2664 revdag = graphmod.dagwalker(repo, revs)
2660 2665
2661 2666 getrenamed = None
2662 2667 if opts.get('copies'):
2663 2668 endrev = None
2664 2669 if opts.get('rev'):
2665 2670 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2666 2671 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2667 2672
2668 2673 ui.pager('log')
2669 2674 displayer = show_changeset(ui, repo, opts, buffered=True)
2670 2675 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2671 2676 filematcher)
2672 2677
2673 2678 def checkunsupportedgraphflags(pats, opts):
2674 2679 for op in ["newest_first"]:
2675 2680 if op in opts and opts[op]:
2676 2681 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2677 2682 % op.replace("_", "-"))
2678 2683
2679 2684 def graphrevs(repo, nodes, opts):
2680 2685 limit = loglimit(opts)
2681 2686 nodes.reverse()
2682 2687 if limit is not None:
2683 2688 nodes = nodes[:limit]
2684 2689 return graphmod.nodes(repo, nodes)
2685 2690
2686 2691 def add(ui, repo, match, prefix, explicitonly, **opts):
2687 2692 join = lambda f: os.path.join(prefix, f)
2688 2693 bad = []
2689 2694
2690 2695 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2691 2696 names = []
2692 2697 wctx = repo[None]
2693 2698 cca = None
2694 2699 abort, warn = scmutil.checkportabilityalert(ui)
2695 2700 if abort or warn:
2696 2701 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2697 2702
2698 2703 badmatch = matchmod.badmatch(match, badfn)
2699 2704 dirstate = repo.dirstate
2700 2705 # We don't want to just call wctx.walk here, since it would return a lot of
2701 2706 # clean files that we aren't interested in, and that takes time.
2702 2707 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2703 2708 unknown=True, ignored=False, full=False)):
2704 2709 exact = match.exact(f)
2705 2710 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2706 2711 if cca:
2707 2712 cca(f)
2708 2713 names.append(f)
2709 2714 if ui.verbose or not exact:
2710 2715 ui.status(_('adding %s\n') % match.rel(f))
2711 2716
2712 2717 for subpath in sorted(wctx.substate):
2713 2718 sub = wctx.sub(subpath)
2714 2719 try:
2715 2720 submatch = matchmod.subdirmatcher(subpath, match)
2716 2721 if opts.get(r'subrepos'):
2717 2722 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2718 2723 else:
2719 2724 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2720 2725 except error.LookupError:
2721 2726 ui.status(_("skipping missing subrepository: %s\n")
2722 2727 % join(subpath))
2723 2728
2724 2729 if not opts.get(r'dry_run'):
2725 2730 rejected = wctx.add(names, prefix)
2726 2731 bad.extend(f for f in rejected if f in match.files())
2727 2732 return bad
2728 2733
2729 2734 def addwebdirpath(repo, serverpath, webconf):
2730 2735 webconf[serverpath] = repo.root
2731 2736 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2732 2737
2733 2738 for r in repo.revs('filelog("path:.hgsub")'):
2734 2739 ctx = repo[r]
2735 2740 for subpath in ctx.substate:
2736 2741 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2737 2742
2738 2743 def forget(ui, repo, match, prefix, explicitonly):
2739 2744 join = lambda f: os.path.join(prefix, f)
2740 2745 bad = []
2741 2746 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2742 2747 wctx = repo[None]
2743 2748 forgot = []
2744 2749
2745 2750 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2746 2751 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2747 2752 if explicitonly:
2748 2753 forget = [f for f in forget if match.exact(f)]
2749 2754
2750 2755 for subpath in sorted(wctx.substate):
2751 2756 sub = wctx.sub(subpath)
2752 2757 try:
2753 2758 submatch = matchmod.subdirmatcher(subpath, match)
2754 2759 subbad, subforgot = sub.forget(submatch, prefix)
2755 2760 bad.extend([subpath + '/' + f for f in subbad])
2756 2761 forgot.extend([subpath + '/' + f for f in subforgot])
2757 2762 except error.LookupError:
2758 2763 ui.status(_("skipping missing subrepository: %s\n")
2759 2764 % join(subpath))
2760 2765
2761 2766 if not explicitonly:
2762 2767 for f in match.files():
2763 2768 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2764 2769 if f not in forgot:
2765 2770 if repo.wvfs.exists(f):
2766 2771 # Don't complain if the exact case match wasn't given.
2767 2772 # But don't do this until after checking 'forgot', so
2768 2773 # that subrepo files aren't normalized, and this op is
2769 2774 # purely from data cached by the status walk above.
2770 2775 if repo.dirstate.normalize(f) in repo.dirstate:
2771 2776 continue
2772 2777 ui.warn(_('not removing %s: '
2773 2778 'file is already untracked\n')
2774 2779 % match.rel(f))
2775 2780 bad.append(f)
2776 2781
2777 2782 for f in forget:
2778 2783 if ui.verbose or not match.exact(f):
2779 2784 ui.status(_('removing %s\n') % match.rel(f))
2780 2785
2781 2786 rejected = wctx.forget(forget, prefix)
2782 2787 bad.extend(f for f in rejected if f in match.files())
2783 2788 forgot.extend(f for f in forget if f not in rejected)
2784 2789 return bad, forgot
2785 2790
2786 2791 def files(ui, ctx, m, fm, fmt, subrepos):
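# Emits one formatter item per tracked file of 'ctx' matched by 'm' (skipping
# files marked removed in the dirstate when listing the working copy), then
# recurses into subrepositories; returns 0 if anything was listed, else 1.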
2787 2792 rev = ctx.rev()
2788 2793 ret = 1
2789 2794 ds = ctx.repo().dirstate
2790 2795
2791 2796 for f in ctx.matches(m):
2792 2797 if rev is None and ds[f] == 'r':
2793 2798 continue
2794 2799 fm.startitem()
2795 2800 if ui.verbose:
2796 2801 fc = ctx[f]
2797 2802 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2798 2803 fm.data(abspath=f)
2799 2804 fm.write('path', fmt, m.rel(f))
2800 2805 ret = 0
2801 2806
2802 2807 for subpath in sorted(ctx.substate):
2803 2808 submatch = matchmod.subdirmatcher(subpath, m)
2804 2809 if (subrepos or m.exact(subpath) or any(submatch.files())):
2805 2810 sub = ctx.sub(subpath)
2806 2811 try:
2807 2812 recurse = m.exact(subpath) or subrepos
2808 2813 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2809 2814 ret = 0
2810 2815 except error.LookupError:
2811 2816 ui.status(_("skipping missing subrepository: %s\n")
2812 2817 % m.abs(subpath))
2813 2818
2814 2819 return ret
2815 2820
2816 2821 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
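# Implements 'hg remove': warns about matched paths that cannot be removed,
# selects which files to drop depending on --after/--force, unlinks them from
# the working directory (unless --after) and forgets them in the dirstate;
# returns 1 if any warning was issued, else 0.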
2817 2822 join = lambda f: os.path.join(prefix, f)
2818 2823 ret = 0
2819 2824 s = repo.status(match=m, clean=True)
2820 2825 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2821 2826
2822 2827 wctx = repo[None]
2823 2828
2824 2829 if warnings is None:
2825 2830 warnings = []
2826 2831 warn = True
2827 2832 else:
2828 2833 warn = False
2829 2834
2830 2835 subs = sorted(wctx.substate)
2831 2836 total = len(subs)
2832 2837 count = 0
2833 2838 for subpath in subs:
2834 2839 count += 1
2835 2840 submatch = matchmod.subdirmatcher(subpath, m)
2836 2841 if subrepos or m.exact(subpath) or any(submatch.files()):
2837 2842 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2838 2843 sub = wctx.sub(subpath)
2839 2844 try:
2840 2845 if sub.removefiles(submatch, prefix, after, force, subrepos,
2841 2846 warnings):
2842 2847 ret = 1
2843 2848 except error.LookupError:
2844 2849 warnings.append(_("skipping missing subrepository: %s\n")
2845 2850 % join(subpath))
2846 2851 ui.progress(_('searching'), None)
2847 2852
2848 2853 # warn about failure to delete explicit files/dirs
2849 2854 deleteddirs = util.dirs(deleted)
2850 2855 files = m.files()
2851 2856 total = len(files)
2852 2857 count = 0
2853 2858 for f in files:
2854 2859 def insubrepo():
2855 2860 for subpath in wctx.substate:
2856 2861 if f.startswith(subpath + '/'):
2857 2862 return True
2858 2863 return False
2859 2864
2860 2865 count += 1
2861 2866 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2862 2867 isdir = f in deleteddirs or wctx.hasdir(f)
2863 2868 if (f in repo.dirstate or isdir or f == '.'
2864 2869 or insubrepo() or f in subs):
2865 2870 continue
2866 2871
2867 2872 if repo.wvfs.exists(f):
2868 2873 if repo.wvfs.isdir(f):
2869 2874 warnings.append(_('not removing %s: no tracked files\n')
2870 2875 % m.rel(f))
2871 2876 else:
2872 2877 warnings.append(_('not removing %s: file is untracked\n')
2873 2878 % m.rel(f))
2874 2879 # missing files will generate a warning elsewhere
2875 2880 ret = 1
2876 2881 ui.progress(_('deleting'), None)
2877 2882
2878 2883 if force:
2879 2884 list = modified + deleted + clean + added
2880 2885 elif after:
2881 2886 list = deleted
2882 2887 remaining = modified + added + clean
2883 2888 total = len(remaining)
2884 2889 count = 0
2885 2890 for f in remaining:
2886 2891 count += 1
2887 2892 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2888 2893 warnings.append(_('not removing %s: file still exists\n')
2889 2894 % m.rel(f))
2890 2895 ret = 1
2891 2896 ui.progress(_('skipping'), None)
2892 2897 else:
2893 2898 list = deleted + clean
2894 2899 total = len(modified) + len(added)
2895 2900 count = 0
2896 2901 for f in modified:
2897 2902 count += 1
2898 2903 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2899 2904 warnings.append(_('not removing %s: file is modified (use -f'
2900 2905 ' to force removal)\n') % m.rel(f))
2901 2906 ret = 1
2902 2907 for f in added:
2903 2908 count += 1
2904 2909 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2905 2910 warnings.append(_("not removing %s: file has been marked for add"
2906 2911 " (use 'hg forget' to undo add)\n") % m.rel(f))
2907 2912 ret = 1
2908 2913 ui.progress(_('skipping'), None)
2909 2914
2910 2915 list = sorted(list)
2911 2916 total = len(list)
2912 2917 count = 0
2913 2918 for f in list:
2914 2919 count += 1
2915 2920 if ui.verbose or not m.exact(f):
2916 2921 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2917 2922 ui.status(_('removing %s\n') % m.rel(f))
2918 2923 ui.progress(_('deleting'), None)
2919 2924
2920 2925 with repo.wlock():
2921 2926 if not after:
2922 2927 for f in list:
2923 2928 if f in added:
2924 2929 continue # we never unlink added files on remove
2925 2930 repo.wvfs.unlinkpath(f, ignoremissing=True)
2926 2931 repo[None].forget(list)
2927 2932
2928 2933 if warn:
2929 2934 for warning in warnings:
2930 2935 ui.warn(warning)
2931 2936
2932 2937 return ret
2933 2938
2934 2939 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
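# Implements 'hg cat': writes the data of every matched file in 'ctx' through
# the formatter (optionally into per-file outputs named by 'fntemplate'),
# recursing into subrepositories; returns 0 on success, 1 if nothing matched.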
2935 2940 err = 1
2936 2941
2937 2942 def write(path):
2938 2943 filename = None
2939 2944 if fntemplate:
2940 2945 filename = makefilename(repo, fntemplate, ctx.node(),
2941 2946 pathname=os.path.join(prefix, path))
2942 2947 with formatter.maybereopen(basefm, filename, opts) as fm:
2943 2948 data = ctx[path].data()
2944 2949 if opts.get('decode'):
2945 2950 data = repo.wwritedata(path, data)
2946 2951 fm.startitem()
2947 2952 fm.write('data', '%s', data)
2948 2953 fm.data(abspath=path, path=matcher.rel(path))
2949 2954
2950 2955 # Automation often uses hg cat on single files, so special case it
2951 2956 # for performance to avoid the cost of parsing the manifest.
2952 2957 if len(matcher.files()) == 1 and not matcher.anypats():
2953 2958 file = matcher.files()[0]
2954 2959 mfl = repo.manifestlog
2955 2960 mfnode = ctx.manifestnode()
2956 2961 try:
2957 2962 if mfnode and mfl[mfnode].find(file)[0]:
2958 2963 write(file)
2959 2964 return 0
2960 2965 except KeyError:
2961 2966 pass
2962 2967
2963 2968 for abs in ctx.walk(matcher):
2964 2969 write(abs)
2965 2970 err = 0
2966 2971
2967 2972 for subpath in sorted(ctx.substate):
2968 2973 sub = ctx.sub(subpath)
2969 2974 try:
2970 2975 submatch = matchmod.subdirmatcher(subpath, matcher)
2971 2976
2972 2977 if not sub.cat(submatch, basefm, fntemplate,
2973 2978 os.path.join(prefix, sub._path), **opts):
2974 2979 err = 0
2975 2980 except error.RepoLookupError:
2976 2981 ui.status(_("skipping missing subrepository: %s\n")
2977 2982 % os.path.join(prefix, subpath))
2978 2983
2979 2984 return err
2980 2985
2981 2986 def commit(ui, repo, commitfunc, pats, opts):
2982 2987 '''commit the specified files or all outstanding changes'''
2983 2988 date = opts.get('date')
2984 2989 if date:
2985 2990 opts['date'] = util.parsedate(date)
2986 2991 message = logmessage(ui, opts)
2987 2992 matcher = scmutil.match(repo[None], pats, opts)
2988 2993
2989 2994 dsguard = None
2990 2995 # extract addremove carefully -- this function can be called from a command
2991 2996 # that doesn't support addremove
2992 2997 if opts.get('addremove'):
2993 2998 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2994 2999 with dsguard or util.nullcontextmanager():
2995 3000 if dsguard:
2996 3001 if scmutil.addremove(repo, matcher, "", opts) != 0:
2997 3002 raise error.Abort(
2998 3003 _("failed to mark all new/missing files as added/removed"))
2999 3004
3000 3005 return commitfunc(ui, repo, message, matcher, opts)
3001 3006
3002 3007 def samefile(f, ctx1, ctx2):
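# Returns True when 'f' has identical content and flags in ctx1 and ctx2, or
# is present in neither; amend() below uses this to drop files that the
# working copy effectively reverts to their old state.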
3003 3008 if f in ctx1.manifest():
3004 3009 a = ctx1.filectx(f)
3005 3010 if f in ctx2.manifest():
3006 3011 b = ctx2.filectx(f)
3007 3012 return (not a.cmp(b)
3008 3013 and a.flags() == b.flags())
3009 3014 else:
3010 3015 return False
3011 3016 else:
3012 3017 return f not in ctx2.manifest()
3013 3018
3014 3019 def amend(ui, repo, old, extra, pats, opts):
3015 3020 # avoid cycle context -> subrepo -> cmdutil
3016 3021 from . import context
3017 3022
3018 3023 # amend will reuse the existing user if not specified, but the obsolete
3019 3024 # marker creation requires that the current user's name is specified.
3020 3025 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3021 3026 ui.username() # raise exception if username not set
3022 3027
3023 3028 ui.note(_('amending changeset %s\n') % old)
3024 3029 base = old.p1()
3025 3030
3026 3031 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3027 3032 # Participating changesets:
3028 3033 #
3029 3034 # wctx o - workingctx that contains changes from working copy
3030 3035 # | to go into amending commit
3031 3036 # |
3032 3037 # old o - changeset to amend
3033 3038 # |
3034 3039 # base o - first parent of the changeset to amend
3035 3040 wctx = repo[None]
3036 3041
3037 3042 # Update extra dict from amended commit (e.g. to preserve graft
3038 3043 # source)
3039 3044 extra.update(old.extra())
3040 3045
3041 3046 # Also update it from the wctx
3042 3047 extra.update(wctx.extra())
3043 3048
3044 3049 user = opts.get('user') or old.user()
3045 3050 date = opts.get('date') or old.date()
3046 3051
3047 3052 # Parse the date to allow comparison between date and old.date()
3048 3053 date = util.parsedate(date)
3049 3054
3050 3055 if len(old.parents()) > 1:
3051 3056 # ctx.files() isn't reliable for merges, so fall back to the
3052 3057 # slower repo.status() method
3053 3058 files = set([fn for st in repo.status(base, old)[:3]
3054 3059 for fn in st])
3055 3060 else:
3056 3061 files = set(old.files())
3057 3062
3058 3063 # add/remove the files to the working copy if the "addremove" option
3059 3064 # was specified.
3060 3065 matcher = scmutil.match(wctx, pats, opts)
3061 3066 if (opts.get('addremove')
3062 3067 and scmutil.addremove(repo, matcher, "", opts)):
3063 3068 raise error.Abort(
3064 3069 _("failed to mark all new/missing files as added/removed"))
3065 3070
3066 3071 filestoamend = set(f for f in wctx.files() if matcher(f))
3067 3072
3068 3073 changes = (len(filestoamend) > 0)
3069 3074 if changes:
3070 3075 # Recompute copies (avoid recording a -> b -> a)
3071 3076 copied = copies.pathcopies(base, wctx, matcher)
3072 3077 if old.p2():
3073 3078 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3074 3079
3075 3080 # Prune files which were reverted by the updates: if old
3076 3081 # introduced file X and the file was renamed in the working
3077 3082 # copy, then those two files are the same and
3078 3083 # we can discard X from our list of files. Likewise if X
3079 3084 # was deleted, it's no longer relevant
3080 3085 files.update(filestoamend)
3081 3086 files = [f for f in files if not samefile(f, wctx, base)]
3082 3087
3083 3088 def filectxfn(repo, ctx_, path):
3084 3089 try:
3085 3090 # If the file being considered is not amongst the files
3086 3091 # to be amended, we should return the file context from the
3087 3092 # old changeset. This avoids issues when only some files in
3088 3093 # the working copy are being amended but there are also
3089 3094 # changes to other files from the old changeset.
3090 3095 if path not in filestoamend:
3091 3096 return old.filectx(path)
3092 3097
3093 3098 fctx = wctx[path]
3094 3099
3095 3100 # Return None for removed files.
3096 3101 if not fctx.exists():
3097 3102 return None
3098 3103
3099 3104 flags = fctx.flags()
3100 3105 mctx = context.memfilectx(repo,
3101 3106 fctx.path(), fctx.data(),
3102 3107 islink='l' in flags,
3103 3108 isexec='x' in flags,
3104 3109 copied=copied.get(path))
3105 3110 return mctx
3106 3111 except KeyError:
3107 3112 return None
3108 3113 else:
3109 3114 ui.note(_('copying changeset %s to %s\n') % (old, base))
3110 3115
3111 3116 # Use version of files as in the old cset
3112 3117 def filectxfn(repo, ctx_, path):
3113 3118 try:
3114 3119 return old.filectx(path)
3115 3120 except KeyError:
3116 3121 return None
3117 3122
3118 3123 # See if we got a message from -m or -l; if not, open the editor with
3119 3124 # the message of the changeset to amend.
3120 3125 message = logmessage(ui, opts)
3121 3126
3122 3127 editform = mergeeditform(old, 'commit.amend')
3123 3128 editor = getcommiteditor(editform=editform,
3124 3129 **pycompat.strkwargs(opts))
3125 3130
3126 3131 if not message:
3127 3132 editor = getcommiteditor(edit=True, editform=editform)
3128 3133 message = old.description()
3129 3134
3130 3135 pureextra = extra.copy()
3131 3136 extra['amend_source'] = old.hex()
3132 3137
3133 3138 new = context.memctx(repo,
3134 3139 parents=[base.node(), old.p2().node()],
3135 3140 text=message,
3136 3141 files=files,
3137 3142 filectxfn=filectxfn,
3138 3143 user=user,
3139 3144 date=date,
3140 3145 extra=extra,
3141 3146 editor=editor)
3142 3147
3143 3148 newdesc = changelog.stripdesc(new.description())
3144 3149 if ((not changes)
3145 3150 and newdesc == old.description()
3146 3151 and user == old.user()
3147 3152 and date == old.date()
3148 3153 and pureextra == old.extra()):
3149 3154 # nothing changed. continuing here would create a new node
3150 3155 # anyway because of the amend_source noise.
3151 3156 #
3152 3157 # This is not what we expect from amend.
3153 3158 return old.node()
3154 3159
3155 3160 if opts.get('secret'):
3156 3161 commitphase = 'secret'
3157 3162 else:
3158 3163 commitphase = old.phase()
3159 3164 overrides = {('phases', 'new-commit'): commitphase}
3160 3165 with ui.configoverride(overrides, 'amend'):
3161 3166 newid = repo.commitctx(new)
3162 3167
3163 3168 # Reroute the working copy parent to the new changeset
3164 3169 repo.setparents(newid, nullid)
3165 3170 mapping = {old.node(): (newid,)}
3166 3171 scmutil.cleanupnodes(repo, mapping, 'amend')
3167 3172
3168 3173 # Fixing the dirstate because localrepo.commitctx does not update
3169 3174 # it. This is rather convenient because we did not need to update
3170 3175 # the dirstate for all the files in the new commit which commitctx
3171 3176 # could have done if it updated the dirstate. Now, we can
3172 3177 # selectively update the dirstate only for the amended files.
3173 3178 dirstate = repo.dirstate
3174 3179
3175 3180 # Update the state of the files which were added and
3176 3181 # modified in the amend to "normal" in the dirstate.
3177 3182 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3178 3183 for f in normalfiles:
3179 3184 dirstate.normal(f)
3180 3185
3181 3186 # Update the state of files which were removed in the amend
3182 3187 # to "removed" in the dirstate.
3183 3188 removedfiles = set(wctx.removed()) & filestoamend
3184 3189 for f in removedfiles:
3185 3190 dirstate.drop(f)
3186 3191
3187 3192 return newid
3188 3193
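# Use the changeset's existing description when there is one; otherwise run
# the commit editor with unchanged-message detection enabled.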
3189 3194 def commiteditor(repo, ctx, subs, editform=''):
3190 3195 if ctx.description():
3191 3196 return ctx.description()
3192 3197 return commitforceeditor(repo, ctx, subs, editform=editform,
3193 3198 unchangedmessagedetection=True)
3194 3199
3195 3200 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3196 3201 editform='', unchangedmessagedetection=False):
3197 3202 if not extramsg:
3198 3203 extramsg = _("Leave message empty to abort commit.")
3199 3204
3200 3205 forms = [e for e in editform.split('.') if e]
3201 3206 forms.insert(0, 'changeset')
3202 3207 templatetext = None
3203 3208 while forms:
3204 3209 ref = '.'.join(forms)
3205 3210 if repo.ui.config('committemplate', ref):
3206 3211 templatetext = committext = buildcommittemplate(
3207 3212 repo, ctx, subs, extramsg, ref)
3208 3213 break
3209 3214 forms.pop()
3210 3215 else:
3211 3216 committext = buildcommittext(repo, ctx, subs, extramsg)
3212 3217
3213 3218 # run editor in the repository root
3214 3219 olddir = pycompat.getcwd()
3215 3220 os.chdir(repo.root)
3216 3221
3217 3222 # make in-memory changes visible to external process
3218 3223 tr = repo.currenttransaction()
3219 3224 repo.dirstate.write(tr)
3220 3225 pending = tr and tr.writepending() and repo.root
3221 3226
3222 3227 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3223 3228 editform=editform, pending=pending,
3224 3229 repopath=repo.path, action='commit')
3225 3230 text = editortext
3226 3231
3227 3232 # strip away anything below this special string (used for editors that want
3228 3233 # to display the diff)
3229 3234 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3230 3235 if stripbelow:
3231 3236 text = text[:stripbelow.start()]
3232 3237
3233 3238 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3234 3239 os.chdir(olddir)
3235 3240
3236 3241 if finishdesc:
3237 3242 text = finishdesc(text)
3238 3243 if not text.strip():
3239 3244 raise error.Abort(_("empty commit message"))
3240 3245 if unchangedmessagedetection and editortext == templatetext:
3241 3246 raise error.Abort(_("commit message unchanged"))
3242 3247
3243 3248 return text
3244 3249
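# Render the [committemplate] template ``ref`` for ``ctx`` and return the
# resulting editor text.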
3245 3250 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3246 3251 ui = repo.ui
3247 3252 spec = formatter.templatespec(ref, None, None)
3248 3253 t = changeset_templater(ui, repo, spec, None, {}, False)
3249 3254 t.t.cache.update((k, templater.unquotestring(v))
3250 3255 for k, v in repo.ui.configitems('committemplate'))
3251 3256
3252 3257 if not extramsg:
3253 3258 extramsg = '' # ensure that extramsg is string
3254 3259
3255 3260 ui.pushbuffer()
3256 3261 t.show(ctx, extramsg=extramsg)
3257 3262 return ui.popbuffer()
3258 3263
3259 3264 def hgprefix(msg):
3260 3265 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3261 3266
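# Build the default commit editor text: the current description followed by
# 'HG:' helper lines describing user, branch, bookmark, subrepos and the
# files added, changed and removed.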
3262 3267 def buildcommittext(repo, ctx, subs, extramsg):
3263 3268 edittext = []
3264 3269 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3265 3270 if ctx.description():
3266 3271 edittext.append(ctx.description())
3267 3272 edittext.append("")
3268 3273 edittext.append("") # Empty line between message and comments.
3269 3274 edittext.append(hgprefix(_("Enter commit message."
3270 3275 " Lines beginning with 'HG:' are removed.")))
3271 3276 edittext.append(hgprefix(extramsg))
3272 3277 edittext.append("HG: --")
3273 3278 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3274 3279 if ctx.p2():
3275 3280 edittext.append(hgprefix(_("branch merge")))
3276 3281 if ctx.branch():
3277 3282 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3278 3283 if bookmarks.isactivewdirparent(repo):
3279 3284 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3280 3285 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3281 3286 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3282 3287 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3283 3288 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3284 3289 if not added and not modified and not removed:
3285 3290 edittext.append(hgprefix(_("no files changed")))
3286 3291 edittext.append("")
3287 3292
3288 3293 return "\n".join(edittext)
3289 3294
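# Report on a freshly created changeset: note the creation of new heads,
# report reopened closed branch heads and, in verbose/debug mode, print the
# committed revision.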
3290 3295 def commitstatus(repo, node, branch, bheads=None, opts=None):
3291 3296 if opts is None:
3292 3297 opts = {}
3293 3298 ctx = repo[node]
3294 3299 parents = ctx.parents()
3295 3300
3296 3301 if (not opts.get('amend') and bheads and node not in bheads and not
3297 3302 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3298 3303 repo.ui.status(_('created new head\n'))
3299 3304 # The message is not printed for initial roots. For the other
3300 3305 # changesets, it is printed in the following situations:
3301 3306 #
3302 3307 # Par column: for the 2 parents with ...
3303 3308 # N: null or no parent
3304 3309 # B: parent is on another named branch
3305 3310 # C: parent is a regular non head changeset
3306 3311 # H: parent was a branch head of the current branch
3307 3312 # Msg column: whether we print "created new head" message
3308 3313 # In the following, it is assumed that there already exists some
3309 3314 # initial branch heads of the current branch, otherwise nothing is
3310 3315 # printed anyway.
3311 3316 #
3312 3317 # Par Msg Comment
3313 3318 # N N y additional topo root
3314 3319 #
3315 3320 # B N y additional branch root
3316 3321 # C N y additional topo head
3317 3322 # H N n usual case
3318 3323 #
3319 3324 # B B y weird additional branch root
3320 3325 # C B y branch merge
3321 3326 # H B n merge with named branch
3322 3327 #
3323 3328 # C C y additional head from merge
3324 3329 # C H n merge with a head
3325 3330 #
3326 3331 # H H n head merge: head count decreases
3327 3332
3328 3333 if not opts.get('close_branch'):
3329 3334 for r in parents:
3330 3335 if r.closesbranch() and r.branch() == branch:
3331 3336 repo.ui.status(_('reopening closed branch head %d\n') % r)
3332 3337
3333 3338 if repo.ui.debugflag:
3334 3339 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3335 3340 elif repo.ui.verbose:
3336 3341 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3337 3342
3338 3343 def postcommitstatus(repo, pats, opts):
3339 3344 return repo.status(match=scmutil.match(repo[None], pats, opts))
3340 3345
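# Revert files matched by ``pats``/``opts`` to their state in the target
# revision ``ctx``; ``parents`` is the (p1, p2) pair of working directory
# parent nodes.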
3341 3346 def revert(ui, repo, ctx, parents, *pats, **opts):
3342 3347 parent, p2 = parents
3343 3348 node = ctx.node()
3344 3349
3345 3350 mf = ctx.manifest()
3346 3351 if node == p2:
3347 3352 parent = p2
3348 3353
3349 3354 # need all matching names in dirstate and manifest of target rev,
3350 3355 # so have to walk both. do not print errors if files exist in one
3351 3356 # but not other. in both cases, filesets should be evaluated against
3352 3357 # workingctx to get consistent result (issue4497). this means 'set:**'
3353 3358 # cannot be used to select missing files from target rev.
3354 3359
3355 3360 # `names` is a mapping for all elements in working copy and target revision
3356 3361 # The mapping is in the form:
3357 3362 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3358 3363 names = {}
3359 3364
3360 3365 with repo.wlock():
3361 3366 ## filling of the `names` mapping
3362 3367 # walk dirstate to fill `names`
3363 3368
3364 3369 interactive = opts.get('interactive', False)
3365 3370 wctx = repo[None]
3366 3371 m = scmutil.match(wctx, pats, opts)
3367 3372
3368 3373 # we'll need this later
3369 3374 targetsubs = sorted(s for s in wctx.substate if m(s))
3370 3375
3371 3376 if not m.always():
3372 3377 matcher = matchmod.badmatch(m, lambda x, y: False)
3373 3378 for abs in wctx.walk(matcher):
3374 3379 names[abs] = m.rel(abs), m.exact(abs)
3375 3380
3376 3381 # walk target manifest to fill `names`
3377 3382
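# Suppress the warning for paths already collected in ``names``, for
# subrepositories, and for directories containing a collected name; warn
# about anything else.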
3378 3383 def badfn(path, msg):
3379 3384 if path in names:
3380 3385 return
3381 3386 if path in ctx.substate:
3382 3387 return
3383 3388 path_ = path + '/'
3384 3389 for f in names:
3385 3390 if f.startswith(path_):
3386 3391 return
3387 3392 ui.warn("%s: %s\n" % (m.rel(path), msg))
3388 3393
3389 3394 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3390 3395 if abs not in names:
3391 3396 names[abs] = m.rel(abs), m.exact(abs)
3392 3397
3393 3398 # Find status of all files in `names`.
3394 3399 m = scmutil.matchfiles(repo, names)
3395 3400
3396 3401 changes = repo.status(node1=node, match=m,
3397 3402 unknown=True, ignored=True, clean=True)
3398 3403 else:
3399 3404 changes = repo.status(node1=node, match=m)
3400 3405 for kind in changes:
3401 3406 for abs in kind:
3402 3407 names[abs] = m.rel(abs), m.exact(abs)
3403 3408
3404 3409 m = scmutil.matchfiles(repo, names)
3405 3410
3406 3411 modified = set(changes.modified)
3407 3412 added = set(changes.added)
3408 3413 removed = set(changes.removed)
3409 3414 _deleted = set(changes.deleted)
3410 3415 unknown = set(changes.unknown)
3411 3416 unknown.update(changes.ignored)
3412 3417 clean = set(changes.clean)
3413 3418 modadded = set()
3414 3419
3415 3420 # We need to account for the state of the file in the dirstate,
3416 3421 # even when we revert against something other than the parent. This will
3417 3422 # slightly alter the behavior of revert (doing a backup or not, delete
3418 3423 # or just forget, etc).
3419 3424 if parent == node:
3420 3425 dsmodified = modified
3421 3426 dsadded = added
3422 3427 dsremoved = removed
3423 3428 # store all local modifications, useful later for rename detection
3424 3429 localchanges = dsmodified | dsadded
3425 3430 modified, added, removed = set(), set(), set()
3426 3431 else:
3427 3432 changes = repo.status(node1=parent, match=m)
3428 3433 dsmodified = set(changes.modified)
3429 3434 dsadded = set(changes.added)
3430 3435 dsremoved = set(changes.removed)
3431 3436 # store all local modifications, useful later for rename detection
3432 3437 localchanges = dsmodified | dsadded
3433 3438
3434 3439 # only take removes between wc and target into account
3435 3440 clean |= dsremoved - removed
3436 3441 dsremoved &= removed
3437 3442 # distinguish between dirstate removes and the others
3438 3443 removed -= dsremoved
3439 3444
3440 3445 modadded = added & dsmodified
3441 3446 added -= modadded
3442 3447
3443 3448 # tell newly modified files apart.
3444 3449 dsmodified &= modified
3445 3450 dsmodified |= modified & dsadded # dirstate added may need backup
3446 3451 modified -= dsmodified
3447 3452
3448 3453 # We need to wait for some post-processing to update this set
3449 3454 # before making the distinction. The dirstate will be used for
3450 3455 # that purpose.
3451 3456 dsadded = added
3452 3457
3453 3458 # in case of merge, files that are actually added can be reported as
3454 3459 # modified; we need to post-process the result
3455 3460 if p2 != nullid:
3456 3461 mergeadd = set(dsmodified)
3457 3462 for path in dsmodified:
3458 3463 if path in mf:
3459 3464 mergeadd.remove(path)
3460 3465 dsadded |= mergeadd
3461 3466 dsmodified -= mergeadd
3462 3467
3463 3468 # if f is a rename, update `names` to also revert the source
3464 3469 cwd = repo.getcwd()
3465 3470 for f in localchanges:
3466 3471 src = repo.dirstate.copied(f)
3467 3472 # XXX should we check for rename down to target node?
3468 3473 if src and src not in names and repo.dirstate[src] == 'r':
3469 3474 dsremoved.add(src)
3470 3475 names[src] = (repo.pathto(src, cwd), True)
3471 3476
3472 3477 # determine the exact nature of the deleted files
3473 3478 deladded = set(_deleted)
3474 3479 for path in _deleted:
3475 3480 if path in mf:
3476 3481 deladded.remove(path)
3477 3482 deleted = _deleted - deladded
3478 3483
3479 3484 # distinguish between files to forget and the others
3480 3485 added = set()
3481 3486 for abs in dsadded:
3482 3487 if repo.dirstate[abs] != 'a':
3483 3488 added.add(abs)
3484 3489 dsadded -= added
3485 3490
3486 3491 for abs in deladded:
3487 3492 if repo.dirstate[abs] == 'a':
3488 3493 dsadded.add(abs)
3489 3494 deladded -= dsadded
3490 3495
3491 3496 # For files marked as removed, we check if an unknown file is present at
3492 3497 # the same path. If such a file exists, it may need to be backed up.
3493 3498 # Making the distinction at this stage keeps the backup
3494 3499 # logic simpler.
3495 3500 removunk = set()
3496 3501 for abs in removed:
3497 3502 target = repo.wjoin(abs)
3498 3503 if os.path.lexists(target):
3499 3504 removunk.add(abs)
3500 3505 removed -= removunk
3501 3506
3502 3507 dsremovunk = set()
3503 3508 for abs in dsremoved:
3504 3509 target = repo.wjoin(abs)
3505 3510 if os.path.lexists(target):
3506 3511 dsremovunk.add(abs)
3507 3512 dsremoved -= dsremovunk
3508 3513
3509 3514 # action to be actually performed by revert
3510 3515 # (<list of files>, <message>) tuple
3511 3516 actions = {'revert': ([], _('reverting %s\n')),
3512 3517 'add': ([], _('adding %s\n')),
3513 3518 'remove': ([], _('removing %s\n')),
3514 3519 'drop': ([], _('removing %s\n')),
3515 3520 'forget': ([], _('forgetting %s\n')),
3516 3521 'undelete': ([], _('undeleting %s\n')),
3517 3522 'noop': (None, _('no changes needed to %s\n')),
3518 3523 'unknown': (None, _('file not managed: %s\n')),
3519 3524 }
3520 3525
3521 3526 # "constant" that convey the backup strategy.
3522 3527 # All set to `discard` if `no-backup` is set do avoid checking
3523 3528 # no_backup lower in the code.
3524 3529 # These values are ordered for comparison purposes
3525 3530 backupinteractive = 3 # do backup if interactively modified
3526 3531 backup = 2 # unconditionally do backup
3527 3532 check = 1 # check if the existing file differs from target
3528 3533 discard = 0 # never do backup
3529 3534 if opts.get('no_backup'):
3530 3535 backupinteractive = backup = check = discard
3531 3536 if interactive:
3532 3537 dsmodifiedbackup = backupinteractive
3533 3538 else:
3534 3539 dsmodifiedbackup = backup
3535 3540 tobackup = set()
3536 3541
3537 3542 backupanddel = actions['remove']
3538 3543 if not opts.get('no_backup'):
3539 3544 backupanddel = actions['drop']
3540 3545
3541 3546 disptable = (
3542 3547 # dispatch table:
3543 3548 # file state
3544 3549 # action
3545 3550 # make backup
3546 3551
3547 3552 ## Sets that result in files being changed on disk
3548 3553 # Modified compared to target, no local change
3549 3554 (modified, actions['revert'], discard),
3550 3555 # Modified compared to target, but local file is deleted
3551 3556 (deleted, actions['revert'], discard),
3552 3557 # Modified compared to target, local change
3553 3558 (dsmodified, actions['revert'], dsmodifiedbackup),
3554 3559 # Added since target
3555 3560 (added, actions['remove'], discard),
3556 3561 # Added in working directory
3557 3562 (dsadded, actions['forget'], discard),
3558 3563 # Added since target, have local modification
3559 3564 (modadded, backupanddel, backup),
3560 3565 # Added since target but file is missing in working directory
3561 3566 (deladded, actions['drop'], discard),
3562 3567 # Removed since target, before working copy parent
3563 3568 (removed, actions['add'], discard),
3564 3569 # Same as `removed` but an unknown file exists at the same path
3565 3570 (removunk, actions['add'], check),
3566 3571 # Removed since target, marked as such in working copy parent
3567 3572 (dsremoved, actions['undelete'], discard),
3568 3573 # Same as `dsremoved` but an unknown file exists at the same path
3569 3574 (dsremovunk, actions['undelete'], check),
3570 3575 ## the following sets do not result in any file changes
3571 3576 # File with no modification
3572 3577 (clean, actions['noop'], discard),
3573 3578 # Existing file, not tracked anywhere
3574 3579 (unknown, actions['unknown'], discard),
3575 3580 )
3576 3581
3577 3582 for abs, (rel, exact) in sorted(names.items()):
3578 3583 # target file to be touched on disk
3579 3584 target = repo.wjoin(abs)
3580 3585 # search the entry in the dispatch table.
3581 3586 # if the file is in any of these sets, it was touched in the working
3582 3587 # directory parent and we are sure it needs to be reverted.
3583 3588 for table, (xlist, msg), dobackup in disptable:
3584 3589 if abs not in table:
3585 3590 continue
3586 3591 if xlist is not None:
3587 3592 xlist.append(abs)
3588 3593 if dobackup:
3589 3594 # If in interactive mode, don't automatically create
3590 3595 # .orig files (issue4793)
3591 3596 if dobackup == backupinteractive:
3592 3597 tobackup.add(abs)
3593 3598 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3594 3599 bakname = scmutil.origpath(ui, repo, rel)
3595 3600 ui.note(_('saving current version of %s as %s\n') %
3596 3601 (rel, bakname))
3597 3602 if not opts.get('dry_run'):
3598 3603 if interactive:
3599 3604 util.copyfile(target, bakname)
3600 3605 else:
3601 3606 util.rename(target, bakname)
3602 3607 if ui.verbose or not exact:
3603 3608 if not isinstance(msg, basestring):
3604 3609 msg = msg(abs)
3605 3610 ui.status(msg % rel)
3606 3611 elif exact:
3607 3612 ui.warn(msg % rel)
3608 3613 break
3609 3614
3610 3615 if not opts.get('dry_run'):
3611 3616 needdata = ('revert', 'add', 'undelete')
3612 3617 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3613 3618 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3614 3619
3615 3620 if targetsubs:
3616 3621 # Revert the subrepos on the revert list
3617 3622 for sub in targetsubs:
3618 3623 try:
3619 3624 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3620 3625 except KeyError:
3621 3626 raise error.Abort("subrepository '%s' does not exist in %s!"
3622 3627 % (sub, short(ctx.node())))
3623 3628
3624 3629 def _revertprefetch(repo, ctx, *files):
3625 3630 """Let extension changing the storage layer prefetch content"""
3626 3631
3627 3632 def _performrevert(repo, parents, ctx, actions, interactive=False,
3628 3633 tobackup=None):
3629 3634 """function that actually perform all the actions computed for revert
3630 3635
3631 3636 This is an independent function to let extensions plug in and react to
3632 3637 the imminent revert.
3633 3638
3634 3639 Make sure you have the working directory locked when calling this function.
3635 3640 """
3636 3641 parent, p2 = parents
3637 3642 node = ctx.node()
3638 3643 excluded_files = []
3639 3644 matcher_opts = {"exclude": excluded_files}
3640 3645
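# write the target revision's version of ``f`` into the working directory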
3641 3646 def checkout(f):
3642 3647 fc = ctx[f]
3643 3648 repo.wwrite(f, fc.data(), fc.flags())
3644 3649
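# unlink ``f`` from the working directory, ignoring a missing file, and mark
# it as removed in the dirstate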
3645 3650 def doremove(f):
3646 3651 try:
3647 3652 repo.wvfs.unlinkpath(f)
3648 3653 except OSError:
3649 3654 pass
3650 3655 repo.dirstate.remove(f)
3651 3656
3652 3657 audit_path = pathutil.pathauditor(repo.root, cached=True)
3653 3658 for f in actions['forget'][0]:
3654 3659 if interactive:
3655 3660 choice = repo.ui.promptchoice(
3656 3661 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3657 3662 if choice == 0:
3658 3663 repo.dirstate.drop(f)
3659 3664 else:
3660 3665 excluded_files.append(repo.wjoin(f))
3661 3666 else:
3662 3667 repo.dirstate.drop(f)
3663 3668 for f in actions['remove'][0]:
3664 3669 audit_path(f)
3665 3670 if interactive:
3666 3671 choice = repo.ui.promptchoice(
3667 3672 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3668 3673 if choice == 0:
3669 3674 doremove(f)
3670 3675 else:
3671 3676 excluded_files.append(repo.wjoin(f))
3672 3677 else:
3673 3678 doremove(f)
3674 3679 for f in actions['drop'][0]:
3675 3680 audit_path(f)
3676 3681 repo.dirstate.remove(f)
3677 3682
3678 3683 normal = None
3679 3684 if node == parent:
3680 3685 # We're reverting to our parent. If possible, we'd like status
3681 3686 # to report the file as clean. We have to use normallookup for
3682 3687 # merges to avoid losing information about merged/dirty files.
3683 3688 if p2 != nullid:
3684 3689 normal = repo.dirstate.normallookup
3685 3690 else:
3686 3691 normal = repo.dirstate.normal
3687 3692
3688 3693 newlyaddedandmodifiedfiles = set()
3689 3694 if interactive:
3690 3695 # Prompt the user for changes to revert
3691 3696 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3692 3697 m = scmutil.match(ctx, torevert, matcher_opts)
3693 3698 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3694 3699 diffopts.nodates = True
3695 3700 diffopts.git = True
3696 3701 operation = 'discard'
3697 3702 reversehunks = True
3698 3703 if node != parent:
3699 3704 operation = 'revert'
3700 3705 reversehunks = repo.ui.configbool('experimental',
3701 3706 'revertalternateinteractivemode')
3702 3707 if reversehunks:
3703 3708 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3704 3709 else:
3705 3710 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3706 3711 originalchunks = patch.parsepatch(diff)
3707 3712
3708 3713 try:
3709 3714
3710 3715 chunks, opts = recordfilter(repo.ui, originalchunks,
3711 3716 operation=operation)
3712 3717 if reversehunks:
3713 3718 chunks = patch.reversehunks(chunks)
3714 3719
3715 3720 except error.PatchError as err:
3716 3721 raise error.Abort(_('error parsing patch: %s') % err)
3717 3722
3718 3723 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3719 3724 if tobackup is None:
3720 3725 tobackup = set()
3721 3726 # Apply changes
3722 3727 fp = stringio()
3723 3728 for c in chunks:
3724 3729 # Create a backup file only if this hunk should be backed up
3725 3730 if ishunk(c) and c.header.filename() in tobackup:
3726 3731 abs = c.header.filename()
3727 3732 target = repo.wjoin(abs)
3728 3733 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3729 3734 util.copyfile(target, bakname)
3730 3735 tobackup.remove(abs)
3731 3736 c.write(fp)
3732 3737 dopatch = fp.tell()
3733 3738 fp.seek(0)
3734 3739 if dopatch:
3735 3740 try:
3736 3741 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3737 3742 except error.PatchError as err:
3738 3743 raise error.Abort(str(err))
3739 3744 del fp
3740 3745 else:
3741 3746 for f in actions['revert'][0]:
3742 3747 checkout(f)
3743 3748 if normal:
3744 3749 normal(f)
3745 3750
3746 3751 for f in actions['add'][0]:
3747 3752 # Don't check out modified files; they are already created by the diff
3748 3753 if f not in newlyaddedandmodifiedfiles:
3749 3754 checkout(f)
3750 3755 repo.dirstate.add(f)
3751 3756
3752 3757 normal = repo.dirstate.normallookup
3753 3758 if node == parent and p2 == nullid:
3754 3759 normal = repo.dirstate.normal
3755 3760 for f in actions['undelete'][0]:
3756 3761 checkout(f)
3757 3762 normal(f)
3758 3763
3759 3764 copied = copies.pathcopies(repo[parent], ctx)
3760 3765
3761 3766 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3762 3767 if f in copied:
3763 3768 repo.dirstate.copy(copied[f], f)
3764 3769
3765 3770 class command(registrar.command):
3766 3771 def _doregister(self, func, name, *args, **kwargs):
3767 3772 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3768 3773 return super(command, self)._doregister(func, name, *args, **kwargs)
3769 3774
3770 3775 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3771 3776 # commands.outgoing. "missing" is the "missing" attribute of the result of
3772 3777 # "findcommonoutgoing()"
3773 3778 outgoinghooks = util.hooks()
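# For illustration only (hypothetical extension code, not part of this
# module): callbacks are registered through util.hooks.add(), e.g.
#
#   def _reportmissing(ui, repo, other, opts, missing):
#       ui.note('outgoing: %d missing changesets\n' % len(missing))
#
#   outgoinghooks.add('myextension', _reportmissing)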
3774 3779
3775 3780 # a list of (ui, repo) functions called by commands.summary
3776 3781 summaryhooks = util.hooks()
3777 3782
3778 3783 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3779 3784 #
3780 3785 # functions should return tuple of booleans below, if 'changes' is None:
3781 3786 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3782 3787 #
3783 3788 # otherwise, 'changes' is a tuple of tuples below:
3784 3789 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3785 3790 # - (desturl, destbranch, destpeer, outgoing)
3786 3791 summaryremotehooks = util.hooks()
3787 3792
3788 3793 # A list of state files kept by multistep operations like graft.
3789 3794 # Since graft cannot be aborted, it is considered 'clearable' by update.
3790 3795 # note: bisect is intentionally excluded
3791 3796 # (state file, clearable, allowcommit, error, hint)
3792 3797 unfinishedstates = [
3793 3798 ('graftstate', True, False, _('graft in progress'),
3794 3799 _("use 'hg graft --continue' or 'hg update' to abort")),
3795 3800 ('updatestate', True, False, _('last update was interrupted'),
3796 3801 _("use 'hg update' to get a consistent checkout"))
3797 3802 ]
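# For illustration only (hypothetical): an extension implementing a multistep
# 'frobnicate' command could declare its state file here so checkunfinished()
# and clearunfinished() know about it, e.g.
#
#   unfinishedstates.append(
#       ('frobnicatestate', True, False, _('frobnicate in progress'),
#        _("use 'hg frobnicate --continue' or 'hg update' to abort")))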
3798 3803
3799 3804 def checkunfinished(repo, commit=False):
3800 3805 '''Look for an unfinished multistep operation, like graft, and abort
3801 3806 if found. It's probably good to check this right before
3802 3807 bailifchanged().
3803 3808 '''
3804 3809 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3805 3810 if commit and allowcommit:
3806 3811 continue
3807 3812 if repo.vfs.exists(f):
3808 3813 raise error.Abort(msg, hint=hint)
3809 3814
3810 3815 def clearunfinished(repo):
3811 3816 '''Check for unfinished operations (as above), and clear the ones
3812 3817 that are clearable.
3813 3818 '''
3814 3819 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3815 3820 if not clearable and repo.vfs.exists(f):
3816 3821 raise error.Abort(msg, hint=hint)
3817 3822 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3818 3823 if clearable and repo.vfs.exists(f):
3819 3824 util.unlink(repo.vfs.join(f))
3820 3825
3821 3826 afterresolvedstates = [
3822 3827 ('graftstate',
3823 3828 _('hg graft --continue')),
3824 3829 ]
3825 3830
3826 3831 def howtocontinue(repo):
3827 3832 '''Check for an unfinished operation and return the command to finish
3828 3833 it.
3829 3834
3830 3835 afterresolvedstates tuples define a .hg/{file} and the corresponding
3831 3836 command needed to finish it.
3832 3837
3833 3838 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3834 3839 a boolean.
3835 3840 '''
3836 3841 contmsg = _("continue: %s")
3837 3842 for f, msg in afterresolvedstates:
3838 3843 if repo.vfs.exists(f):
3839 3844 return contmsg % msg, True
3840 3845 if repo[None].dirty(missing=True, merge=False, branch=False):
3841 3846 return contmsg % _("hg commit"), False
3842 3847 return None, None
3843 3848
3844 3849 def checkafterresolved(repo):
3845 3850 '''Inform the user about the next action after completing hg resolve
3846 3851
3847 3852 If there's a matching afterresolvedstates, howtocontinue will yield
3848 3853 repo.ui.warn as the reporter.
3849 3854
3850 3855 Otherwise, it will yield repo.ui.note.
3851 3856 '''
3852 3857 msg, warning = howtocontinue(repo)
3853 3858 if msg is not None:
3854 3859 if warning:
3855 3860 repo.ui.warn("%s\n" % msg)
3856 3861 else:
3857 3862 repo.ui.note("%s\n" % msg)
3858 3863
3859 3864 def wrongtooltocontinue(repo, task):
3860 3865 '''Raise an abort suggesting how to properly continue if there is an
3861 3866 active task.
3862 3867
3863 3868 Uses howtocontinue() to find the active task.
3864 3869
3865 3870 If there is no unfinished task (only the generic 'hg commit'
3866 3871 suggestion, or nothing at all), no hint is offered.
3867 3872 '''
3868 3873 after = howtocontinue(repo)
3869 3874 hint = None
3870 3875 if after[1]:
3871 3876 hint = after[0]
3872 3877 raise error.Abort(_('no %s in progress') % task, hint=hint)