##// END OF EJS Templates
cmdutil: convert the prefetchfiles() hook to a callback mechanism (API)...
Matt Harbison -
r36154:f52a9336 default
parent child Browse files
Show More
@@ -1,3162 +1,3164 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13 import tempfile
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 hex,
18 18 nullid,
19 19 nullrev,
20 20 short,
21 21 )
22 22
23 23 from . import (
24 24 bookmarks,
25 25 changelog,
26 26 copies,
27 27 crecord as crecordmod,
28 28 dirstateguard,
29 29 encoding,
30 30 error,
31 31 formatter,
32 32 logcmdutil,
33 33 match as matchmod,
34 34 merge as mergemod,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 pycompat,
39 39 registrar,
40 40 revlog,
41 41 rewriteutil,
42 42 scmutil,
43 43 smartset,
44 44 subrepoutil,
45 45 templater,
46 46 util,
47 47 vfs as vfsmod,
48 48 )
stringio = util.stringio

# templates of common command options
#
# Each entry is a flag tuple in the form registrar/fancyopts expects:
# (short name, long name, default value, help text[, value name]).

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

# options for commands that contact a remote repository
remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

# options for commands that walk the working directory
walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

# whitespace-handling options shared by diff-like commands
diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
163 163
def ishunk(x):
    """Report whether x is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
167 167
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks create a brand new file
    and were not present in originalchunks."""
    return set(chunk.header.filename()
               for chunk in chunks
               if (ishunk(chunk)
                   and chunk.header.isnewfile()
                   and chunk not in originalchunks))
175 175
def parsealiases(cmd):
    """Split a command-table key into its list of aliases.

    Any leading '^' markers (the "show in short help" flag) are stripped
    before splitting on '|'.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
178 178
def setupwrapcolorwrite(ui):
    """Wrap ui.write so that diff output is labeled/colorized.

    Returns the original (unwrapped) ui.write so the caller can restore
    it when done.
    """
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        # difflabel yields (chunk, label) pairs; each piece is written
        # with the diff-specific label appended to the caller's label
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
191 191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter originalhunks, using curses when enabled.

    Dispatches to crecord's curses-based chunk selector when usecurses is
    set (driven by a script file in test mode), otherwise to the plain
    text prompt in patch.filterpatch.
    """
    if usecurses:
        if testfile:
            # test mode: replay selector input recorded in testfile
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)
204 204
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    # temporarily wrap ui.write so the previewed diff is colorized;
    # always restored, even if the selector raises
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts
221 221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via commitfunc.

    Aborts when the ui is non-interactive (suggesting cmdsuggest when
    given).  filterfn is the hunk-selection function (e.g. recordfilter);
    backupall forces backing up every changed file rather than only the
    ones being touched.  The actual work happens in the nested
    recordfunc, which is run under the repo wlock via commit().
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # build a git-style diff with function context; dates are noise here
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                # headers know their files; bare hunks do not
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup dir from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into a single patch
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # newly added+modified files are removed so the revert below
            # does not leave a stale copy; the patch re-creates them
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
403 403
class dirnode(object):
    """A directory in the user's working copy, holding the information
    needed to decide whether its status output can be tersed.

    path is the path to the directory.

    statuses is the set of status characters seen for any file at or
    below this directory (subdirectories included).

    files is a list of (name, status) pairs for files that are direct
    children of this directory.

    subdirs maps a child directory name to its own dirnode object.
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record filename as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Add a file somewhere at or below this directory.

        A filename containing a path separator belongs to a
        subdirectory: descend (creating intermediate dirnodes as
        needed) until its immediate parent directory is reached.
        """

        if '/' not in filename:
            # direct child of this directory
            self._addfileindir(filename, status)
        else:
            subdir, remainder = filename.split('/', 1)

            # make sure a dirnode exists for subdir
            if subdir not in self.subdirs:
                self.subdirs[subdir] = dirnode(os.path.join(self.path, subdir))

            self.subdirs[subdir].addfile(remainder, status)

        # every level on the way down records the status
        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) pairs for this subtree, tersed when possible.

        terseargs is the string of status letters passed by the user
        with the `--terse` flag.

        If every file at or below this directory shares a single status
        and that status letter is in terseargs, one entry for the whole
        directory is emitted.  Otherwise the files directly in this
        directory are listed and the walk recurses into each
        subdirectory.
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when the status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # files directly in this directory
        for entry in self.iterfilepaths():
            yield entry

        # recurse into each subdirectory
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
503 503
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory shares the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to `--terse`
    flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.

    Returns a list of file lists, one per status, in the order of allst.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        # status char is the first letter of the attribute name
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    # assemble the per-status lists in the canonical order
    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist
552 552
553 553 def _commentlines(raw):
554 554 '''Surround lineswith a comment char and a new line'''
555 555 lines = raw.splitlines()
556 556 commentedlines = ['# %s' % line for line in lines]
557 557 return '\n'.join(commentedlines) + '\n'
558 558
def _conflictsmsg(repo):
    """Return a commented message describing unresolved merge conflicts.

    Returns None when no merge state is active.  Paths are shown
    relative to the current working directory.
    """
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)
579 579
def _helpmessage(continuecmd, abortcmd):
    """Return commented continue/abort instructions."""
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)

def _rebasemsg():
    """Hint shown while a rebase is unfinished."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    """Hint shown while a histedit is unfinished."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    """Hint shown while an unshelve is unfinished."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _updatecleanmsg(dest=None):
    """Return the 'hg update --clean' instruction with a data-loss warning."""
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)

def _graftmsg():
    """Hint shown while a graft is unfinished."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())

def _mergemsg():
    """Hint shown while a merge is uncommitted."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())

def _bisectmsg():
    """Hint shown while a bisection is in progress."""
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)
611 611
def fileexistspredicate(filename):
    """Return a predicate testing whether repo's vfs contains filename."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
614 614
615 615 def _mergepredicate(repo):
616 616 return len(repo[None].parents()) > 1
617 617
# Each entry is (state name, predicate detecting the state, function
# returning a helpful message).  Order matters: _getrepostate returns
# the first entry whose predicate fires.
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
631 631
def _getrepostate(repo):
    """Return (state, predicate, msgfn) for the first unfinished state
    detected in repo, or None when no state matches.

    States listed in the commands.status.skipstates config are ignored.
    """
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
640 640
def morestatus(repo, fm):
    """Write extra status output describing any unfinished operation.

    Emits (through formatter fm) the unfinished state the repository is
    in, the list of unresolved merge conflicts if any, and a
    how-to-continue hint.  Does nothing when no unfinished state is
    detected.
    """
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)
655 655
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for key in keys:
        aliases = parsealiases(key)
        allcmds.extend(aliases)

        if cmd in aliases:
            found = cmd
        else:
            found = None
            if not strict:
                # allow unambiguous prefix matches unless strict
                for alias in aliases:
                    if alias.startswith(cmd):
                        found = alias
                        break

        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[key])
        else:
            choice[found] = (aliases, table[key])

    # fall back to debug commands only when nothing normal matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
693 693
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # an exact match wins outright
    if cmd in choice:
        return choice[cmd]

    matches = sorted(choice)
    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, matches)

    if matches:
        return choice[matches[0]]

    raise error.UnknownCommand(cmd, allcmds)
709 709
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Rewrites each changeset in revs (a linear, topologically-closed,
    non-obsolete, non-merge set) as a new changeset on branch label,
    then creates obsmarkers, moves bookmarks, and updates the working
    copy parent when it was rewritten.
    """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            # record the original node so the rewrite is traceable
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            # preserve the phase of the original changeset
            commitphase = ctx.phase()
            overrides = {('phases', 'new-commit'): commitphase}
            with repo.ui.configoverride(overrides, 'branch-change'):
                newnode = repo.commitctx(mc)

            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change')

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
800 800
def findrepo(p):
    """Walk upwards from p looking for a directory containing '.hg'.

    Returns the repository root path, or None when the filesystem root
    is reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root
            return None
        p = parent

    return p
808 808
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # any modified, added, removed or deleted file makes the wdir dirty
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    # enforce the same precondition in every subrepository
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
826 826
def logmessage(ui, opts):
    """ get the log message according to -m and -l option

    Returns the message string, or None when neither option was given.
    Raises error.Abort when both are given or the logfile is unreadable.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                # '-' (or empty) means: read the message from stdin
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message
845 845
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
862 862
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # force the editor whenever post-processing or an extra
        # message is requested, even without --edit
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
893 893
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in an output filename pattern pat.

    Always available: '%%' (literal %) and '%b' (basename of repo root).
    When the corresponding argument is supplied: '%H'/'%h' full/short
    node hash, '%R' rev number, '%r' zero-padded rev number, '%m'
    sanitized desc, '%N' total, '%n' (zero-padded) seqno, and
    '%s'/'%d'/'%p' basename/dirname/full pathname.

    Raises error.Abort for an escape that is not available.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update({
                'H': lambda: hex(node),
                'R': lambda: '%d' % repo.changelog.rev(node),
                'h': lambda: short(node),
                'm': lambda: re.sub('[^\w]', '_', desc or ''),
                'r': lambda: ('%d' % repo.changelog.rev(node)).zfill(
                    revwidth or 0),
            })
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: ('%d' % seqno).zfill(len('%d' % total))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        pieces = []
        end = len(pat)
        i = 0
        while i < end:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = expander[pat[i:i + 1]]()
            pieces.append(c)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename")
                          % inst.args[0])
939 939
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # An empty/None pattern means "no file given", which maps to stdio,
    # as does the conventional '-' name.
    if not pat:
        return True
    return pat == '-'
943 943
944 944 class _unclosablefile(object):
945 945 def __init__(self, fp):
946 946 self._fp = fp
947 947
948 948 def close(self):
949 949 pass
950 950
951 951 def __iter__(self):
952 952 return iter(self._fp)
953 953
954 954 def __getattr__(self, attr):
955 955 return getattr(self._fp, attr)
956 956
957 957 def __enter__(self):
958 958 return self
959 959
960 960 def __exit__(self, exc_type, exc_value, exc_tb):
961 961 pass
962 962
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the output file described by pattern 'pat'.

    A stdio-style pattern ('' or '-') yields an unclosable wrapper
    around ui.fout/ui.fin; otherwise the pattern is expanded via
    makefilename() and opened with 'mode' (possibly overridden per
    file by 'modemap', which is flipped to append after first write).
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        # first open truncates; subsequent opens of the same file append
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
981 981
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    Exactly one of opts['changelog'], opts['manifest'], opts['dir'] or
    'file_' selects the revlog; conflicting combinations raise
    error.Abort, and a missing selection raises error.CommandError.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate mutually-exclusive option combinations up front
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered so hidden revisions are still reachable
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # open a raw .i revlog file directly, outside any repository
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
1026 1026
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching 'pats'.

    The last entry of 'pats' is the destination; the rest are sources.
    Returns True if any per-file copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one source pattern into (abs, rel, exact) tuples,
        # warning about (and skipping) unmanaged or removed files.
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Copy/move one file, updating dirstate. Returns True on failure.
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            # same file after case normalization: only meaningful as a
            # case-only rename (a => A)
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after: only record the copy/move, never touch the files
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temp name so the
                    # filesystem actually changes the case
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        # target-path chooser for the normal (non --after) case
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # target-path chooser for --after: the move already happened on
        # disk, so guess the layout by scoring which strip depth matches
        # the most existing files
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest
                    # when stripped at 'striplen'
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1272 1272
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' functions are run before the commit is made and are provided
# the following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of the in-memory commit and more. Feel free to rework the code to
# get there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided
# the following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1293 1293
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :opts: the full dict of options passed to the import command
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (summary message, committed node or None, rejects flag)
    tuple.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    # no patch content was extracted: nothing to do
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            # --exact: reproduce the recorded parents from the patch header
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # header parents are unknown locally: fall back to the
                # caller-supplied parents
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    # --partial: remember the hunks were rejected but
                    # keep going and commit what applied
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit directly from an in-memory file store
            # without touching the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always clean up the temporary patch file written by
        # patch.extract()
        os.unlink(tmpname)
1464 1464
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# to add nothing; it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1472 1472
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" via the write callback.

    Writes the header lines (user, date, branch, node, parents), any
    extension-provided extra headers, the description, then the diff
    against the selected parent.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # extension-supplied extra header lines, in registration order
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1505 1505
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                            the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    # NOTE(review): max() raises ValueError if revs is empty — presumably
    # callers guarantee a non-empty list; confirm before relying on it.
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            # rebind write to the per-revision output file; it is called
            # before the next loop iteration, so the closure over 'fo'
            # always refers to the current file
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1564 1564
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    'fm' is a formatter; one space-separated field is emitted per marker
    attribute (predecessor, successors, flags, parents, date, metadata),
    followed by a newline.
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # successors are only printed when the marker has any
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so drop it from the metadata dict
    meta = marker.metadata().copy()
    meta.pop('date', None)
    smeta = util.rapply(pycompat.maybebytestr, meta)
    fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1586 1586
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec

    Returns the revision number as a string, or raises error.Abort when
    no revision matches.
    """

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    # rev -> date tuple of every changeset matching the date spec
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    # walkchangerevs yields newest-first here, so the first hit is the
    # tipmost matching changeset
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1607 1607
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an infinite sequence of revision-window sizes.

    Starts at 'windowsize' and doubles after each yield until the size
    is no longer below 'sizelimit', after which the same size is
    repeated forever.
    """
    size = windowsize
    while True:
        yield size
        # keep doubling only while still below the limit
        if size < sizelimit:
            size *= 2
1613 1613
def _walkrevs(repo, opts):
    # Resolve the set of revisions to walk, newest first, from the
    # log-style options dict ('rev', 'follow', 'follow_first').
    #
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # following from an unrelated/empty working directory: nothing
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
1628 1628
class FileWalkError(Exception):
    """Raised by walkfilerevs() when file history cannot be walked using
    filelogs alone, telling the caller to fall back to the slow path."""
    pass
1631 1631
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    As a side effect, fncache is populated with rev -> [filename] for
    every wanted revision.
    '''
    wanted = set()
    # (filename, filenode) pairs of copy/rename sources discovered while
    # following renames; processed after the explicitly matched files
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every matched file,
        # then for every rename source found along the way
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1728 1728
class _followfilter(object):
    """Incrementally decide whether revisions belong to a --follow walk.

    The first rev passed to match() becomes the anchor ('startrev');
    subsequent revs match if they are descendants (when walking forward)
    or ancestors (when walking backwards) of the anchor.  match() must be
    fed revs monotonically in one direction, as it mutates 'roots' as it
    goes.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # nullrev until the first match() call fixes the anchor
        self.startrev = nullrev
        self.roots = set()
        # when True, only first parents are considered
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # parents of 'rev', honoring onlyfirst and dropping nullrev
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first call: anchor the walk on this revision
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace the visited root by its parents so the walk
                # keeps climbing the ancestry
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1766 1766
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.

    ``match`` selects the files of interest, ``opts`` is the command's
    option dict (follow/prune/removed/...), and ``prepare(ctx, fns)`` is
    invoked once per wanted context with an iterable of matched files.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # patterns or --removed force reading every changeset (the slow path)
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # each candidate rev is examined at most once
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): relies on `wanted` supporting subtraction
                # with a plain list; a builtin set would raise here —
                # presumably only reached when wanted is a smartset. Verify.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # forward pass over the window: give callers their data hook
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # then yield the window's revisions in the requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1903 1903
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by ``match`` for addition to the dirstate.

    ``prefix`` is the path of this repo relative to the outermost repo
    (used for subrepo recursion and messages).  With ``explicitonly``,
    only exactly-named files are added.  Returns the list of files that
    could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # accumulate files the matcher reported as bad, then delegate
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename collisions on case-insensitive systems
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # without --subrepos, recurse but only add explicit names
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
1946 1946
def addwebdirpath(repo, serverpath, webconf):
    """Register repo under serverpath in webconf, recursing into subrepos."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug('adding %s = %s\n' % (serverpath, root))

    # any revision touching .hgsub may have introduced subrepositories
    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)
            sub.addwebdirpath(serverpath, webconf)
1955 1955
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by ``match`` without deleting them.

    Returns a pair ``(bad, forgot)``: files that could not be forgotten
    and files that were.  With ``explicitonly``, only exactly-named
    files are considered.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # accumulate files the matcher reported as bad, then delegate
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly-named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2003 2003
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the files of ctx matched by m through formatter fm.

    Returns 0 when at least one file (here or in a subrepo) was listed,
    1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    dirstate = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and dirstate[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if not (subrepos or m.exact(subpath) or any(submatch.files())):
            continue
        sub = ctx.sub(subpath)
        try:
            recurse = m.exact(subpath) or subrepos
            if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                ret = 0
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % m.abs(subpath))

    return ret
2033 2033
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Remove files matched by ``m`` from tracking (and usually from disk).

    ``after`` records already-deleted files; ``force`` removes even
    modified/added files.  When ``warnings`` is provided (subrepo
    recursion), messages are appended to it and printing is left to the
    outermost caller.  Returns 1 if any file was skipped with a warning,
    0 otherwise.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        # top-level call: collect warnings here and print them at the end
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # closure over f: is this file inside a subrepository?
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only record files already deleted from disk
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
                ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2152 2152
2153 2153 def _updatecatformatter(fm, ctx, matcher, path, decode):
2154 2154 """Hook for adding data to the formatter used by ``hg cat``.
2155 2155
2156 2156 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2157 2157 this method first."""
2158 2158 data = ctx[path].data()
2159 2159 if decode:
2160 2160 data = ctx.repo().wwritedata(path, data)
2161 2161 fm.startitem()
2162 2162 fm.write('data', '%s', data)
2163 2163 fm.data(abspath=path, path=matcher.rel(path))
2164 2164
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of files in ctx matched by ``matcher``.

    Output goes through ``basefm`` (or per-file formatters derived from
    ``fntemplate``).  ``prefix`` is the path of this repo relative to
    the outermost repo.  Returns 0 if at least one file was written,
    1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                # best-effort: the directory may already exist, and a real
                # failure will surface when the file is opened
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        # 'fname' rather than 'file'/'abs': avoid shadowing builtins
        fname = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(fname)[0]:
                _prefetchfiles(repo, ctx, [fname])
                write(fname)
                return 0
        except KeyError:
            pass

    # materialize once: the walk result is iterated twice (prefetch + write)
    files = list(ctx.walk(matcher))
    _prefetchfiles(repo, ctx, files)

    for fname in files:
        write(fname)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2217 2217
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    else:
        dsguard = None

    with dsguard or util.nullcontextmanager():
        if dsguard:
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))
        return commitfunc(ui, repo, message, matcher, opts)
2238 2238
def samefile(f, ctx1, ctx2):
    """Report whether file f is identical (content and flags) in both contexts.

    A file absent from both contexts counts as the same; one present in
    only one of them counts as different.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # absent from ctx1: same only if also absent from ctx2
        return not in2
    if not in2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2250 2250
2251 2251 def amend(ui, repo, old, extra, pats, opts):
2252 2252 # avoid cycle context -> subrepo -> cmdutil
2253 2253 from . import context
2254 2254
2255 2255 # amend will reuse the existing user if not specified, but the obsolete
2256 2256 # marker creation requires that the current user's name is specified.
2257 2257 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2258 2258 ui.username() # raise exception if username not set
2259 2259
2260 2260 ui.note(_('amending changeset %s\n') % old)
2261 2261 base = old.p1()
2262 2262
2263 2263 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2264 2264 # Participating changesets:
2265 2265 #
2266 2266 # wctx o - workingctx that contains changes from working copy
2267 2267 # | to go into amending commit
2268 2268 # |
2269 2269 # old o - changeset to amend
2270 2270 # |
2271 2271 # base o - first parent of the changeset to amend
2272 2272 wctx = repo[None]
2273 2273
2274 2274 # Copy to avoid mutating input
2275 2275 extra = extra.copy()
2276 2276 # Update extra dict from amended commit (e.g. to preserve graft
2277 2277 # source)
2278 2278 extra.update(old.extra())
2279 2279
2280 2280 # Also update it from the from the wctx
2281 2281 extra.update(wctx.extra())
2282 2282
2283 2283 user = opts.get('user') or old.user()
2284 2284 date = opts.get('date') or old.date()
2285 2285
2286 2286 # Parse the date to allow comparison between date and old.date()
2287 2287 date = util.parsedate(date)
2288 2288
2289 2289 if len(old.parents()) > 1:
2290 2290 # ctx.files() isn't reliable for merges, so fall back to the
2291 2291 # slower repo.status() method
2292 2292 files = set([fn for st in repo.status(base, old)[:3]
2293 2293 for fn in st])
2294 2294 else:
2295 2295 files = set(old.files())
2296 2296
2297 2297 # add/remove the files to the working copy if the "addremove" option
2298 2298 # was specified.
2299 2299 matcher = scmutil.match(wctx, pats, opts)
2300 2300 if (opts.get('addremove')
2301 2301 and scmutil.addremove(repo, matcher, "", opts)):
2302 2302 raise error.Abort(
2303 2303 _("failed to mark all new/missing files as added/removed"))
2304 2304
2305 2305 # Check subrepos. This depends on in-place wctx._status update in
2306 2306 # subrepo.precommit(). To minimize the risk of this hack, we do
2307 2307 # nothing if .hgsub does not exist.
2308 2308 if '.hgsub' in wctx or '.hgsub' in old:
2309 2309 subs, commitsubs, newsubstate = subrepoutil.precommit(
2310 2310 ui, wctx, wctx._status, matcher)
2311 2311 # amend should abort if commitsubrepos is enabled
2312 2312 assert not commitsubs
2313 2313 if subs:
2314 2314 subrepoutil.writestate(repo, newsubstate)
2315 2315
2316 2316 filestoamend = set(f for f in wctx.files() if matcher(f))
2317 2317
2318 2318 changes = (len(filestoamend) > 0)
2319 2319 if changes:
2320 2320 # Recompute copies (avoid recording a -> b -> a)
2321 2321 copied = copies.pathcopies(base, wctx, matcher)
2322 2322 if old.p2:
2323 2323 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2324 2324
2325 2325 # Prune files which were reverted by the updates: if old
2326 2326 # introduced file X and the file was renamed in the working
2327 2327 # copy, then those two files are the same and
2328 2328 # we can discard X from our list of files. Likewise if X
2329 2329 # was removed, it's no longer relevant. If X is missing (aka
2330 2330 # deleted), old X must be preserved.
2331 2331 files.update(filestoamend)
2332 2332 files = [f for f in files if (not samefile(f, wctx, base)
2333 2333 or f in wctx.deleted())]
2334 2334
2335 2335 def filectxfn(repo, ctx_, path):
2336 2336 try:
2337 2337 # If the file being considered is not amongst the files
2338 2338 # to be amended, we should return the file context from the
2339 2339 # old changeset. This avoids issues when only some files in
2340 2340 # the working copy are being amended but there are also
2341 2341 # changes to other files from the old changeset.
2342 2342 if path not in filestoamend:
2343 2343 return old.filectx(path)
2344 2344
2345 2345 # Return None for removed files.
2346 2346 if path in wctx.removed():
2347 2347 return None
2348 2348
2349 2349 fctx = wctx[path]
2350 2350 flags = fctx.flags()
2351 2351 mctx = context.memfilectx(repo, ctx_,
2352 2352 fctx.path(), fctx.data(),
2353 2353 islink='l' in flags,
2354 2354 isexec='x' in flags,
2355 2355 copied=copied.get(path))
2356 2356 return mctx
2357 2357 except KeyError:
2358 2358 return None
2359 2359 else:
2360 2360 ui.note(_('copying changeset %s to %s\n') % (old, base))
2361 2361
2362 2362 # Use version of files as in the old cset
2363 2363 def filectxfn(repo, ctx_, path):
2364 2364 try:
2365 2365 return old.filectx(path)
2366 2366 except KeyError:
2367 2367 return None
2368 2368
2369 2369 # See if we got a message from -m or -l, if not, open the editor with
2370 2370 # the message of the changeset to amend.
2371 2371 message = logmessage(ui, opts)
2372 2372
2373 2373 editform = mergeeditform(old, 'commit.amend')
2374 2374 editor = getcommiteditor(editform=editform,
2375 2375 **pycompat.strkwargs(opts))
2376 2376
2377 2377 if not message:
2378 2378 editor = getcommiteditor(edit=True, editform=editform)
2379 2379 message = old.description()
2380 2380
2381 2381 pureextra = extra.copy()
2382 2382 extra['amend_source'] = old.hex()
2383 2383
2384 2384 new = context.memctx(repo,
2385 2385 parents=[base.node(), old.p2().node()],
2386 2386 text=message,
2387 2387 files=files,
2388 2388 filectxfn=filectxfn,
2389 2389 user=user,
2390 2390 date=date,
2391 2391 extra=extra,
2392 2392 editor=editor)
2393 2393
2394 2394 newdesc = changelog.stripdesc(new.description())
2395 2395 if ((not changes)
2396 2396 and newdesc == old.description()
2397 2397 and user == old.user()
2398 2398 and date == old.date()
2399 2399 and pureextra == old.extra()):
2400 2400 # nothing changed. continuing here would create a new node
2401 2401 # anyway because of the amend_source noise.
2402 2402 #
2403 2403 # This not what we expect from amend.
2404 2404 return old.node()
2405 2405
2406 2406 if opts.get('secret'):
2407 2407 commitphase = 'secret'
2408 2408 else:
2409 2409 commitphase = old.phase()
2410 2410 overrides = {('phases', 'new-commit'): commitphase}
2411 2411 with ui.configoverride(overrides, 'amend'):
2412 2412 newid = repo.commitctx(new)
2413 2413
2414 2414 # Reroute the working copy parent to the new changeset
2415 2415 repo.setparents(newid, nullid)
2416 2416 mapping = {old.node(): (newid,)}
2417 2417 obsmetadata = None
2418 2418 if opts.get('note'):
2419 2419 obsmetadata = {'note': opts['note']}
2420 2420 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
2421 2421
2422 2422 # Fixing the dirstate because localrepo.commitctx does not update
2423 2423 # it. This is rather convenient because we did not need to update
2424 2424 # the dirstate for all the files in the new commit which commitctx
2425 2425 # could have done if it updated the dirstate. Now, we can
2426 2426 # selectively update the dirstate only for the amended files.
2427 2427 dirstate = repo.dirstate
2428 2428
2429 2429 # Update the state of the files which were added and
2430 2430 # and modified in the amend to "normal" in the dirstate.
2431 2431 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2432 2432 for f in normalfiles:
2433 2433 dirstate.normal(f)
2434 2434
2435 2435 # Update the state of files which were removed in the amend
2436 2436 # to "removed" in the dirstate.
2437 2437 removedfiles = set(wctx.removed()) & filestoamend
2438 2438 for f in removedfiles:
2439 2439 dirstate.drop(f)
2440 2440
2441 2441 return newid
2442 2442
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, launching the editor only when it is empty."""
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2448 2448
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor to obtain a commit message for ctx.

    Returns the edited text with HG: lines stripped.  Raises Abort on an
    empty message, or (with ``unchangedmessagedetection``) when the user
    left the generated template untouched.  ``finishdesc`` may post-process
    the text before validation.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific [committemplate] entry for this editform,
    # falling back component by component ('changeset.commit.amend' ->
    # 'changeset.commit' -> 'changeset')
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2498 2498
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the [committemplate] template *ref* for ctx and return it."""
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    templ = logcmdutil.changesettemplater(ui, repo, spec)
    # let every [committemplate] entry be referenced from the template
    overrides = ((key, templater.unquotestring(value))
                 for key, value in ui.configitems('committemplate'))
    templ.t.cache.update(overrides)

    # ensure that extramsg is a string (callers may pass None)
    extramsg = extramsg or ''

    ui.pushbuffer()
    templ.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2512 2512
def hgprefix(msg):
    """Return *msg* with each non-empty line prefixed by "HG: "."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2515 2515
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text skeleton shown in the commit editor."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2543 2543
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages for the new changeset ``node``.

    Emits 'created new head' / 'reopening closed branch head' notices as
    appropriate, and the committed-changeset line in verbose/debug mode.
    ``bheads`` is the list of branch head nodes before the commit.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2591 2591
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status of files matched by pats/opts."""
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
2594 2594
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files in the working directory to their state in ``ctx``.

    ``parents`` is the (p1, p2) pair of the working directory; ``pats`` and
    ``opts`` select the files to revert and control behavior (interactive,
    no-backup, dry-run, ...).  Classifies every matched file by comparing
    three states (target revision, working copy parent, working copy), then
    dispatches the resulting actions to _performrevert().
    """
    opts = pycompat.byteskwargs(opts)
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress "not found" warnings for paths that are known in
                # the other context, are subrepos, or are directory prefixes
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of files>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets whose results will change files on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, bytes):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            # Old-style extensions may still wrap _revertprefetch(); keep
            # calling it, but warn that the hook mechanism replaced it.
            if _revertprefetch is not _revertprefetchstub:
                ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
                              "add a callback to 'scmutil.fileprefetchhooks'",
                              '4.6', stacklevel=1)
                _revertprefetch(repo, ctx,
                                *[actions[name][0] for name in needdata])
            oplist = [actions[name][0] for name in needdata]
            _prefetchfiles(repo, ctx,
                           [f for sublist in oplist for f in sublist])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats,
                                         **pycompat.strkwargs(opts))
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
2886 2887
def _revertprefetchstub(repo, ctx, *files):
    """Stub method for detecting extension wrapping of _revertprefetch(), to
    issue a deprecation warning."""

# Default value; revert() compares the current value against this stub to
# detect whether an (old-style) extension has wrapped _revertprefetch().
_revertprefetch = _revertprefetchstub
2892 2893
def _prefetchfiles(repo, ctx, files):
    """Let extensions changing the storage layer prefetch content for any
    non-merge based command, by running every registered callback."""
    prefetch = scmutil.fileprefetchhooks
    prefetch(repo, ctx, files)
2896 2898
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    ``actions`` maps action names ('forget', 'remove', 'drop', 'revert',
    'add', 'undelete', ...) to ([files], message) pairs as computed by
    revert().  In interactive mode the user is prompted per file/hunk and
    ``tobackup`` lists files needing a .orig backup before patching.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    # files the user declines to touch are appended to excluded_files, which
    # the matcher below consults through this shared dict
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write the target revision's content for f into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # unlink f (ignoring a file that is already gone) and mark it removed
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        # reverting to the parent discards hunks; reverting to another
        # revision applies the (forward) diff toward that revision instead
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    # restore copy/rename metadata for everything we rewrote
    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3033 3035
class command(registrar.command):
    """Deprecated: use registrar.command instead."""
    def _doregister(self, func, name, *args, **kwargs):
        # Mark the function so extensions.py can emit a deprecation warning
        # for extensions still registering commands through this class.
        func._deprecatedregistrar = True  # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)
3039 3041
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl,   destbranch,   destpeer,   outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3068 3070
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().  With commit=True, states flagged as allowing a
    commit are tolerated.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        skippable = commit and allowcommit
        if not skippable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3079 3081
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # First pass: refuse to touch anything if a non-clearable state exists.
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # Second pass: drop every clearable state file that is present.
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3090 3092
# A list of (state file, continue command) pairs: when .hg/<state file>
# exists, howtocontinue() suggests the given command to finish the operation.
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3095 3097
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string or None and 'warning'
    is a boolean.
    '''
    contmsg = _("continue: %s")
    for statefile, cmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmd, True
    dirty = repo[None].dirty(missing=True, merge=False, branch=False)
    if dirty:
        return contmsg % _("hg commit"), False
    return None, None
3113 3115
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, the message goes through
    repo.ui.warn; otherwise through repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3128 3130
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), no hint is offered.
    '''
    msg, warning = howtocontinue(repo)
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
3143 3145
class changeset_printer(logcmdutil.changesetprinter):
    """Deprecated: use logcmdutil.changesetprinter instead."""

    def __init__(self, ui, *args, **kwargs):
        # The replacement is logcmdutil.changesetprinter (this class's direct
        # base); the old message wrongly pointed at the nonexistent
        # 'logcmdutil.logcmdutil'.
        msg = ("'cmdutil.changeset_printer' is deprecated, "
               "use 'logcmdutil.changesetprinter'")
        ui.deprecwarn(msg, "4.6")
        super(changeset_printer, self).__init__(ui, *args, **kwargs)
3151 3153
def displaygraph(ui, *args, **kwargs):
    """Deprecated shim; forwards to logcmdutil.displaygraph()."""
    ui.deprecwarn("'cmdutil.displaygraph' is deprecated, "
                  "use 'logcmdutil.displaygraph'", "4.6")
    return logcmdutil.displaygraph(ui, *args, **kwargs)
3157 3159
def show_changeset(ui, *args, **kwargs):
    """Deprecated shim; forwards to logcmdutil.changesetdisplayer()."""
    ui.deprecwarn("'cmdutil.show_changeset' is deprecated, "
                  "use 'logcmdutil.changesetdisplayer'", "4.6")
    return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
@@ -1,1415 +1,1420 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
class status(tuple):
    '''Immutable 7-tuple holding one list of files per status category.
    The 'deleted', 'unknown' and 'ignored' properties are only relevant to
    the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        groups = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, groups)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
103 103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs for every subrepo path mentioned in
    either context; paths only present in ctx2 yield a null subrepo.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # dict.items() rather than iteritems(): sorted() yields an identical
    # result on Python 2 and also works unmodified on Python 3.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secret = []
    for n in excluded or ():
        ctx = repo[n]
        # extinct secret changesets are not worth mentioning
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secret.append(n)

    if secret:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secret))
    else:
        ui.status(_("no changes found\n"))
145 145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print a traceback (when --traceback is set) before the outer
            # handlers turn the exception into a message + exit code
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # NOTE: Python 2 only name; basestring covers both str and unicode
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # intervention required is not a failure; exit code 1
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-style exception
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-style exception
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): silent
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
256 256
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not usable as a new label (bookmark/branch/tag) name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
270 270
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked
    file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
275 275
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on
    config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
287 287
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames; returns an (abort, warn) pair of booleans'''
    val = ui.config('ui', 'portablefilenames')
    lowered = val.lower()
    parsed = util.parsebool(val)
    # Windows always aborts on non-portable names
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
300 300
class casecollisionauditor(object):
    '''Detect additions of files whose names differ only in case.

    Instances are callable; call with each filename about to be added.
    Depending on the 'abort' flag a collision either aborts or warns.'''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        # abort: raise error.Abort instead of warning on collision
        self._abort = abort
        # Lower-case the entire dirstate in one encoding.lower() call by
        # joining all names on NUL, rather than lowering file by file.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # Check filename f for a case collision with previously seen names.
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # A file already tracked in the dirstate does not collide with
        # itself, hence the extra "f not in self._dirstate" guard.
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
324 324
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view up to maxrev
    and returns that SHA-1 digest, or None when nothing is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    hidden = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not hidden:
        return None
    sha = hashlib.sha1()
    for rev in hidden:
        sha.update('%d;' % rev)
    return sha.digest()
348 348
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only propagate walk errors for the root path itself; errors
        # deeper in the tree (e.g. permission denied) are ignored.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst; return False when an entry
            # with the same identity was already seen (symlink loop).
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat there is no way to detect symlink loops, so
        # symlink following must be disabled entirely
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the link target, sharing seen_dirs so
                        # loops across symlinks are detected
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
396 396
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; use the wdir id.
    node = ctx.node()
    return wdirid if node is None else node
403 403
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has rev None; map it to wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
411 411
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
417 417
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # debug output gets the full hash, normal output the short form
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
425 425
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single changectx, or repo[default] when empty.

    Aborts when the spec resolves to an empty revision set."""
    # rev 0 is a legitimate revision, not "no spec given"
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
434 434
def _pairspec(revspec):
    # True when the parsed revset is a top-level range expression, which
    # forces revpair() to report a pair even if both ends coincide.
    tree = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return tree and tree[0] in rangeops
438 438
def revpair(repo, revs):
    """Resolve user revision specs into a (firstnode, secondnode) pair.

    With no revs, returns the working directory's first parent and None.
    The second element is None when the specs collapse to one revision,
    unless the spec was syntactically a range expression."""
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick endpoints in smartset order when known, falling back to the
    # first/last elements for unordered sets
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # first == second with multiple specs means at least one individual
    # spec matched nothing (e.g. '-r a -r emptyset()')
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
468 468
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    def _asspec(spec):
        # bare integers are taken as revision numbers
        if isinstance(spec, int):
            return revsetlang.formatspec('rev(%d)', spec)
        return spec

    allspecs = [_asspec(spec) for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
496 496
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    # merges always show both parents
    if len(parents) > 1:
        return parents
    # debug mode pads the single parent with the null revision
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    # a parent that is simply the previous revision carries no information
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents
512 512
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit pattern kind ('re:', 'glob:', ...) is kept as-is
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # glob matched nothing: pass the original pattern through
            expanded.append(kindpat)
    return expanded
531 531
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a single empty pattern means "no patterns"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # note: closes over 'm', which is assigned below before any
        # matching (and therefore any bad() call) can happen
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # an always-matcher means the patterns imposed no restriction
    if m.always():
        pats = []
    return m, pats
556 556
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, patsused = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
561 561
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
565 565
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
569 569
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError(msg) unless the pattern designates exactly one
    file in revision `rev`.
    """
    # a plain path (no 'glob:'/'re:' prefix) can be canonicalized directly
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
583 583
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (walk ancestors top-down so the shallowest conflict is cleared)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # a directory sitting where the backup file must go also conflicts
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
619 619
620 620 class _containsnode(object):
621 621 """proxy __contains__(node) to container.__contains__ which accepts revs"""
622 622
623 623 def __init__(self, repo, revcontainer):
624 624 self._torev = repo.changelog.rev
625 625 self._revcontains = revcontainer.__contains__
626 626
627 627 def __contains__(self, node):
628 628 return self._revcontains(self._torev(node))
629 629
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        # a bare iterable means the nodes are pruned with no successors
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # explicit move given by the caller takes precedence
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
722 722
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule addition of new files and removal of missing files matched
    by matcher, recursing into subrepos; detect renames when similarity > 0.

    Returns 1 when an explicitly requested path was rejected or a subrepo
    addremove failed, 0 otherwise.'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only explicitly requested files count as user-visible failures
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what is about to be added/removed (unless exact + quiet)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
778 778
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # note: the badfn lambda closes over 'rejected', which is bound on the
    # next line before the matcher can invoke it
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
807 807
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the dirstate status char ('?' untracked, 'r' removed,
        # 'a' added); st appears to be the stat result, falsy when the
        # file is absent from disk -- see dirstate.walk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
836 836
837 837 def _findrenames(repo, matcher, added, removed, similarity):
838 838 '''Find renames from removed files to added ones.'''
839 839 renames = {}
840 840 if similarity > 0:
841 841 for old, new, score in similar.findrenames(repo, added, removed,
842 842 similarity):
843 843 if (repo.ui.verbose or not matcher.exact(old)
844 844 or not matcher.exact(new)):
845 845 repo.ui.status(_('recording removal of %s as rename to %s '
846 846 '(%d%% similar)\n') %
847 847 (matcher.rel(old), matcher.rel(new),
848 848 score * 100))
849 849 renames[new] = old
850 850 return renames
851 851
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under a single wlock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
861 861
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # just make sure dst is tracked normally again; no copy data needed
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only just added: there is no committed revision
            # to record copy data against, so only add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
880 880
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read; raises RequirementError when the
    file is corrupt or lists features this Mercurial does not know.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for req in requirements:
        if req in supported:
            continue
        # an empty or oddly-shaped entry means the file itself is damaged,
        # not merely listing an unknown feature
        if not req or not req[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(req)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
899 899
def writerequires(opener, requirements):
    # one requirement per line, sorted so the file content is deterministic
    with opener('requires', 'w') as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
904 904
class filecachesubentry(object):
    """Tracks stat data for a single file, for cache invalidation.

    _cacheable records whether this file's stat information can be
    trusted to detect modifications (per util.cachestat.cacheable);
    None means "not determined yet"."""
    def __init__(self, path, stat):
        # path: file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record current stat info as the new comparison baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """True when the file looks modified since the last refresh
        (conservatively True when stat data cannot be trusted)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
959 959
class filecacheentry(object):
    """Aggregates filecachesubentry objects for a group of paths."""
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        # re-baseline every tracked path
        for entry in self._entries:
            entry.refresh()
976 976
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator entry point: remember the compute function; its name
        # also keys the per-object _filecache dict
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # invariant: name in obj.__dict__ implies name in obj._filecache
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1055 1055
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # record format: "<revspec>[ <value>]"
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always drain and close the subprocess/URL before inspecting
        # its exit status
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1110 1110
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    # Run 'cmd' via ui.system while 'lock' is marked inheritable; the lock
    # token is handed to the child through environment variable 'envvar'.
    # Note: a caller-supplied 'environ' dict is mutated in place.
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1120 1120
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1129 1129
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1136 1136
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1142 1142
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # note: 'keys' is accepted but currently unused
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes the dict() constructor raise
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp gives all-or-nothing replacement of the file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1211 1211
# transaction-name prefixes for which registersummarycallback() (below)
# reports obsoleted changesets after the transaction closes
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes for which registersummarycallback() reports the
# range of new changesets added by the transaction
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1227 1232
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on 'txnname', the summary may report obsoleted changesets,
    newly introduced instabilities, and/or the range of new changesets.
    """
    def txmatch(sources):
        # True when this transaction's name starts with any listed prefix
        return any(txnname.startswith(source) for source in sources)

    # category names already registered on the transaction, used to build
    # unique, ordered post-close callback categories
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # zero-padded index keeps callbacks running in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets were obsoleted by this transaction
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) pairs for each instability kind
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions per kind, excluding filtered revs
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken at registration time so the callback can report
        # only the instabilities introduced by this transaction
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            # empty xrange default means "no new revs" when the key is absent
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1311 1316
def nodesummaries(repo, nodes, maxnumnodes=4):
    """return a short-hash summary of 'nodes', truncated unless verbose

    Shows every node when there are at most 'maxnumnodes' of them or the
    ui is verbose; otherwise shows the first 'maxnumnodes' plus a count
    of the remainder."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1317 1322
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    # strip/repair transactions may legitimately leave extra heads around
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1332 1337
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # deliberately a pass-through; extensions monkeypatch this function
    return sink
1338 1343
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access to hidden changesets is opt-in and only meaningful on a
    # filtered repo
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-like symbol appearing in the user-supplied revsets
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    # resolve the symbols that actually name hidden changesets
    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1381 1386
1382 1387 def _getrevsfromsymbols(repo, symbols):
1383 1388 """parse the list of symbols and returns a set of revision numbers of hidden
1384 1389 changesets present in symbols"""
1385 1390 revs = set()
1386 1391 unfi = repo.unfiltered()
1387 1392 unficl = unfi.changelog
1388 1393 cl = repo.changelog
1389 1394 tiprev = len(unficl)
1390 1395 pmatch = unficl._partialmatch
1391 1396 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1392 1397 for s in symbols:
1393 1398 try:
1394 1399 n = int(s)
1395 1400 if n <= tiprev:
1396 1401 if not allowrevnums:
1397 1402 continue
1398 1403 else:
1399 1404 if n not in cl:
1400 1405 revs.add(n)
1401 1406 continue
1402 1407 except ValueError:
1403 1408 pass
1404 1409
1405 1410 try:
1406 1411 s = pmatch(s)
1407 1412 except error.LookupError:
1408 1413 s = None
1409 1414
1410 1415 if s is not None:
1411 1416 rev = unficl.rev(s)
1412 1417 if rev not in cl:
1413 1418 revs.add(rev)
1414 1419
1415 1420 return revs
General Comments 0
You need to be logged in to leave comments. Login now