subrepo: adjust subrepo prefix before calling subrepo.add() (API)...
Martin von Zweigbergk
r41777:ed046348 default
@@ -1,3334 +1,3335 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13
14 14 from .i18n import _
15 15 from .node import (
16 16 hex,
17 17 nullid,
18 18 nullrev,
19 19 short,
20 20 )
21 21
22 22 from . import (
23 23 bookmarks,
24 24 changelog,
25 25 copies,
26 26 crecord as crecordmod,
27 27 dirstateguard,
28 28 encoding,
29 29 error,
30 30 formatter,
31 31 logcmdutil,
32 32 match as matchmod,
33 33 merge as mergemod,
34 34 mergeutil,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 revlog,
41 41 rewriteutil,
42 42 scmutil,
43 43 smartset,
44 44 subrepoutil,
45 45 templatekw,
46 46 templater,
47 47 util,
48 48 vfs as vfsmod,
49 49 )
50 50
51 51 from .utils import (
52 52 dateutil,
53 53 stringutil,
54 54 )
55 55
56 56 stringio = util.stringio
57 57
58 58 # templates of common command options
59 59
60 60 dryrunopts = [
61 61 ('n', 'dry-run', None,
62 62 _('do not perform actions, just print output')),
63 63 ]
64 64
65 65 confirmopts = [
66 66 ('', 'confirm', None,
67 67 _('ask before applying actions')),
68 68 ]
69 69
70 70 remoteopts = [
71 71 ('e', 'ssh', '',
72 72 _('specify ssh command to use'), _('CMD')),
73 73 ('', 'remotecmd', '',
74 74 _('specify hg command to run on the remote side'), _('CMD')),
75 75 ('', 'insecure', None,
76 76 _('do not verify server certificate (ignoring web.cacerts config)')),
77 77 ]
78 78
79 79 walkopts = [
80 80 ('I', 'include', [],
81 81 _('include names matching the given patterns'), _('PATTERN')),
82 82 ('X', 'exclude', [],
83 83 _('exclude names matching the given patterns'), _('PATTERN')),
84 84 ]
85 85
86 86 commitopts = [
87 87 ('m', 'message', '',
88 88 _('use text as commit message'), _('TEXT')),
89 89 ('l', 'logfile', '',
90 90 _('read commit message from file'), _('FILE')),
91 91 ]
92 92
93 93 commitopts2 = [
94 94 ('d', 'date', '',
95 95 _('record the specified date as commit date'), _('DATE')),
96 96 ('u', 'user', '',
97 97 _('record the specified user as committer'), _('USER')),
98 98 ]
99 99
100 100 formatteropts = [
101 101 ('T', 'template', '',
102 102 _('display with template'), _('TEMPLATE')),
103 103 ]
104 104
105 105 templateopts = [
106 106 ('', 'style', '',
107 107 _('display using template map file (DEPRECATED)'), _('STYLE')),
108 108 ('T', 'template', '',
109 109 _('display with template'), _('TEMPLATE')),
110 110 ]
111 111
112 112 logopts = [
113 113 ('p', 'patch', None, _('show patch')),
114 114 ('g', 'git', None, _('use git extended diff format')),
115 115 ('l', 'limit', '',
116 116 _('limit number of changes displayed'), _('NUM')),
117 117 ('M', 'no-merges', None, _('do not show merges')),
118 118 ('', 'stat', None, _('output diffstat-style summary of changes')),
119 119 ('G', 'graph', None, _("show the revision DAG")),
120 120 ] + templateopts
121 121
122 122 diffopts = [
123 123 ('a', 'text', None, _('treat all files as text')),
124 124 ('g', 'git', None, _('use git extended diff format')),
125 125 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
126 126 ('', 'nodates', None, _('omit dates from diff headers'))
127 127 ]
128 128
129 129 diffwsopts = [
130 130 ('w', 'ignore-all-space', None,
131 131 _('ignore white space when comparing lines')),
132 132 ('b', 'ignore-space-change', None,
133 133 _('ignore changes in the amount of white space')),
134 134 ('B', 'ignore-blank-lines', None,
135 135 _('ignore changes whose lines are all blank')),
136 136 ('Z', 'ignore-space-at-eol', None,
137 137 _('ignore changes in whitespace at EOL')),
138 138 ]
139 139
140 140 diffopts2 = [
141 141 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
142 142 ('p', 'show-function', None, _('show which function each change is in')),
143 143 ('', 'reverse', None, _('produce a diff that undoes the changes')),
144 144 ] + diffwsopts + [
145 145 ('U', 'unified', '',
146 146 _('number of lines of context to show'), _('NUM')),
147 147 ('', 'stat', None, _('output diffstat-style summary of changes')),
148 148 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
149 149 ]
150 150
151 151 mergetoolopts = [
152 152 ('t', 'tool', '', _('specify merge tool'), _('TOOL')),
153 153 ]
154 154
155 155 similarityopts = [
156 156 ('s', 'similarity', '',
157 157 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
158 158 ]
159 159
160 160 subrepoopts = [
161 161 ('S', 'subrepos', None,
162 162 _('recurse into subrepositories'))
163 163 ]
164 164
165 165 debugrevlogopts = [
166 166 ('c', 'changelog', False, _('open changelog')),
167 167 ('m', 'manifest', False, _('open manifest')),
168 168 ('', 'dir', '', _('open directory manifest')),
169 169 ]
170 170
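# Editor's note, not part of upstream cmdutil.py: each entry in the option
# tables above is a flag tuple of (short name, long name, default, help
# text[, value placeholder]).  Commands and extensions typically splice the
# shared tables into their own flag lists, roughly like this hypothetical
# sketch (command name and body are made up):
#
#   from mercurial import cmdutil, registrar
#   cmdtable = {}
#   command = registrar.command(cmdtable)
#
#   @command(b'mydiff', cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
#            b'hg mydiff [OPTION]... [FILE]...')
#   def mydiff(ui, repo, *pats, **opts):
#       pass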
171 171 # special string such that everything below this line will be ignored in the
172 172 # editor text
173 173 _linebelow = "^HG: ------------------------ >8 ------------------------$"
174 174
175 175 def ishunk(x):
176 176 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
177 177 return isinstance(x, hunkclasses)
178 178
179 179 def newandmodified(chunks, originalchunks):
180 180 newlyaddedandmodifiedfiles = set()
181 181 for chunk in chunks:
182 182 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
183 183 originalchunks:
184 184 newlyaddedandmodifiedfiles.add(chunk.header.filename())
185 185 return newlyaddedandmodifiedfiles
186 186
187 187 def parsealiases(cmd):
188 188 return cmd.split("|")
189 189
190 190 def setupwrapcolorwrite(ui):
191 191 # wrap ui.write so diff output can be labeled/colorized
192 192 def wrapwrite(orig, *args, **kw):
193 193 label = kw.pop(r'label', '')
194 194 for chunk, l in patch.difflabel(lambda: args):
195 195 orig(chunk, label=label + l)
196 196
197 197 oldwrite = ui.write
198 198 def wrap(*args, **kwargs):
199 199 return wrapwrite(oldwrite, *args, **kwargs)
200 200 setattr(ui, 'write', wrap)
201 201 return oldwrite
202 202
203 203 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
204 204 try:
205 205 if usecurses:
206 206 if testfile:
207 207 recordfn = crecordmod.testdecorator(
208 208 testfile, crecordmod.testchunkselector)
209 209 else:
210 210 recordfn = crecordmod.chunkselector
211 211
212 212 return crecordmod.filterpatch(ui, originalhunks, recordfn,
213 213 operation)
214 214 except crecordmod.fallbackerror as e:
215 215 ui.warn('%s\n' % e.message)
216 216 ui.warn(_('falling back to text mode\n'))
217 217
218 218 return patch.filterpatch(ui, originalhunks, operation)
219 219
220 220 def recordfilter(ui, originalhunks, operation=None):
221 221 """ Prompts the user to filter the originalhunks and returns a list of
222 222 selected hunks.
223 223 *operation* is used to build ui messages to indicate to the user what
224 224 kind of filtering they are doing: reverting, committing, shelving, etc.
225 225 (see patch.filterpatch).
226 226 """
227 227 usecurses = crecordmod.checkcurses(ui)
228 228 testfile = ui.config('experimental', 'crecordtest')
229 229 oldwrite = setupwrapcolorwrite(ui)
230 230 try:
231 231 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
232 232 testfile, operation)
233 233 finally:
234 234 ui.write = oldwrite
235 235 return newchunks, newopts
236 236
237 237 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
238 238 filterfn, *pats, **opts):
239 239 opts = pycompat.byteskwargs(opts)
240 240 if not ui.interactive():
241 241 if cmdsuggest:
242 242 msg = _('running non-interactively, use %s instead') % cmdsuggest
243 243 else:
244 244 msg = _('running non-interactively')
245 245 raise error.Abort(msg)
246 246
247 247 # make sure username is set before going interactive
248 248 if not opts.get('user'):
249 249 ui.username() # raise exception, username not provided
250 250
251 251 def recordfunc(ui, repo, message, match, opts):
252 252 """This is the generic record driver.
253 253
254 254 Its job is to interactively filter local changes, and
255 255 accordingly prepare the working directory into a state in which the
256 256 job can be delegated to a non-interactive commit command such as
257 257 'commit' or 'qrefresh'.
258 258
259 259 After the actual job is done by the non-interactive command, the
260 260 working directory is restored to its original state.
261 261
262 262 In the end we'll record interesting changes, and everything else
263 263 will be left in place, so the user can continue working.
264 264 """
265 265
266 266 checkunfinished(repo, commit=True)
267 267 wctx = repo[None]
268 268 merge = len(wctx.parents()) > 1
269 269 if merge:
270 270 raise error.Abort(_('cannot partially commit a merge '
271 271 '(use "hg commit" instead)'))
272 272
273 273 def fail(f, msg):
274 274 raise error.Abort('%s: %s' % (f, msg))
275 275
276 276 force = opts.get('force')
277 277 if not force:
278 278 vdirs = []
279 279 match.explicitdir = vdirs.append
280 280 match.bad = fail
281 281
282 282 status = repo.status(match=match)
283 283 if not force:
284 284 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
285 285 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True,
286 286 section='commands',
287 287 configprefix='commit.interactive.')
288 288 diffopts.nodates = True
289 289 diffopts.git = True
290 290 diffopts.showfunc = True
291 291 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
292 292 originalchunks = patch.parsepatch(originaldiff)
293 293
294 294 # 1. filter patch, since we are intending to apply a subset of it
295 295 try:
296 296 chunks, newopts = filterfn(ui, originalchunks)
297 297 except error.PatchError as err:
298 298 raise error.Abort(_('error parsing patch: %s') % err)
299 299 opts.update(newopts)
300 300
301 301 # We need to keep a backup of files that have been newly added and
302 302 # modified during the recording process because there is a previous
303 303 # version without the edit in the workdir
304 304 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
305 305 contenders = set()
306 306 for h in chunks:
307 307 try:
308 308 contenders.update(set(h.files()))
309 309 except AttributeError:
310 310 pass
311 311
312 312 changed = status.modified + status.added + status.removed
313 313 newfiles = [f for f in changed if f in contenders]
314 314 if not newfiles:
315 315 ui.status(_('no changes to record\n'))
316 316 return 0
317 317
318 318 modified = set(status.modified)
319 319
320 320 # 2. backup changed files, so we can restore them in the end
321 321
322 322 if backupall:
323 323 tobackup = changed
324 324 else:
325 325 tobackup = [f for f in newfiles if f in modified or f in \
326 326 newlyaddedandmodifiedfiles]
327 327 backups = {}
328 328 if tobackup:
329 329 backupdir = repo.vfs.join('record-backups')
330 330 try:
331 331 os.mkdir(backupdir)
332 332 except OSError as err:
333 333 if err.errno != errno.EEXIST:
334 334 raise
335 335 try:
336 336 # backup continues
337 337 for f in tobackup:
338 338 fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
339 339 dir=backupdir)
340 340 os.close(fd)
341 341 ui.debug('backup %r as %r\n' % (f, tmpname))
342 342 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
343 343 backups[f] = tmpname
344 344
345 345 fp = stringio()
346 346 for c in chunks:
347 347 fname = c.filename()
348 348 if fname in backups:
349 349 c.write(fp)
350 350 dopatch = fp.tell()
351 351 fp.seek(0)
352 352
353 353 # 2.5 optionally review / modify patch in text editor
354 354 if opts.get('review', False):
355 355 patchtext = (crecordmod.diffhelptext
356 356 + crecordmod.patchhelptext
357 357 + fp.read())
358 358 reviewedpatch = ui.edit(patchtext, "",
359 359 action="diff",
360 360 repopath=repo.path)
361 361 fp.truncate(0)
362 362 fp.write(reviewedpatch)
363 363 fp.seek(0)
364 364
365 365 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
366 366 # 3a. apply filtered patch to clean repo (clean)
367 367 if backups:
368 368 # Equivalent to hg.revert
369 369 m = scmutil.matchfiles(repo, backups.keys())
370 370 mergemod.update(repo, repo.dirstate.p1(), branchmerge=False,
371 371 force=True, matcher=m)
372 372
373 373 # 3b. (apply)
374 374 if dopatch:
375 375 try:
376 376 ui.debug('applying patch\n')
377 377 ui.debug(fp.getvalue())
378 378 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
379 379 except error.PatchError as err:
380 380 raise error.Abort(pycompat.bytestr(err))
381 381 del fp
382 382
383 383 # 4. We prepared the working directory according to the filtered
384 384 # patch. Now is the time to delegate the job to
385 385 # commit/qrefresh or the like!
386 386
387 387 # Make all of the pathnames absolute.
388 388 newfiles = [repo.wjoin(nf) for nf in newfiles]
389 389 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
390 390 finally:
391 391 # 5. finally restore backed-up files
392 392 try:
393 393 dirstate = repo.dirstate
394 394 for realname, tmpname in backups.iteritems():
395 395 ui.debug('restoring %r to %r\n' % (tmpname, realname))
396 396
397 397 if dirstate[realname] == 'n':
398 398 # without normallookup, restoring timestamp
399 399 # may cause partially committed files
400 400 # to be treated as unmodified
401 401 dirstate.normallookup(realname)
402 402
403 403 # copystat=True here and above are a hack to trick any
404 404 # editors that have f open into thinking we haven't modified them.
405 405 #
406 406 # Also note that this is racy, as an editor could notice the
407 407 # file's mtime before we've finished writing it.
408 408 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
409 409 os.unlink(tmpname)
410 410 if tobackup:
411 411 os.rmdir(backupdir)
412 412 except OSError:
413 413 pass
414 414
415 415 def recordinwlock(ui, repo, message, match, opts):
416 416 with repo.wlock():
417 417 return recordfunc(ui, repo, message, match, opts)
418 418
419 419 return commit(ui, repo, recordinwlock, pats, opts)
420 420
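# Editor's note, not part of upstream cmdutil.py: interactive commit-style
# commands drive dorecord() with a commit callable and recordfilter as the
# hunk filter, roughly like this hypothetical sketch:
#
#   def commitfunc(ui, repo, *files, **opts):
#       return repo.commit(opts.get('message'), opts.get('user'),
#                          opts.get('date'),
#                          match=scmutil.matchfiles(repo, files))
#
#   dorecord(ui, repo, commitfunc, b'commit', False, recordfilter,
#            *pats, **opts)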
421 421 class dirnode(object):
422 422 """
423 423 Represent a directory in the user's working copy with the information required for
424 424 the purpose of tersing its status.
425 425
426 426 path is the path to the directory, without a trailing '/'
427 427
428 428 statuses is a set of statuses of all files in this directory (this includes
429 429 all the files in all the subdirectories too)
430 430
431 431 files is a list of files which are direct children of this directory
432 432
433 433 subdirs is a dictionary with the sub-directory name as the key and its own
434 434 dirnode object as the value
435 435 """
436 436
437 437 def __init__(self, dirpath):
438 438 self.path = dirpath
439 439 self.statuses = set([])
440 440 self.files = []
441 441 self.subdirs = {}
442 442
443 443 def _addfileindir(self, filename, status):
444 444 """Add a file in this directory as a direct child."""
445 445 self.files.append((filename, status))
446 446
447 447 def addfile(self, filename, status):
448 448 """
449 449 Add a file to this directory or to its direct parent directory.
450 450
451 451 If the file is not a direct child of this directory, we traverse to the
452 452 directory of which this file is a direct child and add the file
453 453 there.
454 454 """
455 455
456 456 # if the filename contains a path separator, it is not a direct
457 457 # child of this directory
458 458 if '/' in filename:
459 459 subdir, filep = filename.split('/', 1)
460 460
461 461 # does the dirnode object for subdir exist?
462 462 if subdir not in self.subdirs:
463 463 subdirpath = pathutil.join(self.path, subdir)
464 464 self.subdirs[subdir] = dirnode(subdirpath)
465 465
466 466 # try adding the file in subdir
467 467 self.subdirs[subdir].addfile(filep, status)
468 468
469 469 else:
470 470 self._addfileindir(filename, status)
471 471
472 472 if status not in self.statuses:
473 473 self.statuses.add(status)
474 474
475 475 def iterfilepaths(self):
476 476 """Yield (status, path) for files directly under this directory."""
477 477 for f, st in self.files:
478 478 yield st, pathutil.join(self.path, f)
479 479
480 480 def tersewalk(self, terseargs):
481 481 """
482 482 Yield (status, path) obtained by processing the status of this
483 483 dirnode.
484 484
485 485 terseargs is the string of arguments passed by the user with the `--terse`
486 486 flag.
487 487
488 488 Following are the cases which can happen:
489 489
490 490 1) All the files in the directory (including all the files in its
491 491 subdirectories) share the same status and the user has asked us to terse
492 492 that status. -> yield (status, dirpath). dirpath will end in '/'.
493 493
494 494 2) Otherwise, we do the following:
495 495
496 496 a) Yield (status, filepath) for all the files which are in this
497 497 directory (only the ones in this directory, not the subdirs)
498 498
499 499 b) Recurse the function on all the subdirectories of this
500 500 directory
501 501 """
502 502
503 503 if len(self.statuses) == 1:
504 504 onlyst = self.statuses.pop()
505 505
506 506 # Making sure we terse only when the status abbreviation is
507 507 # passed as a terse argument
508 508 if onlyst in terseargs:
509 509 yield onlyst, self.path + '/'
510 510 return
511 511
512 512 # add the files to status list
513 513 for st, fpath in self.iterfilepaths():
514 514 yield st, fpath
515 515
516 516 # recurse on the subdirs
517 517 for dirobj in self.subdirs.values():
518 518 for st, fpath in dirobj.tersewalk(terseargs):
519 519 yield st, fpath
520 520
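# Editor's note, not part of upstream cmdutil.py: a minimal sketch of how the
# dirnode tree collapses statuses (the file names are made up):
#
#   d = dirnode(b'build')
#   d.addfile(b'a', b'u')
#   d.addfile(b'sub/b', b'u')
#   # every file under 'build' is unknown, so tersing on 'u' reports the
#   # directory once instead of listing each file:
#   list(d.tersewalk(b'u'))      # -> [(b'u', b'build/')]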
521 521 def tersedir(statuslist, terseargs):
522 522 """
523 523 Terse the status if all the files in a directory share the same status.
524 524
525 525 statuslist is a scmutil.status() object which contains a list of files for
526 526 each status.
527 527 terseargs is the string passed by the user as the argument to the `--terse`
528 528 flag.
529 529
530 530 The function makes a tree of objects of dirnode class, and at each node it
531 531 stores the information required to know whether we can terse a certain
532 532 directory or not.
533 533 """
534 534 # the order matters here as that is used to produce the final list
535 535 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
536 536
537 537 # checking the argument validity
538 538 for s in pycompat.bytestr(terseargs):
539 539 if s not in allst:
540 540 raise error.Abort(_("'%s' not recognized") % s)
541 541
542 542 # creating a dirnode object for the root of the repo
543 543 rootobj = dirnode('')
544 544 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
545 545 'ignored', 'removed')
546 546
547 547 tersedict = {}
548 548 for attrname in pstatus:
549 549 statuschar = attrname[0:1]
550 550 for f in getattr(statuslist, attrname):
551 551 rootobj.addfile(f, statuschar)
552 552 tersedict[statuschar] = []
553 553
554 554 # we won't be tersing the root dir, so add files in it
555 555 for st, fpath in rootobj.iterfilepaths():
556 556 tersedict[st].append(fpath)
557 557
558 558 # process each sub-directory and build tersedict
559 559 for subdir in rootobj.subdirs.values():
560 560 for st, f in subdir.tersewalk(terseargs):
561 561 tersedict[st].append(f)
562 562
563 563 tersedlist = []
564 564 for st in allst:
565 565 tersedict[st].sort()
566 566 tersedlist.append(tersedict[st])
567 567
568 568 return tersedlist
569 569
570 570 def _commentlines(raw):
571 571 '''Surround lines with a comment char and a new line'''
572 572 lines = raw.splitlines()
573 573 commentedlines = ['# %s' % line for line in lines]
574 574 return '\n'.join(commentedlines) + '\n'
575 575
576 576 def _conflictsmsg(repo):
577 577 mergestate = mergemod.mergestate.read(repo)
578 578 if not mergestate.active():
579 579 return
580 580
581 581 m = scmutil.match(repo[None])
582 582 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
583 583 if unresolvedlist:
584 584 mergeliststr = '\n'.join(
585 585 [' %s' % util.pathto(repo.root, encoding.getcwd(), path)
586 586 for path in sorted(unresolvedlist)])
587 587 msg = _('''Unresolved merge conflicts:
588 588
589 589 %s
590 590
591 591 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
592 592 else:
593 593 msg = _('No unresolved merge conflicts.')
594 594
595 595 return _commentlines(msg)
596 596
597 597 def _helpmessage(continuecmd, abortcmd):
598 598 msg = _('To continue: %s\n'
599 599 'To abort: %s') % (continuecmd, abortcmd)
600 600 return _commentlines(msg)
601 601
602 602 def _rebasemsg():
603 603 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
604 604
605 605 def _histeditmsg():
606 606 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
607 607
608 608 def _unshelvemsg():
609 609 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
610 610
611 611 def _graftmsg():
612 612 return _helpmessage('hg graft --continue', 'hg graft --abort')
613 613
614 614 def _mergemsg():
615 615 return _helpmessage('hg commit', 'hg merge --abort')
616 616
617 617 def _bisectmsg():
618 618 msg = _('To mark the changeset good: hg bisect --good\n'
619 619 'To mark the changeset bad: hg bisect --bad\n'
620 620 'To abort: hg bisect --reset\n')
621 621 return _commentlines(msg)
622 622
623 623 def fileexistspredicate(filename):
624 624 return lambda repo: repo.vfs.exists(filename)
625 625
626 626 def _mergepredicate(repo):
627 627 return len(repo[None].parents()) > 1
628 628
629 629 STATES = (
630 630 # (state, predicate to detect states, helpful message function)
631 631 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
632 632 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
633 633 ('graft', fileexistspredicate('graftstate'), _graftmsg),
634 634 ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
635 635 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
636 636 # The merge state is part of a list that will be iterated over.
637 637 # It needs to be last because some of the other unfinished states may also
638 638 # be in a merge or update state (eg. rebase, histedit, graft, etc).
639 639 # We want those to have priority.
640 640 ('merge', _mergepredicate, _mergemsg),
641 641 )
642 642
643 643 def _getrepostate(repo):
644 644 # experimental config: commands.status.skipstates
645 645 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
646 646 for state, statedetectionpredicate, msgfn in STATES:
647 647 if state in skip:
648 648 continue
649 649 if statedetectionpredicate(repo):
650 650 return (state, statedetectionpredicate, msgfn)
651 651
652 652 def morestatus(repo, fm):
653 653 statetuple = _getrepostate(repo)
654 654 label = 'status.morestatus'
655 655 if statetuple:
656 656 state, statedetectionpredicate, helpfulmsg = statetuple
657 657 statemsg = _('The repository is in an unfinished *%s* state.') % state
658 658 fm.plain('%s\n' % _commentlines(statemsg), label=label)
659 659 conmsg = _conflictsmsg(repo)
660 660 if conmsg:
661 661 fm.plain('%s\n' % conmsg, label=label)
662 662 if helpfulmsg:
663 663 helpmsg = helpfulmsg()
664 664 fm.plain('%s\n' % helpmsg, label=label)
665 665
666 666 def findpossible(cmd, table, strict=False):
667 667 """
668 668 Return cmd -> (aliases, command table entry)
669 669 for each matching command.
670 670 Return debug commands (or their aliases) only if no normal command matches.
671 671 """
672 672 choice = {}
673 673 debugchoice = {}
674 674
675 675 if cmd in table:
676 676 # short-circuit exact matches, "log" alias beats "log|history"
677 677 keys = [cmd]
678 678 else:
679 679 keys = table.keys()
680 680
681 681 allcmds = []
682 682 for e in keys:
683 683 aliases = parsealiases(e)
684 684 allcmds.extend(aliases)
685 685 found = None
686 686 if cmd in aliases:
687 687 found = cmd
688 688 elif not strict:
689 689 for a in aliases:
690 690 if a.startswith(cmd):
691 691 found = a
692 692 break
693 693 if found is not None:
694 694 if aliases[0].startswith("debug") or found.startswith("debug"):
695 695 debugchoice[found] = (aliases, table[e])
696 696 else:
697 697 choice[found] = (aliases, table[e])
698 698
699 699 if not choice and debugchoice:
700 700 choice = debugchoice
701 701
702 702 return choice, allcmds
703 703
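# Editor's note, not part of upstream cmdutil.py: a minimal sketch of the
# prefix matching above (the table contents are made up):
#
#   table = {b'status|st': (lambda: None, []), b'summary|sum': (lambda: None, [])}
#   choice, allcmds = findpossible(b'su', table)
#   # b'su' is a prefix of 'summary' only, so it resolves unambiguously:
#   sorted(choice)       # -> [b'summary']
#   sorted(allcmds)      # -> [b'st', b'status', b'sum', b'summary']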
704 704 def findcmd(cmd, table, strict=True):
705 705 """Return (aliases, command table entry) for command string."""
706 706 choice, allcmds = findpossible(cmd, table, strict)
707 707
708 708 if cmd in choice:
709 709 return choice[cmd]
710 710
711 711 if len(choice) > 1:
712 712 clist = sorted(choice)
713 713 raise error.AmbiguousCommand(cmd, clist)
714 714
715 715 if choice:
716 716 return list(choice.values())[0]
717 717
718 718 raise error.UnknownCommand(cmd, allcmds)
719 719
720 720 def changebranch(ui, repo, revs, label):
721 721 """ Change the branch name of given revs to label """
722 722
723 723 with repo.wlock(), repo.lock(), repo.transaction('branches'):
724 724 # abort in case of uncommitted merge or dirty wdir
725 725 bailifchanged(repo)
726 726 revs = scmutil.revrange(repo, revs)
727 727 if not revs:
728 728 raise error.Abort("empty revision set")
729 729 roots = repo.revs('roots(%ld)', revs)
730 730 if len(roots) > 1:
731 731 raise error.Abort(_("cannot change branch of non-linear revisions"))
732 732 rewriteutil.precheck(repo, revs, 'change branch of')
733 733
734 734 root = repo[roots.first()]
735 735 rpb = {parent.branch() for parent in root.parents()}
736 736 if label not in rpb and label in repo.branchmap():
737 737 raise error.Abort(_("a branch of the same name already exists"))
738 738
739 739 if repo.revs('obsolete() and %ld', revs):
740 740 raise error.Abort(_("cannot change branch of an obsolete changeset"))
741 741
742 742 # make sure only topological heads
743 743 if repo.revs('heads(%ld) - head()', revs):
744 744 raise error.Abort(_("cannot change branch in middle of a stack"))
745 745
746 746 replacements = {}
747 747 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
748 748 # mercurial.subrepo -> mercurial.cmdutil
749 749 from . import context
750 750 for rev in revs:
751 751 ctx = repo[rev]
752 752 oldbranch = ctx.branch()
753 753 # check if ctx has same branch
754 754 if oldbranch == label:
755 755 continue
756 756
757 757 def filectxfn(repo, newctx, path):
758 758 try:
759 759 return ctx[path]
760 760 except error.ManifestLookupError:
761 761 return None
762 762
763 763 ui.debug("changing branch of '%s' from '%s' to '%s'\n"
764 764 % (hex(ctx.node()), oldbranch, label))
765 765 extra = ctx.extra()
766 766 extra['branch_change'] = hex(ctx.node())
767 767 # While changing the branch of a set of linear commits, make sure that
768 768 # we base our commits on the new parent rather than the old parent, which
769 769 # was obsoleted while changing the branch
770 770 p1 = ctx.p1().node()
771 771 p2 = ctx.p2().node()
772 772 if p1 in replacements:
773 773 p1 = replacements[p1][0]
774 774 if p2 in replacements:
775 775 p2 = replacements[p2][0]
776 776
777 777 mc = context.memctx(repo, (p1, p2),
778 778 ctx.description(),
779 779 ctx.files(),
780 780 filectxfn,
781 781 user=ctx.user(),
782 782 date=ctx.date(),
783 783 extra=extra,
784 784 branch=label)
785 785
786 786 newnode = repo.commitctx(mc)
787 787 replacements[ctx.node()] = (newnode,)
788 788 ui.debug('new node id is %s\n' % hex(newnode))
789 789
790 790 # create obsmarkers and move bookmarks
791 791 scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)
792 792
793 793 # move the working copy too
794 794 wctx = repo[None]
795 795 # in-progress merge is a bit too complex for now.
796 796 if len(wctx.parents()) == 1:
797 797 newid = replacements.get(wctx.p1().node())
798 798 if newid is not None:
799 799 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
800 800 # mercurial.cmdutil
801 801 from . import hg
802 802 hg.update(repo, newid[0], quietempty=True)
803 803
804 804 ui.status(_("changed branch on %d changesets\n") % len(replacements))
805 805
806 806 def findrepo(p):
807 807 while not os.path.isdir(os.path.join(p, ".hg")):
808 808 oldp, p = p, os.path.dirname(p)
809 809 if p == oldp:
810 810 return None
811 811
812 812 return p
813 813
814 814 def bailifchanged(repo, merge=True, hint=None):
815 815 """ enforce the precondition that the working directory must be clean.
816 816
817 817 'merge' can be set to false if a pending uncommitted merge should be
818 818 ignored (such as when 'update --check' runs).
819 819
820 820 'hint' is the usual hint given to Abort exception.
821 821 """
822 822
823 823 if merge and repo.dirstate.p2() != nullid:
824 824 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
825 825 modified, added, removed, deleted = repo.status()[:4]
826 826 if modified or added or removed or deleted:
827 827 raise error.Abort(_('uncommitted changes'), hint=hint)
828 828 ctx = repo[None]
829 829 for s in sorted(ctx.substate):
830 830 ctx.sub(s).bailifchanged(hint=hint)
831 831
832 832 def logmessage(ui, opts):
833 833 """ get the log message according to the -m and -l options """
834 834 message = opts.get('message')
835 835 logfile = opts.get('logfile')
836 836
837 837 if message and logfile:
838 838 raise error.Abort(_('options --message and --logfile are mutually '
839 839 'exclusive'))
840 840 if not message and logfile:
841 841 try:
842 842 if isstdiofilename(logfile):
843 843 message = ui.fin.read()
844 844 else:
845 845 message = '\n'.join(util.readfile(logfile).splitlines())
846 846 except IOError as inst:
847 847 raise error.Abort(_("can't read commit message '%s': %s") %
848 848 (logfile, encoding.strtolocal(inst.strerror)))
849 849 return message
850 850
851 851 def mergeeditform(ctxorbool, baseformname):
852 852 """return appropriate editform name (referencing a committemplate)
853 853
854 854 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
855 855 a merge is being committed.
856 856
857 857 This returns baseformname with '.merge' appended if it is a merge,
858 858 otherwise '.normal' is appended.
859 859 """
860 860 if isinstance(ctxorbool, bool):
861 861 if ctxorbool:
862 862 return baseformname + ".merge"
863 863 elif len(ctxorbool.parents()) > 1:
864 864 return baseformname + ".merge"
865 865
866 866 return baseformname + ".normal"
867 867
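# Editor's note, not part of upstream cmdutil.py: mergeeditform() only picks
# the editform suffix, e.g.
#
#   mergeeditform(True, b'import.normal')    # -> b'import.normal.merge'
#   mergeeditform(False, b'import.normal')   # -> b'import.normal.normal'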
868 868 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
869 869 editform='', **opts):
870 870 """get appropriate commit message editor according to '--edit' option
871 871
872 872 'finishdesc' is a function to be called with edited commit message
873 873 (= 'description' of the new changeset) just after editing, but
874 874 before checking empty-ness. It should return actual text to be
875 875 stored into history. This allows changing the description before
876 876 storing.
877 877
878 878 'extramsg' is an extra message to be shown in the editor instead of
879 879 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
880 880 are automatically added.
881 881
882 882 'editform' is a dot-separated list of names, to distinguish
883 883 the purpose of commit text editing.
884 884
885 885 'getcommiteditor' returns 'commitforceeditor' regardless of
886 886 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
887 887 they are specific to usage in MQ.
888 888 """
889 889 if edit or finishdesc or extramsg:
890 890 return lambda r, c, s: commitforceeditor(r, c, s,
891 891 finishdesc=finishdesc,
892 892 extramsg=extramsg,
893 893 editform=editform)
894 894 elif editform:
895 895 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
896 896 else:
897 897 return commiteditor
898 898
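# Editor's note, not part of upstream cmdutil.py: callers usually build the
# editor from the command options and pass it on to repo.commit(), roughly
# like this hypothetical sketch:
#
#   editor = getcommiteditor(editform=b'commit.normal', **opts)
#   node = repo.commit(message, user, date, match=m, editor=editor)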
899 899 def _escapecommandtemplate(tmpl):
900 900 parts = []
901 901 for typ, start, end in templater.scantemplate(tmpl, raw=True):
902 902 if typ == b'string':
903 903 parts.append(stringutil.escapestr(tmpl[start:end]))
904 904 else:
905 905 parts.append(tmpl[start:end])
906 906 return b''.join(parts)
907 907
908 908 def rendercommandtemplate(ui, tmpl, props):
909 909 r"""Expand a literal template 'tmpl' in a way suitable for command line
910 910
911 911 '\' in outermost string is not taken as an escape character because it
912 912 is a directory separator on Windows.
913 913
914 914 >>> from . import ui as uimod
915 915 >>> ui = uimod.ui()
916 916 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
917 917 'c:\\foo'
918 918 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
919 919 'c:{path}'
920 920 """
921 921 if not tmpl:
922 922 return tmpl
923 923 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
924 924 return t.renderdefault(props)
925 925
926 926 def rendertemplate(ctx, tmpl, props=None):
927 927 """Expand a literal template 'tmpl' byte-string against one changeset
928 928
929 929 Each props item must be a stringify-able value or a callable returning
930 930 such a value, i.e. no bare list or dict should be passed.
931 931 """
932 932 repo = ctx.repo()
933 933 tres = formatter.templateresources(repo.ui, repo)
934 934 t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
935 935 resources=tres)
936 936 mapping = {'ctx': ctx}
937 937 if props:
938 938 mapping.update(props)
939 939 return t.renderdefault(mapping)
940 940
941 941 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
942 942 r"""Convert old-style filename format string to template string
943 943
944 944 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
945 945 'foo-{reporoot|basename}-{seqno}.patch'
946 946 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
947 947 '{rev}{tags % "{tag}"}{node}'
948 948
949 949 '\' in outermost strings has to be escaped because it is a directory
950 950 separator on Windows:
951 951
952 952 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
953 953 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
954 954 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
955 955 '\\\\\\\\foo\\\\bar.patch'
956 956 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
957 957 '\\\\{tags % "{tag}"}'
958 958
959 959 but inner strings follow the template rules (i.e. '\' is taken as an
960 960 escape character):
961 961
962 962 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
963 963 '{"c:\\tmp"}'
964 964 """
965 965 expander = {
966 966 b'H': b'{node}',
967 967 b'R': b'{rev}',
968 968 b'h': b'{node|short}',
969 969 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
970 970 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
971 971 b'%': b'%',
972 972 b'b': b'{reporoot|basename}',
973 973 }
974 974 if total is not None:
975 975 expander[b'N'] = b'{total}'
976 976 if seqno is not None:
977 977 expander[b'n'] = b'{seqno}'
978 978 if total is not None and seqno is not None:
979 979 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
980 980 if pathname is not None:
981 981 expander[b's'] = b'{pathname|basename}'
982 982 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
983 983 expander[b'p'] = b'{pathname}'
984 984
985 985 newname = []
986 986 for typ, start, end in templater.scantemplate(pat, raw=True):
987 987 if typ != b'string':
988 988 newname.append(pat[start:end])
989 989 continue
990 990 i = start
991 991 while i < end:
992 992 n = pat.find(b'%', i, end)
993 993 if n < 0:
994 994 newname.append(stringutil.escapestr(pat[i:end]))
995 995 break
996 996 newname.append(stringutil.escapestr(pat[i:n]))
997 997 if n + 2 > end:
998 998 raise error.Abort(_("incomplete format spec in output "
999 999 "filename"))
1000 1000 c = pat[n + 1:n + 2]
1001 1001 i = n + 2
1002 1002 try:
1003 1003 newname.append(expander[c])
1004 1004 except KeyError:
1005 1005 raise error.Abort(_("invalid format spec '%%%s' in output "
1006 1006 "filename") % c)
1007 1007 return ''.join(newname)
1008 1008
1009 1009 def makefilename(ctx, pat, **props):
1010 1010 if not pat:
1011 1011 return pat
1012 1012 tmpl = _buildfntemplate(pat, **props)
1013 1013 # BUG: alias expansion shouldn't be made against template fragments
1014 1014 # rewritten from %-format strings, but we have no easy way to partially
1015 1015 # disable the expansion.
1016 1016 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1017 1017
1018 1018 def isstdiofilename(pat):
1019 1019 """True if the given pat looks like a filename denoting stdin/stdout"""
1020 1020 return not pat or pat == '-'
1021 1021
1022 1022 class _unclosablefile(object):
1023 1023 def __init__(self, fp):
1024 1024 self._fp = fp
1025 1025
1026 1026 def close(self):
1027 1027 pass
1028 1028
1029 1029 def __iter__(self):
1030 1030 return iter(self._fp)
1031 1031
1032 1032 def __getattr__(self, attr):
1033 1033 return getattr(self._fp, attr)
1034 1034
1035 1035 def __enter__(self):
1036 1036 return self
1037 1037
1038 1038 def __exit__(self, exc_type, exc_value, exc_tb):
1039 1039 pass
1040 1040
1041 1041 def makefileobj(ctx, pat, mode='wb', **props):
1042 1042 writable = mode not in ('r', 'rb')
1043 1043
1044 1044 if isstdiofilename(pat):
1045 1045 repo = ctx.repo()
1046 1046 if writable:
1047 1047 fp = repo.ui.fout
1048 1048 else:
1049 1049 fp = repo.ui.fin
1050 1050 return _unclosablefile(fp)
1051 1051 fn = makefilename(ctx, pat, **props)
1052 1052 return open(fn, mode)
1053 1053
1054 1054 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1055 1055 """opens the changelog, manifest, a filelog or a given revlog"""
1056 1056 cl = opts['changelog']
1057 1057 mf = opts['manifest']
1058 1058 dir = opts['dir']
1059 1059 msg = None
1060 1060 if cl and mf:
1061 1061 msg = _('cannot specify --changelog and --manifest at the same time')
1062 1062 elif cl and dir:
1063 1063 msg = _('cannot specify --changelog and --dir at the same time')
1064 1064 elif cl or mf or dir:
1065 1065 if file_:
1066 1066 msg = _('cannot specify filename with --changelog or --manifest')
1067 1067 elif not repo:
1068 1068 msg = _('cannot specify --changelog or --manifest or --dir '
1069 1069 'without a repository')
1070 1070 if msg:
1071 1071 raise error.Abort(msg)
1072 1072
1073 1073 r = None
1074 1074 if repo:
1075 1075 if cl:
1076 1076 r = repo.unfiltered().changelog
1077 1077 elif dir:
1078 1078 if 'treemanifest' not in repo.requirements:
1079 1079 raise error.Abort(_("--dir can only be used on repos with "
1080 1080 "treemanifest enabled"))
1081 1081 if not dir.endswith('/'):
1082 1082 dir = dir + '/'
1083 1083 dirlog = repo.manifestlog.getstorage(dir)
1084 1084 if len(dirlog):
1085 1085 r = dirlog
1086 1086 elif mf:
1087 1087 r = repo.manifestlog.getstorage(b'')
1088 1088 elif file_:
1089 1089 filelog = repo.file(file_)
1090 1090 if len(filelog):
1091 1091 r = filelog
1092 1092
1093 1093 # Not all storage may be revlogs. If requested, try to return an actual
1094 1094 # revlog instance.
1095 1095 if returnrevlog:
1096 1096 if isinstance(r, revlog.revlog):
1097 1097 pass
1098 1098 elif util.safehasattr(r, '_revlog'):
1099 1099 r = r._revlog
1100 1100 elif r is not None:
1101 1101 raise error.Abort(_('%r does not appear to be a revlog') % r)
1102 1102
1103 1103 if not r:
1104 1104 if not returnrevlog:
1105 1105 raise error.Abort(_('cannot give path to non-revlog'))
1106 1106
1107 1107 if not file_:
1108 1108 raise error.CommandError(cmd, _('invalid arguments'))
1109 1109 if not os.path.isfile(file_):
1110 1110 raise error.Abort(_("revlog '%s' not found") % file_)
1111 1111 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
1112 1112 file_[:-2] + ".i")
1113 1113 return r
1114 1114
1115 1115 def openrevlog(repo, cmd, file_, opts):
1116 1116 """Obtain a revlog backing storage of an item.
1117 1117
1118 1118 This is similar to ``openstorage()`` except it always returns a revlog.
1119 1119
1120 1120 In most cases, a caller cares about the main storage object - not the
1121 1121 revlog backing it. Therefore, this function should only be used by code
1122 1122 that needs to examine low-level revlog implementation details, e.g. debug
1123 1123 commands.
1124 1124 """
1125 1125 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1126 1126
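# Editor's note, not part of upstream cmdutil.py: debug commands typically
# pair debugrevlogopts with openrevlog(), roughly like this hypothetical
# sketch (the command name is made up):
#
#   @command(b'debugmystats', debugrevlogopts, b'-c|-m|FILE')
#   def debugmystats(ui, repo, file_=None, **opts):
#       r = openrevlog(repo, b'debugmystats', file_, pycompat.byteskwargs(opts))
#       ui.write(b'%d revisions\n' % len(r))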
1127 1127 def copy(ui, repo, pats, opts, rename=False):
1128 1128 # called with the repo lock held
1129 1129 #
1130 1130 # hgsep => pathname that uses "/" to separate directories
1131 1131 # ossep => pathname that uses os.sep to separate directories
1132 1132 cwd = repo.getcwd()
1133 1133 targets = {}
1134 1134 after = opts.get("after")
1135 1135 dryrun = opts.get("dry_run")
1136 1136 wctx = repo[None]
1137 1137
1138 1138 def walkpat(pat):
1139 1139 srcs = []
1140 1140 if after:
1141 1141 badstates = '?'
1142 1142 else:
1143 1143 badstates = '?r'
1144 1144 m = scmutil.match(wctx, [pat], opts, globbed=True)
1145 1145 for abs in wctx.walk(m):
1146 1146 state = repo.dirstate[abs]
1147 1147 rel = m.rel(abs)
1148 1148 exact = m.exact(abs)
1149 1149 if state in badstates:
1150 1150 if exact and state == '?':
1151 1151 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1152 1152 if exact and state == 'r':
1153 1153 ui.warn(_('%s: not copying - file has been marked for'
1154 1154 ' remove\n') % rel)
1155 1155 continue
1156 1156 # abs: hgsep
1157 1157 # rel: ossep
1158 1158 srcs.append((abs, rel, exact))
1159 1159 return srcs
1160 1160
1161 1161 # abssrc: hgsep
1162 1162 # relsrc: ossep
1163 1163 # otarget: ossep
1164 1164 def copyfile(abssrc, relsrc, otarget, exact):
1165 1165 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1166 1166 if '/' in abstarget:
1167 1167 # We cannot normalize abstarget itself, this would prevent
1168 1168 # case-only renames, like a => A.
1169 1169 abspath, absname = abstarget.rsplit('/', 1)
1170 1170 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1171 1171 reltarget = repo.pathto(abstarget, cwd)
1172 1172 target = repo.wjoin(abstarget)
1173 1173 src = repo.wjoin(abssrc)
1174 1174 state = repo.dirstate[abstarget]
1175 1175
1176 1176 scmutil.checkportable(ui, abstarget)
1177 1177
1178 1178 # check for collisions
1179 1179 prevsrc = targets.get(abstarget)
1180 1180 if prevsrc is not None:
1181 1181 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1182 1182 (reltarget, repo.pathto(abssrc, cwd),
1183 1183 repo.pathto(prevsrc, cwd)))
1184 1184 return True # report a failure
1185 1185
1186 1186 # check for overwrites
1187 1187 exists = os.path.lexists(target)
1188 1188 samefile = False
1189 1189 if exists and abssrc != abstarget:
1190 1190 if (repo.dirstate.normalize(abssrc) ==
1191 1191 repo.dirstate.normalize(abstarget)):
1192 1192 if not rename:
1193 1193 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1194 1194 return True # report a failure
1195 1195 exists = False
1196 1196 samefile = True
1197 1197
1198 1198 if not after and exists or after and state in 'mn':
1199 1199 if not opts['force']:
1200 1200 if state in 'mn':
1201 1201 msg = _('%s: not overwriting - file already committed\n')
1202 1202 if after:
1203 1203 flags = '--after --force'
1204 1204 else:
1205 1205 flags = '--force'
1206 1206 if rename:
1207 1207 hint = _("('hg rename %s' to replace the file by "
1208 1208 'recording a rename)\n') % flags
1209 1209 else:
1210 1210 hint = _("('hg copy %s' to replace the file by "
1211 1211 'recording a copy)\n') % flags
1212 1212 else:
1213 1213 msg = _('%s: not overwriting - file exists\n')
1214 1214 if rename:
1215 1215 hint = _("('hg rename --after' to record the rename)\n")
1216 1216 else:
1217 1217 hint = _("('hg copy --after' to record the copy)\n")
1218 1218 ui.warn(msg % reltarget)
1219 1219 ui.warn(hint)
1220 1220 return True # report a failure
1221 1221
1222 1222 if after:
1223 1223 if not exists:
1224 1224 if rename:
1225 1225 ui.warn(_('%s: not recording move - %s does not exist\n') %
1226 1226 (relsrc, reltarget))
1227 1227 else:
1228 1228 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1229 1229 (relsrc, reltarget))
1230 1230 return True # report a failure
1231 1231 elif not dryrun:
1232 1232 try:
1233 1233 if exists:
1234 1234 os.unlink(target)
1235 1235 targetdir = os.path.dirname(target) or '.'
1236 1236 if not os.path.isdir(targetdir):
1237 1237 os.makedirs(targetdir)
1238 1238 if samefile:
1239 1239 tmp = target + "~hgrename"
1240 1240 os.rename(src, tmp)
1241 1241 os.rename(tmp, target)
1242 1242 else:
1243 1243 # Preserve stat info on renames, not on copies; this matches
1244 1244 # Linux CLI behavior.
1245 1245 util.copyfile(src, target, copystat=rename)
1246 1246 srcexists = True
1247 1247 except IOError as inst:
1248 1248 if inst.errno == errno.ENOENT:
1249 1249 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1250 1250 srcexists = False
1251 1251 else:
1252 1252 ui.warn(_('%s: cannot copy - %s\n') %
1253 1253 (relsrc, encoding.strtolocal(inst.strerror)))
1254 1254 return True # report a failure
1255 1255
1256 1256 if ui.verbose or not exact:
1257 1257 if rename:
1258 1258 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1259 1259 else:
1260 1260 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1261 1261
1262 1262 targets[abstarget] = abssrc
1263 1263
1264 1264 # fix up dirstate
1265 1265 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1266 1266 dryrun=dryrun, cwd=cwd)
1267 1267 if rename and not dryrun:
1268 1268 if not after and srcexists and not samefile:
1269 1269 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
1270 1270 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1271 1271 wctx.forget([abssrc])
1272 1272
1273 1273 # pat: ossep
1274 1274 # dest: ossep
1275 1275 # srcs: list of (hgsep, hgsep, ossep, bool)
1276 1276 # return: function that takes hgsep and returns ossep
1277 1277 def targetpathfn(pat, dest, srcs):
1278 1278 if os.path.isdir(pat):
1279 1279 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1280 1280 abspfx = util.localpath(abspfx)
1281 1281 if destdirexists:
1282 1282 striplen = len(os.path.split(abspfx)[0])
1283 1283 else:
1284 1284 striplen = len(abspfx)
1285 1285 if striplen:
1286 1286 striplen += len(pycompat.ossep)
1287 1287 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1288 1288 elif destdirexists:
1289 1289 res = lambda p: os.path.join(dest,
1290 1290 os.path.basename(util.localpath(p)))
1291 1291 else:
1292 1292 res = lambda p: dest
1293 1293 return res
1294 1294
1295 1295 # pat: ossep
1296 1296 # dest: ossep
1297 1297 # srcs: list of (hgsep, hgsep, ossep, bool)
1298 1298 # return: function that takes hgsep and returns ossep
1299 1299 def targetpathafterfn(pat, dest, srcs):
1300 1300 if matchmod.patkind(pat):
1301 1301 # a mercurial pattern
1302 1302 res = lambda p: os.path.join(dest,
1303 1303 os.path.basename(util.localpath(p)))
1304 1304 else:
1305 1305 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1306 1306 if len(abspfx) < len(srcs[0][0]):
1307 1307 # A directory. Either the target path contains the last
1308 1308 # component of the source path or it does not.
1309 1309 def evalpath(striplen):
1310 1310 score = 0
1311 1311 for s in srcs:
1312 1312 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1313 1313 if os.path.lexists(t):
1314 1314 score += 1
1315 1315 return score
1316 1316
1317 1317 abspfx = util.localpath(abspfx)
1318 1318 striplen = len(abspfx)
1319 1319 if striplen:
1320 1320 striplen += len(pycompat.ossep)
1321 1321 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1322 1322 score = evalpath(striplen)
1323 1323 striplen1 = len(os.path.split(abspfx)[0])
1324 1324 if striplen1:
1325 1325 striplen1 += len(pycompat.ossep)
1326 1326 if evalpath(striplen1) > score:
1327 1327 striplen = striplen1
1328 1328 res = lambda p: os.path.join(dest,
1329 1329 util.localpath(p)[striplen:])
1330 1330 else:
1331 1331 # a file
1332 1332 if destdirexists:
1333 1333 res = lambda p: os.path.join(dest,
1334 1334 os.path.basename(util.localpath(p)))
1335 1335 else:
1336 1336 res = lambda p: dest
1337 1337 return res
1338 1338
1339 1339 pats = scmutil.expandpats(pats)
1340 1340 if not pats:
1341 1341 raise error.Abort(_('no source or destination specified'))
1342 1342 if len(pats) == 1:
1343 1343 raise error.Abort(_('no destination specified'))
1344 1344 dest = pats.pop()
1345 1345 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1346 1346 if not destdirexists:
1347 1347 if len(pats) > 1 or matchmod.patkind(pats[0]):
1348 1348 raise error.Abort(_('with multiple sources, destination must be an '
1349 1349 'existing directory'))
1350 1350 if util.endswithsep(dest):
1351 1351 raise error.Abort(_('destination %s is not a directory') % dest)
1352 1352
1353 1353 tfn = targetpathfn
1354 1354 if after:
1355 1355 tfn = targetpathafterfn
1356 1356 copylist = []
1357 1357 for pat in pats:
1358 1358 srcs = walkpat(pat)
1359 1359 if not srcs:
1360 1360 continue
1361 1361 copylist.append((tfn(pat, dest, srcs), srcs))
1362 1362 if not copylist:
1363 1363 raise error.Abort(_('no files to copy'))
1364 1364
1365 1365 errors = 0
1366 1366 for targetpath, srcs in copylist:
1367 1367 for abssrc, relsrc, exact in srcs:
1368 1368 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1369 1369 errors += 1
1370 1370
1371 1371 return errors != 0
1372 1372
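# Editor's note, not part of upstream cmdutil.py: 'hg copy' and 'hg rename'
# both funnel into copy() above, with the rename flag switching the
# behaviour, roughly like this hypothetical sketch:
#
#   with repo.wlock(False):
#       failed = copy(ui, repo, pats, opts, rename=True)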
1373 1373 ## facility to let extensions process additional data into an import patch
1374 1374 # list of identifiers to be executed in order
1375 1375 extrapreimport = [] # run before commit
1376 1376 extrapostimport = [] # run after commit
1377 1377 # mapping from identifier to actual import function
1378 1378 #
1379 1379 # 'preimport' are run before the commit is made and are provided the following
1380 1380 # arguments:
1381 1381 # - repo: the localrepository instance,
1382 1382 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1383 1383 # - extra: the future extra dictionary of the changeset, please mutate it,
1384 1384 # - opts: the import options.
1385 1385 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
1386 1386 # mutation of the in-memory commit and more. Feel free to rework the code to get
1387 1387 # there.
1388 1388 extrapreimportmap = {}
1389 1389 # 'postimport' are run after the commit is made and are provided the following
1390 1390 # argument:
1391 1391 # - ctx: the changectx created by import.
1392 1392 extrapostimportmap = {}
1393 1393
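# Editor's note, not part of upstream cmdutil.py: an extension can hook into
# the import machinery above roughly like this (the identifier and hook are
# made up):
#
#   def _recordsource(repo, patchdata, extra, opts):
#       # remember the original node id, if the patch carried one
#       if patchdata.get('nodeid'):
#           extra['source-node'] = patchdata['nodeid']
#
#   extrapreimport.append('source-node')
#   extrapreimportmap['source-node'] = _recordsource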
1394 1394 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1395 1395 """Utility function used by commands.import to import a single patch
1396 1396
1397 1397 This function is explicitly defined here to help the evolve extension to
1398 1398 wrap this part of the import logic.
1399 1399
1400 1400 The API is currently a bit ugly because it is a simple code translation from
1401 1401 the import command. Feel free to make it better.
1402 1402
1403 1403 :patchdata: a dictionary containing parsed patch data (such as from
1404 1404 ``patch.extract()``)
1405 1405 :parents: nodes that will be parent of the created commit
1406 1406 :opts: the full dict of options passed to the import command
1407 1407 :msgs: list to save commit message to.
1408 1408 (used in case we need to save it when failing)
1409 1409 :updatefunc: a function that updates a repo to a given node
1410 1410 updatefunc(<repo>, <node>)
1411 1411 """
1412 1412 # avoid cycle context -> subrepo -> cmdutil
1413 1413 from . import context
1414 1414
1415 1415 tmpname = patchdata.get('filename')
1416 1416 message = patchdata.get('message')
1417 1417 user = opts.get('user') or patchdata.get('user')
1418 1418 date = opts.get('date') or patchdata.get('date')
1419 1419 branch = patchdata.get('branch')
1420 1420 nodeid = patchdata.get('nodeid')
1421 1421 p1 = patchdata.get('p1')
1422 1422 p2 = patchdata.get('p2')
1423 1423
1424 1424 nocommit = opts.get('no_commit')
1425 1425 importbranch = opts.get('import_branch')
1426 1426 update = not opts.get('bypass')
1427 1427 strip = opts["strip"]
1428 1428 prefix = opts["prefix"]
1429 1429 sim = float(opts.get('similarity') or 0)
1430 1430
1431 1431 if not tmpname:
1432 1432 return None, None, False
1433 1433
1434 1434 rejects = False
1435 1435
1436 1436 cmdline_message = logmessage(ui, opts)
1437 1437 if cmdline_message:
1438 1438 # pick up the cmdline msg
1439 1439 message = cmdline_message
1440 1440 elif message:
1441 1441 # pick up the patch msg
1442 1442 message = message.strip()
1443 1443 else:
1444 1444 # launch the editor
1445 1445 message = None
1446 1446 ui.debug('message:\n%s\n' % (message or ''))
1447 1447
1448 1448 if len(parents) == 1:
1449 1449 parents.append(repo[nullid])
1450 1450 if opts.get('exact'):
1451 1451 if not nodeid or not p1:
1452 1452 raise error.Abort(_('not a Mercurial patch'))
1453 1453 p1 = repo[p1]
1454 1454 p2 = repo[p2 or nullid]
1455 1455 elif p2:
1456 1456 try:
1457 1457 p1 = repo[p1]
1458 1458 p2 = repo[p2]
1459 1459 # Without any options, consider p2 only if the
1460 1460 # patch is being applied on top of the recorded
1461 1461 # first parent.
1462 1462 if p1 != parents[0]:
1463 1463 p1 = parents[0]
1464 1464 p2 = repo[nullid]
1465 1465 except error.RepoError:
1466 1466 p1, p2 = parents
1467 1467 if p2.node() == nullid:
1468 1468 ui.warn(_("warning: importing the patch as a normal revision\n"
1469 1469 "(use --exact to import the patch as a merge)\n"))
1470 1470 else:
1471 1471 p1, p2 = parents
1472 1472
1473 1473 n = None
1474 1474 if update:
1475 1475 if p1 != parents[0]:
1476 1476 updatefunc(repo, p1.node())
1477 1477 if p2 != parents[1]:
1478 1478 repo.setparents(p1.node(), p2.node())
1479 1479
1480 1480 if opts.get('exact') or importbranch:
1481 1481 repo.dirstate.setbranch(branch or 'default')
1482 1482
1483 1483 partial = opts.get('partial', False)
1484 1484 files = set()
1485 1485 try:
1486 1486 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1487 1487 files=files, eolmode=None, similarity=sim / 100.0)
1488 1488 except error.PatchError as e:
1489 1489 if not partial:
1490 1490 raise error.Abort(pycompat.bytestr(e))
1491 1491 if partial:
1492 1492 rejects = True
1493 1493
1494 1494 files = list(files)
1495 1495 if nocommit:
1496 1496 if message:
1497 1497 msgs.append(message)
1498 1498 else:
1499 1499 if opts.get('exact') or p2:
1500 1500 # If you got here, you either used --force and know what
1501 1501 # you are doing or used --exact or a merge patch while
1502 1502 # being updated to its first parent.
1503 1503 m = None
1504 1504 else:
1505 1505 m = scmutil.matchfiles(repo, files or [])
1506 1506 editform = mergeeditform(repo[None], 'import.normal')
1507 1507 if opts.get('exact'):
1508 1508 editor = None
1509 1509 else:
1510 1510 editor = getcommiteditor(editform=editform,
1511 1511 **pycompat.strkwargs(opts))
1512 1512 extra = {}
1513 1513 for idfunc in extrapreimport:
1514 1514 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1515 1515 overrides = {}
1516 1516 if partial:
1517 1517 overrides[('ui', 'allowemptycommit')] = True
1518 1518 with repo.ui.configoverride(overrides, 'import'):
1519 1519 n = repo.commit(message, user,
1520 1520 date, match=m,
1521 1521 editor=editor, extra=extra)
1522 1522 for idfunc in extrapostimport:
1523 1523 extrapostimportmap[idfunc](repo[n])
1524 1524 else:
1525 1525 if opts.get('exact') or importbranch:
1526 1526 branch = branch or 'default'
1527 1527 else:
1528 1528 branch = p1.branch()
1529 1529 store = patch.filestore()
1530 1530 try:
1531 1531 files = set()
1532 1532 try:
1533 1533 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1534 1534 files, eolmode=None)
1535 1535 except error.PatchError as e:
1536 1536 raise error.Abort(stringutil.forcebytestr(e))
1537 1537 if opts.get('exact'):
1538 1538 editor = None
1539 1539 else:
1540 1540 editor = getcommiteditor(editform='import.bypass')
1541 1541 memctx = context.memctx(repo, (p1.node(), p2.node()),
1542 1542 message,
1543 1543 files=files,
1544 1544 filectxfn=store,
1545 1545 user=user,
1546 1546 date=date,
1547 1547 branch=branch,
1548 1548 editor=editor)
1549 1549 n = memctx.commit()
1550 1550 finally:
1551 1551 store.close()
1552 1552 if opts.get('exact') and nocommit:
1553 1553 # --exact with --no-commit is still useful in that it does merge
1554 1554 # and branch bits
1555 1555 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1556 1556 elif opts.get('exact') and (not n or hex(n) != nodeid):
1557 1557 raise error.Abort(_('patch is damaged or loses information'))
1558 1558 msg = _('applied to working directory')
1559 1559 if n:
1560 1560 # i18n: refers to a short changeset id
1561 1561 msg = _('created %s') % short(n)
1562 1562 return msg, n, rejects
1563 1563
1564 1564 # facility to let extensions include additional data in an exported patch
1565 1565 # list of identifiers to be executed in order
1566 1566 extraexport = []
1567 1567 # mapping from identifier to actual export function
1568 1568 # function has to return a string to be added to the header or None
1569 1569 # it is given two arguments (sequencenumber, changectx)
1570 1570 extraexportmap = {}
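# For illustration only (a hypothetical sketch, not registered by core): an
# extension could add its own patch header like this:
#
#   def _exportnote(seqno, ctx):
#       return 'Note patch %d of series' % seqno
#
#   extraexport.append('note')
#   extraexportmap['note'] = _exportnote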
1571 1571
1572 1572 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1573 1573 node = scmutil.binnode(ctx)
1574 1574 parents = [p.node() for p in ctx.parents() if p]
1575 1575 branch = ctx.branch()
1576 1576 if switch_parent:
1577 1577 parents.reverse()
1578 1578
1579 1579 if parents:
1580 1580 prev = parents[0]
1581 1581 else:
1582 1582 prev = nullid
1583 1583
1584 1584 fm.context(ctx=ctx)
1585 1585 fm.plain('# HG changeset patch\n')
1586 1586 fm.write('user', '# User %s\n', ctx.user())
1587 1587 fm.plain('# Date %d %d\n' % ctx.date())
1588 1588 fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
1589 1589 fm.condwrite(branch and branch != 'default',
1590 1590 'branch', '# Branch %s\n', branch)
1591 1591 fm.write('node', '# Node ID %s\n', hex(node))
1592 1592 fm.plain('# Parent %s\n' % hex(prev))
1593 1593 if len(parents) > 1:
1594 1594 fm.plain('# Parent %s\n' % hex(parents[1]))
1595 1595 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
1596 1596
1597 1597 # TODO: redesign extraexportmap function to support formatter
1598 1598 for headerid in extraexport:
1599 1599 header = extraexportmap[headerid](seqno, ctx)
1600 1600 if header is not None:
1601 1601 fm.plain('# %s\n' % header)
1602 1602
1603 1603 fm.write('desc', '%s\n', ctx.description().rstrip())
1604 1604 fm.plain('\n')
1605 1605
1606 1606 if fm.isplain():
1607 1607 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1608 1608 for chunk, label in chunkiter:
1609 1609 fm.plain(chunk, label=label)
1610 1610 else:
1611 1611 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1612 1612 # TODO: make it structured?
1613 1613 fm.data(diff=b''.join(chunkiter))
1614 1614
1615 1615 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1616 1616 """Export changesets to stdout or a single file"""
1617 1617 for seqno, rev in enumerate(revs, 1):
1618 1618 ctx = repo[rev]
1619 1619 if not dest.startswith('<'):
1620 1620 repo.ui.note("%s\n" % dest)
1621 1621 fm.startitem()
1622 1622 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1623 1623
1624 1624 def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
1625 1625 match):
1626 1626 """Export changesets to possibly multiple files"""
1627 1627 total = len(revs)
1628 1628 revwidth = max(len(str(rev)) for rev in revs)
1629 1629 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1630 1630
1631 1631 for seqno, rev in enumerate(revs, 1):
1632 1632 ctx = repo[rev]
1633 1633 dest = makefilename(ctx, fntemplate,
1634 1634 total=total, seqno=seqno, revwidth=revwidth)
1635 1635 filemap.setdefault(dest, []).append((seqno, rev))
1636 1636
1637 1637 for dest in filemap:
1638 1638 with formatter.maybereopen(basefm, dest) as fm:
1639 1639 repo.ui.note("%s\n" % dest)
1640 1640 for seqno, rev in filemap[dest]:
1641 1641 fm.startitem()
1642 1642 ctx = repo[rev]
1643 1643 _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
1644 1644 diffopts)
1645 1645
1646 1646 def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
1647 1647 opts=None, match=None):
1648 1648 '''export changesets as hg patches
1649 1649
1650 1650 Args:
1651 1651 repo: The repository from which we're exporting revisions.
1652 1652 revs: A list of revisions to export as revision numbers.
1653 1653 basefm: A formatter to which patches should be written.
1654 1654 fntemplate: An optional string to use for generating patch file names.
1655 1655 switch_parent: If True, show diffs against second parent when not nullid.
1656 1656 Default is false, which always shows diff against p1.
1657 1657 opts: diff options to use for generating the patch.
1658 1658 match: If specified, only export changes to files matching this matcher.
1659 1659
1660 1660 Returns:
1661 1661 Nothing.
1662 1662
1663 1663 Side Effect:
1664 1664 "HG Changeset Patch" data is emitted to one of the following
1665 1665 destinations:
1666 1666 fntemplate specified: Each rev is written to a unique file named using
1667 1667 the given template.
1668 1668 Otherwise: All revs will be written to basefm.
1669 1669 '''
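# Minimal usage sketch (assuming a formatter `basefm` already created by the
# caller, as the export command does):
#
#   revs = scmutil.revrange(repo, ['.'])
#   export(repo, revs, basefm, opts=patch.diffallopts(repo.ui))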
1670 1670 scmutil.prefetchfiles(repo, revs, match)
1671 1671
1672 1672 if not fntemplate:
1673 1673 _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
1674 1674 else:
1675 1675 _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
1676 1676 match)
1677 1677
1678 1678 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
1679 1679 """Export changesets to the given file stream"""
1680 1680 scmutil.prefetchfiles(repo, revs, match)
1681 1681
1682 1682 dest = getattr(fp, 'name', '<unnamed>')
1683 1683 with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
1684 1684 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
1685 1685
1686 1686 def showmarker(fm, marker, index=None):
1687 1687 """utility function to display obsolescence marker in a readable way
1688 1688
1689 1689 To be used by debug function."""
1690 1690 if index is not None:
1691 1691 fm.write('index', '%i ', index)
1692 1692 fm.write('prednode', '%s ', hex(marker.prednode()))
1693 1693 succs = marker.succnodes()
1694 1694 fm.condwrite(succs, 'succnodes', '%s ',
1695 1695 fm.formatlist(map(hex, succs), name='node'))
1696 1696 fm.write('flag', '%X ', marker.flags())
1697 1697 parents = marker.parentnodes()
1698 1698 if parents is not None:
1699 1699 fm.write('parentnodes', '{%s} ',
1700 1700 fm.formatlist(map(hex, parents), name='node', sep=', '))
1701 1701 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1702 1702 meta = marker.metadata().copy()
1703 1703 meta.pop('date', None)
1704 1704 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
1705 1705 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1706 1706 fm.plain('\n')
1707 1707
1708 1708 def finddate(ui, repo, date):
1709 1709 """Find the tipmost changeset that matches the given date spec"""
1710 1710
1711 1711 df = dateutil.matchdate(date)
1712 1712 m = scmutil.matchall(repo)
1713 1713 results = {}
1714 1714
1715 1715 def prep(ctx, fns):
1716 1716 d = ctx.date()
1717 1717 if df(d[0]):
1718 1718 results[ctx.rev()] = d
1719 1719
1720 1720 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1721 1721 rev = ctx.rev()
1722 1722 if rev in results:
1723 1723 ui.status(_("found revision %s from %s\n") %
1724 1724 (rev, dateutil.datestr(results[rev])))
1725 1725 return '%d' % rev
1726 1726
1727 1727 raise error.Abort(_("revision matching date not found"))
1728 1728
1729 1729 def increasingwindows(windowsize=8, sizelimit=512):
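# Yields window sizes 8, 16, 32, ... doubling until sizelimit is reached, then
# repeats sizelimit forever; callers walk revisions in these progressively
# larger batches.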
1730 1730 while True:
1731 1731 yield windowsize
1732 1732 if windowsize < sizelimit:
1733 1733 windowsize *= 2
1734 1734
1735 1735 def _walkrevs(repo, opts):
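# Resolve the revisions that log-like commands should visit, based on the
# --rev and --follow options (newest first unless --rev says otherwise).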
1736 1736 # Default --rev value depends on --follow but --follow behavior
1737 1737 # depends on revisions resolved from --rev...
1738 1738 follow = opts.get('follow') or opts.get('follow_first')
1739 1739 if opts.get('rev'):
1740 1740 revs = scmutil.revrange(repo, opts['rev'])
1741 1741 elif follow and repo.dirstate.p1() == nullid:
1742 1742 revs = smartset.baseset()
1743 1743 elif follow:
1744 1744 revs = repo.revs('reverse(:.)')
1745 1745 else:
1746 1746 revs = smartset.spanset(repo)
1747 1747 revs.reverse()
1748 1748 return revs
1749 1749
1750 1750 class FileWalkError(Exception):
1751 1751 pass
1752 1752
1753 1753 def walkfilerevs(repo, match, follow, revs, fncache):
1754 1754 '''Walks the file history for the matched files.
1755 1755
1756 1756 Returns the changeset revs that are involved in the file history.
1757 1757
1758 1758 Throws FileWalkError if the file history can't be walked using
1759 1759 filelogs alone.
1760 1760 '''
1761 1761 wanted = set()
1762 1762 copies = []
1763 1763 minrev, maxrev = min(revs), max(revs)
1764 1764 def filerevgen(filelog, last):
1765 1765 """
1766 1766 Only files, no patterns. Check the history of each file.
1767 1767
1768 1768 Examines filelog entries within the minrev..maxrev linkrev range.
1769 1769 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1770 1770 tuples in backwards order.
1771 1771 """
1772 1772 cl_count = len(repo)
1773 1773 revs = []
1774 1774 for j in pycompat.xrange(0, last + 1):
1775 1775 linkrev = filelog.linkrev(j)
1776 1776 if linkrev < minrev:
1777 1777 continue
1778 1778 # only yield revs for which we have the changelog; revs beyond it can
1779 1779 # appear while doing "hg log" during a pull or commit
1780 1780 if linkrev >= cl_count:
1781 1781 break
1782 1782
1783 1783 parentlinkrevs = []
1784 1784 for p in filelog.parentrevs(j):
1785 1785 if p != nullrev:
1786 1786 parentlinkrevs.append(filelog.linkrev(p))
1787 1787 n = filelog.node(j)
1788 1788 revs.append((linkrev, parentlinkrevs,
1789 1789 follow and filelog.renamed(n)))
1790 1790
1791 1791 return reversed(revs)
1792 1792 def iterfiles():
1793 1793 pctx = repo['.']
1794 1794 for filename in match.files():
1795 1795 if follow:
1796 1796 if filename not in pctx:
1797 1797 raise error.Abort(_('cannot follow file not in parent '
1798 1798 'revision: "%s"') % filename)
1799 1799 yield filename, pctx[filename].filenode()
1800 1800 else:
1801 1801 yield filename, None
1802 1802 for filename_node in copies:
1803 1803 yield filename_node
1804 1804
1805 1805 for file_, node in iterfiles():
1806 1806 filelog = repo.file(file_)
1807 1807 if not len(filelog):
1808 1808 if node is None:
1809 1809 # A zero count may be a directory or deleted file, so
1810 1810 # try to find matching entries on the slow path.
1811 1811 if follow:
1812 1812 raise error.Abort(
1813 1813 _('cannot follow nonexistent file: "%s"') % file_)
1814 1814 raise FileWalkError("Cannot walk via filelog")
1815 1815 else:
1816 1816 continue
1817 1817
1818 1818 if node is None:
1819 1819 last = len(filelog) - 1
1820 1820 else:
1821 1821 last = filelog.rev(node)
1822 1822
1823 1823 # keep track of all ancestors of the file
1824 1824 ancestors = {filelog.linkrev(last)}
1825 1825
1826 1826 # iterate from latest to oldest revision
1827 1827 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1828 1828 if not follow:
1829 1829 if rev > maxrev:
1830 1830 continue
1831 1831 else:
1832 1832 # Note that last might not be the first interesting
1833 1833 # rev to us:
1834 1834 # if the file has been changed after maxrev, we'll
1835 1835 # have linkrev(last) > maxrev, and we still need
1836 1836 # to explore the file graph
1837 1837 if rev not in ancestors:
1838 1838 continue
1839 1839 # XXX insert 1327 fix here
1840 1840 if flparentlinkrevs:
1841 1841 ancestors.update(flparentlinkrevs)
1842 1842
1843 1843 fncache.setdefault(rev, []).append(file_)
1844 1844 wanted.add(rev)
1845 1845 if copied:
1846 1846 copies.append(copied)
1847 1847
1848 1848 return wanted
1849 1849
1850 1850 class _followfilter(object):
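# Predicate used to follow a revision's relatives: the first revision passed
# to match() becomes the starting point, and later revisions match if they
# are related to it (descendants when walking forward, parents/ancestors when
# walking backwards). With onlyfirst, only first parents are followed.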
1851 1851 def __init__(self, repo, onlyfirst=False):
1852 1852 self.repo = repo
1853 1853 self.startrev = nullrev
1854 1854 self.roots = set()
1855 1855 self.onlyfirst = onlyfirst
1856 1856
1857 1857 def match(self, rev):
1858 1858 def realparents(rev):
1859 1859 if self.onlyfirst:
1860 1860 return self.repo.changelog.parentrevs(rev)[0:1]
1861 1861 else:
1862 1862 return filter(lambda x: x != nullrev,
1863 1863 self.repo.changelog.parentrevs(rev))
1864 1864
1865 1865 if self.startrev == nullrev:
1866 1866 self.startrev = rev
1867 1867 return True
1868 1868
1869 1869 if rev > self.startrev:
1870 1870 # forward: all descendants
1871 1871 if not self.roots:
1872 1872 self.roots.add(self.startrev)
1873 1873 for parent in realparents(rev):
1874 1874 if parent in self.roots:
1875 1875 self.roots.add(rev)
1876 1876 return True
1877 1877 else:
1878 1878 # backwards: all parents
1879 1879 if not self.roots:
1880 1880 self.roots.update(realparents(self.startrev))
1881 1881 if rev in self.roots:
1882 1882 self.roots.remove(rev)
1883 1883 self.roots.update(realparents(rev))
1884 1884 return True
1885 1885
1886 1886 return False
1887 1887
1888 1888 def walkchangerevs(repo, match, opts, prepare):
1889 1889 '''Iterate over files and the revs in which they changed.
1890 1890
1891 1891 Callers most commonly need to iterate backwards over the history
1892 1892 in which they are interested. Doing so has awful (quadratic-looking)
1893 1893 performance, so we use iterators in a "windowed" way.
1894 1894
1895 1895 We walk a window of revisions in the desired order. Within the
1896 1896 window, we first walk forwards to gather data, then in the desired
1897 1897 order (usually backwards) to display it.
1898 1898
1899 1899 This function returns an iterator yielding contexts. Before
1900 1900 yielding each context, the iterator will first call the prepare
1901 1901 function on each context in the window in forward order.'''
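# Typical use (a sketch): cache per-revision data in the prepare callback,
# then display it while consuming the returned iterator:
#
#   def prep(ctx, fns):
#       ...  # record the matched filenames for ctx
#   for ctx in walkchangerevs(repo, match, opts, prep):
#       ...  # display ctx using the recorded data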
1902 1902
1903 1903 allfiles = opts.get('all_files')
1904 1904 follow = opts.get('follow') or opts.get('follow_first')
1905 1905 revs = _walkrevs(repo, opts)
1906 1906 if not revs:
1907 1907 return []
1908 1908 wanted = set()
1909 1909 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1910 1910 fncache = {}
1911 1911 change = repo.__getitem__
1912 1912
1913 1913 # First step is to fill wanted, the set of revisions that we want to yield.
1914 1914 # When it does not induce extra cost, we also fill fncache for revisions in
1915 1915 # wanted: a cache of filenames that were changed (ctx.files()) and that
1916 1916 # match the file filtering conditions.
1917 1917
1918 1918 if match.always() or allfiles:
1919 1919 # No files, no patterns. Display all revs.
1920 1920 wanted = revs
1921 1921 elif not slowpath:
1922 1922 # We only have to read through the filelog to find wanted revisions
1923 1923
1924 1924 try:
1925 1925 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1926 1926 except FileWalkError:
1927 1927 slowpath = True
1928 1928
1929 1929 # We decided to fall back to the slowpath because at least one
1930 1930 # of the paths was not a file. Check to see if at least one of them
1931 1931 # existed in history, otherwise simply return
1932 1932 for path in match.files():
1933 1933 if path == '.' or path in repo.store:
1934 1934 break
1935 1935 else:
1936 1936 return []
1937 1937
1938 1938 if slowpath:
1939 1939 # We have to read the changelog to match filenames against
1940 1940 # changed files
1941 1941
1942 1942 if follow:
1943 1943 raise error.Abort(_('can only follow copies/renames for explicit '
1944 1944 'filenames'))
1945 1945
1946 1946 # The slow path checks files modified in every changeset.
1947 1947 # This is really slow on large repos, so compute the set lazily.
1948 1948 class lazywantedset(object):
1949 1949 def __init__(self):
1950 1950 self.set = set()
1951 1951 self.revs = set(revs)
1952 1952
1953 1953 # No need to worry about locality here because it will be accessed
1954 1954 # in the same order as the increasing window below.
1955 1955 def __contains__(self, value):
1956 1956 if value in self.set:
1957 1957 return True
1958 1958 elif not value in self.revs:
1959 1959 return False
1960 1960 else:
1961 1961 self.revs.discard(value)
1962 1962 ctx = change(value)
1963 1963 matches = [f for f in ctx.files() if match(f)]
1964 1964 if matches:
1965 1965 fncache[value] = matches
1966 1966 self.set.add(value)
1967 1967 return True
1968 1968 return False
1969 1969
1970 1970 def discard(self, value):
1971 1971 self.revs.discard(value)
1972 1972 self.set.discard(value)
1973 1973
1974 1974 wanted = lazywantedset()
1975 1975
1976 1976 # it might be worthwhile to do this in the iterator if the rev range
1977 1977 # is descending and the prune args are all within that range
1978 1978 for rev in opts.get('prune', ()):
1979 1979 rev = repo[rev].rev()
1980 1980 ff = _followfilter(repo)
1981 1981 stop = min(revs[0], revs[-1])
1982 1982 for x in pycompat.xrange(rev, stop - 1, -1):
1983 1983 if ff.match(x):
1984 1984 wanted = wanted - [x]
1985 1985
1986 1986 # Now that wanted is correctly initialized, we can iterate over the
1987 1987 # revision range, yielding only revisions in wanted.
1988 1988 def iterate():
1989 1989 if follow and match.always():
1990 1990 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1991 1991 def want(rev):
1992 1992 return ff.match(rev) and rev in wanted
1993 1993 else:
1994 1994 def want(rev):
1995 1995 return rev in wanted
1996 1996
1997 1997 it = iter(revs)
1998 1998 stopiteration = False
1999 1999 for windowsize in increasingwindows():
2000 2000 nrevs = []
2001 2001 for i in pycompat.xrange(windowsize):
2002 2002 rev = next(it, None)
2003 2003 if rev is None:
2004 2004 stopiteration = True
2005 2005 break
2006 2006 elif want(rev):
2007 2007 nrevs.append(rev)
2008 2008 for rev in sorted(nrevs):
2009 2009 fns = fncache.get(rev)
2010 2010 ctx = change(rev)
2011 2011 if not fns:
2012 2012 def fns_generator():
2013 2013 if allfiles:
2014 2014 fiter = iter(ctx)
2015 2015 else:
2016 2016 fiter = ctx.files()
2017 2017 for f in fiter:
2018 2018 if match(f):
2019 2019 yield f
2020 2020 fns = fns_generator()
2021 2021 prepare(ctx, fns)
2022 2022 for rev in nrevs:
2023 2023 yield change(rev)
2024 2024
2025 2025 if stopiteration:
2026 2026 break
2027 2027
2028 2028 return iterate()
2029 2029
2030 2030 def add(ui, repo, match, prefix, explicitonly, **opts):
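# Schedule the matched files for addition (recursing into subrepositories);
# returns the list of paths that could not be added.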
2031 2031 join = lambda f: os.path.join(prefix, f)
2032 2032 bad = []
2033 2033
2034 2034 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2035 2035 names = []
2036 2036 wctx = repo[None]
2037 2037 cca = None
2038 2038 abort, warn = scmutil.checkportabilityalert(ui)
2039 2039 if abort or warn:
2040 2040 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2041 2041
2042 2042 match = repo.narrowmatch(match, includeexact=True)
2043 2043 badmatch = matchmod.badmatch(match, badfn)
2044 2044 dirstate = repo.dirstate
2045 2045 # We don't want to just call wctx.walk here, since it would return a lot of
2046 2046 # clean files, which we aren't interested in, and that takes time.
2047 2047 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2048 2048 unknown=True, ignored=False, full=False)):
2049 2049 exact = match.exact(f)
2050 2050 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2051 2051 if cca:
2052 2052 cca(f)
2053 2053 names.append(f)
2054 2054 if ui.verbose or not exact:
2055 2055 ui.status(_('adding %s\n') % match.rel(f),
2056 2056 label='ui.addremove.added')
2057 2057
2058 2058 for subpath in sorted(wctx.substate):
2059 2059 sub = wctx.sub(subpath)
2060 2060 try:
2061 2061 submatch = matchmod.subdirmatcher(subpath, match)
2062 subprefix = repo.wvfs.reljoin(prefix, subpath)
2062 2063 if opts.get(r'subrepos'):
2063 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2064 bad.extend(sub.add(ui, submatch, subprefix, False, **opts))
2064 2065 else:
2065 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2066 bad.extend(sub.add(ui, submatch, subprefix, True, **opts))
2066 2067 except error.LookupError:
2067 2068 ui.status(_("skipping missing subrepository: %s\n")
2068 2069 % join(subpath))
2069 2070
2070 2071 if not opts.get(r'dry_run'):
2071 2072 rejected = wctx.add(names, prefix)
2072 2073 bad.extend(f for f in rejected if f in match.files())
2073 2074 return bad
2074 2075
2075 2076 def addwebdirpath(repo, serverpath, webconf):
2076 2077 webconf[serverpath] = repo.root
2077 2078 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2078 2079
2079 2080 for r in repo.revs('filelog("path:.hgsub")'):
2080 2081 ctx = repo[r]
2081 2082 for subpath in ctx.substate:
2082 2083 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2083 2084
2084 2085 def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive):
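# Forget the matched files (recursing into subrepositories); returns a
# (bad, forgot) pair listing paths that could not be forgotten and paths that
# were actually forgotten.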
2085 2086 if dryrun and interactive:
2086 2087 raise error.Abort(_("cannot specify both --dry-run and --interactive"))
2087 2088 join = lambda f: os.path.join(prefix, f)
2088 2089 bad = []
2089 2090 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2090 2091 wctx = repo[None]
2091 2092 forgot = []
2092 2093
2093 2094 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2094 2095 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2095 2096 if explicitonly:
2096 2097 forget = [f for f in forget if match.exact(f)]
2097 2098
2098 2099 for subpath in sorted(wctx.substate):
2099 2100 sub = wctx.sub(subpath)
2100 2101 submatch = matchmod.subdirmatcher(subpath, match)
2101 2102 subprefix = repo.wvfs.reljoin(prefix, subpath)
2102 2103 try:
2103 2104 subbad, subforgot = sub.forget(submatch, subprefix, dryrun=dryrun,
2104 2105 interactive=interactive)
2105 2106 bad.extend([subpath + '/' + f for f in subbad])
2106 2107 forgot.extend([subpath + '/' + f for f in subforgot])
2107 2108 except error.LookupError:
2108 2109 ui.status(_("skipping missing subrepository: %s\n")
2109 2110 % join(subpath))
2110 2111
2111 2112 if not explicitonly:
2112 2113 for f in match.files():
2113 2114 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2114 2115 if f not in forgot:
2115 2116 if repo.wvfs.exists(f):
2116 2117 # Don't complain if the exact case match wasn't given.
2117 2118 # But don't do this until after checking 'forgot', so
2118 2119 # that subrepo files aren't normalized, and this op is
2119 2120 # purely from data cached by the status walk above.
2120 2121 if repo.dirstate.normalize(f) in repo.dirstate:
2121 2122 continue
2122 2123 ui.warn(_('not removing %s: '
2123 2124 'file is already untracked\n')
2124 2125 % match.rel(f))
2125 2126 bad.append(f)
2126 2127
2127 2128 if interactive:
2128 2129 responses = _('[Ynsa?]'
2129 2130 '$$ &Yes, forget this file'
2130 2131 '$$ &No, skip this file'
2131 2132 '$$ &Skip remaining files'
2132 2133 '$$ Include &all remaining files'
2133 2134 '$$ &? (display help)')
2134 2135 for filename in forget[:]:
2135 2136 r = ui.promptchoice(_('forget %s %s') % (filename, responses))
2136 2137 if r == 4: # ?
2137 2138 while r == 4:
2138 2139 for c, t in ui.extractchoices(responses)[1]:
2139 2140 ui.write('%s - %s\n' % (c, encoding.lower(t)))
2140 2141 r = ui.promptchoice(_('forget %s %s') % (filename,
2141 2142 responses))
2142 2143 if r == 0: # yes
2143 2144 continue
2144 2145 elif r == 1: # no
2145 2146 forget.remove(filename)
2146 2147 elif r == 2: # Skip
2147 2148 fnindex = forget.index(filename)
2148 2149 del forget[fnindex:]
2149 2150 break
2150 2151 elif r == 3: # All
2151 2152 break
2152 2153
2153 2154 for f in forget:
2154 2155 if ui.verbose or not match.exact(f) or interactive:
2155 2156 ui.status(_('removing %s\n') % match.rel(f),
2156 2157 label='ui.addremove.removed')
2157 2158
2158 2159 if not dryrun:
2159 2160 rejected = wctx.forget(forget, prefix)
2160 2161 bad.extend(f for f in rejected if f in match.files())
2161 2162 forgot.extend(f for f in forget if f not in rejected)
2162 2163 return bad, forgot
2163 2164
2164 2165 def files(ui, ctx, m, fm, fmt, subrepos):
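# Print the files in ctx matched by m through the formatter; returns 0 if at
# least one file was listed, 1 otherwise.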
2165 2166 ret = 1
2166 2167
2167 2168 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2168 2169 uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
2169 2170 for f in ctx.matches(m):
2170 2171 fm.startitem()
2171 2172 fm.context(ctx=ctx)
2172 2173 if needsfctx:
2173 2174 fc = ctx[f]
2174 2175 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2175 2176 fm.data(path=f)
2176 2177 fm.plain(fmt % uipathfn(f))
2177 2178 ret = 0
2178 2179
2179 2180 for subpath in sorted(ctx.substate):
2180 2181 submatch = matchmod.subdirmatcher(subpath, m)
2181 2182 if (subrepos or m.exact(subpath) or any(submatch.files())):
2182 2183 sub = ctx.sub(subpath)
2183 2184 try:
2184 2185 recurse = m.exact(subpath) or subrepos
2185 2186 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2186 2187 ret = 0
2187 2188 except error.LookupError:
2188 2189 ui.status(_("skipping missing subrepository: %s\n")
2189 2190 % m.abs(subpath))
2190 2191
2191 2192 return ret
2192 2193
2193 2194 def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
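# Core of 'hg remove': schedule the matched files for removal and optionally
# unlink them; returns 0 on success, 1 if any file could not be removed.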
2194 2195 join = lambda f: os.path.join(prefix, f)
2195 2196 ret = 0
2196 2197 s = repo.status(match=m, clean=True)
2197 2198 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2198 2199
2199 2200 wctx = repo[None]
2200 2201
2201 2202 if warnings is None:
2202 2203 warnings = []
2203 2204 warn = True
2204 2205 else:
2205 2206 warn = False
2206 2207
2207 2208 subs = sorted(wctx.substate)
2208 2209 progress = ui.makeprogress(_('searching'), total=len(subs),
2209 2210 unit=_('subrepos'))
2210 2211 for subpath in subs:
2211 2212 submatch = matchmod.subdirmatcher(subpath, m)
2212 2213 subprefix = repo.wvfs.reljoin(prefix, subpath)
2213 2214 if subrepos or m.exact(subpath) or any(submatch.files()):
2214 2215 progress.increment()
2215 2216 sub = wctx.sub(subpath)
2216 2217 try:
2217 2218 if sub.removefiles(submatch, subprefix, after, force, subrepos,
2218 2219 dryrun, warnings):
2219 2220 ret = 1
2220 2221 except error.LookupError:
2221 2222 warnings.append(_("skipping missing subrepository: %s\n")
2222 2223 % join(subpath))
2223 2224 progress.complete()
2224 2225
2225 2226 # warn about failure to delete explicit files/dirs
2226 2227 deleteddirs = util.dirs(deleted)
2227 2228 files = m.files()
2228 2229 progress = ui.makeprogress(_('deleting'), total=len(files),
2229 2230 unit=_('files'))
2230 2231 for f in files:
2231 2232 def insubrepo():
2232 2233 for subpath in wctx.substate:
2233 2234 if f.startswith(subpath + '/'):
2234 2235 return True
2235 2236 return False
2236 2237
2237 2238 progress.increment()
2238 2239 isdir = f in deleteddirs or wctx.hasdir(f)
2239 2240 if (f in repo.dirstate or isdir or f == '.'
2240 2241 or insubrepo() or f in subs):
2241 2242 continue
2242 2243
2243 2244 if repo.wvfs.exists(f):
2244 2245 if repo.wvfs.isdir(f):
2245 2246 warnings.append(_('not removing %s: no tracked files\n')
2246 2247 % m.rel(f))
2247 2248 else:
2248 2249 warnings.append(_('not removing %s: file is untracked\n')
2249 2250 % m.rel(f))
2250 2251 # missing files will generate a warning elsewhere
2251 2252 ret = 1
2252 2253 progress.complete()
2253 2254
2254 2255 if force:
2255 2256 list = modified + deleted + clean + added
2256 2257 elif after:
2257 2258 list = deleted
2258 2259 remaining = modified + added + clean
2259 2260 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2260 2261 unit=_('files'))
2261 2262 for f in remaining:
2262 2263 progress.increment()
2263 2264 if ui.verbose or (f in files):
2264 2265 warnings.append(_('not removing %s: file still exists\n')
2265 2266 % m.rel(f))
2266 2267 ret = 1
2267 2268 progress.complete()
2268 2269 else:
2269 2270 list = deleted + clean
2270 2271 progress = ui.makeprogress(_('skipping'),
2271 2272 total=(len(modified) + len(added)),
2272 2273 unit=_('files'))
2273 2274 for f in modified:
2274 2275 progress.increment()
2275 2276 warnings.append(_('not removing %s: file is modified (use -f'
2276 2277 ' to force removal)\n') % m.rel(f))
2277 2278 ret = 1
2278 2279 for f in added:
2279 2280 progress.increment()
2280 2281 warnings.append(_("not removing %s: file has been marked for add"
2281 2282 " (use 'hg forget' to undo add)\n") % m.rel(f))
2282 2283 ret = 1
2283 2284 progress.complete()
2284 2285
2285 2286 list = sorted(list)
2286 2287 progress = ui.makeprogress(_('deleting'), total=len(list),
2287 2288 unit=_('files'))
2288 2289 for f in list:
2289 2290 if ui.verbose or not m.exact(f):
2290 2291 progress.increment()
2291 2292 ui.status(_('removing %s\n') % m.rel(f),
2292 2293 label='ui.addremove.removed')
2293 2294 progress.complete()
2294 2295
2295 2296 if not dryrun:
2296 2297 with repo.wlock():
2297 2298 if not after:
2298 2299 for f in list:
2299 2300 if f in added:
2300 2301 continue # we never unlink added files on remove
2301 2302 rmdir = repo.ui.configbool('experimental',
2302 2303 'removeemptydirs')
2303 2304 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2304 2305 repo[None].forget(list)
2305 2306
2306 2307 if warn:
2307 2308 for warning in warnings:
2308 2309 ui.warn(warning)
2309 2310
2310 2311 return ret
2311 2312
2312 2313 def _updatecatformatter(fm, ctx, matcher, path, decode):
2313 2314 """Hook for adding data to the formatter used by ``hg cat``.
2314 2315
2315 2316 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2316 2317 this method first."""
2317 2318 data = ctx[path].data()
2318 2319 if decode:
2319 2320 data = ctx.repo().wwritedata(path, data)
2320 2321 fm.startitem()
2321 2322 fm.context(ctx=ctx)
2322 2323 fm.write('data', '%s', data)
2323 2324 fm.data(path=path)
2324 2325
2325 2326 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
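# Write the contents of the matched files from ctx, either to the formatter
# or to files generated from fntemplate; returns 0 if at least one file was
# written, 1 otherwise.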
2326 2327 err = 1
2327 2328 opts = pycompat.byteskwargs(opts)
2328 2329
2329 2330 def write(path):
2330 2331 filename = None
2331 2332 if fntemplate:
2332 2333 filename = makefilename(ctx, fntemplate,
2333 2334 pathname=os.path.join(prefix, path))
2334 2335 # attempt to create the directory if it does not already exist
2335 2336 try:
2336 2337 os.makedirs(os.path.dirname(filename))
2337 2338 except OSError:
2338 2339 pass
2339 2340 with formatter.maybereopen(basefm, filename) as fm:
2340 2341 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2341 2342
2342 2343 # Automation often uses hg cat on single files, so special case it
2343 2344 # for performance to avoid the cost of parsing the manifest.
2344 2345 if len(matcher.files()) == 1 and not matcher.anypats():
2345 2346 file = matcher.files()[0]
2346 2347 mfl = repo.manifestlog
2347 2348 mfnode = ctx.manifestnode()
2348 2349 try:
2349 2350 if mfnode and mfl[mfnode].find(file)[0]:
2350 2351 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2351 2352 write(file)
2352 2353 return 0
2353 2354 except KeyError:
2354 2355 pass
2355 2356
2356 2357 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2357 2358
2358 2359 for abs in ctx.walk(matcher):
2359 2360 write(abs)
2360 2361 err = 0
2361 2362
2362 2363 for subpath in sorted(ctx.substate):
2363 2364 sub = ctx.sub(subpath)
2364 2365 try:
2365 2366 submatch = matchmod.subdirmatcher(subpath, matcher)
2366 2367 subprefix = os.path.join(prefix, sub._path)
2367 2368 if not sub.cat(submatch, basefm, fntemplate, subprefix,
2368 2369 **pycompat.strkwargs(opts)):
2369 2370 err = 0
2370 2371 except error.RepoLookupError:
2371 2372 ui.status(_("skipping missing subrepository: %s\n") % subprefix)
2372 2373
2373 2374 return err
2374 2375
2375 2376 def commit(ui, repo, commitfunc, pats, opts):
2376 2377 '''commit the specified files or all outstanding changes'''
2377 2378 date = opts.get('date')
2378 2379 if date:
2379 2380 opts['date'] = dateutil.parsedate(date)
2380 2381 message = logmessage(ui, opts)
2381 2382 matcher = scmutil.match(repo[None], pats, opts)
2382 2383
2383 2384 dsguard = None
2384 2385 # extract addremove carefully -- this function can be called from a command
2385 2386 # that doesn't support addremove
2386 2387 if opts.get('addremove'):
2387 2388 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2388 2389 with dsguard or util.nullcontextmanager():
2389 2390 if dsguard:
2390 2391 if scmutil.addremove(repo, matcher, "", opts) != 0:
2391 2392 raise error.Abort(
2392 2393 _("failed to mark all new/missing files as added/removed"))
2393 2394
2394 2395 return commitfunc(ui, repo, message, matcher, opts)
2395 2396
2396 2397 def samefile(f, ctx1, ctx2):
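# Return True if f has identical content and flags in ctx1 and ctx2
# (a file absent from both is also considered the same).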
2397 2398 if f in ctx1.manifest():
2398 2399 a = ctx1.filectx(f)
2399 2400 if f in ctx2.manifest():
2400 2401 b = ctx2.filectx(f)
2401 2402 return (not a.cmp(b)
2402 2403 and a.flags() == b.flags())
2403 2404 else:
2404 2405 return False
2405 2406 else:
2406 2407 return f not in ctx2.manifest()
2407 2408
2408 2409 def amend(ui, repo, old, extra, pats, opts):
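# Create a new changeset that amends `old` with the working copy changes
# matched by `pats`; returns the new node, or old.node() if nothing would
# change.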
2409 2410 # avoid cycle context -> subrepo -> cmdutil
2410 2411 from . import context
2411 2412
2412 2413 # amend will reuse the existing user if not specified, but the obsolete
2413 2414 # marker creation requires that the current user's name is specified.
2414 2415 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2415 2416 ui.username() # raise exception if username not set
2416 2417
2417 2418 ui.note(_('amending changeset %s\n') % old)
2418 2419 base = old.p1()
2419 2420
2420 2421 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2421 2422 # Participating changesets:
2422 2423 #
2423 2424 # wctx o - workingctx that contains changes from working copy
2424 2425 # | to go into amending commit
2425 2426 # |
2426 2427 # old o - changeset to amend
2427 2428 # |
2428 2429 # base o - first parent of the changeset to amend
2429 2430 wctx = repo[None]
2430 2431
2431 2432 # Copy to avoid mutating input
2432 2433 extra = extra.copy()
2433 2434 # Update extra dict from amended commit (e.g. to preserve graft
2434 2435 # source)
2435 2436 extra.update(old.extra())
2436 2437
2437 2438 # Also update it from the wctx
2438 2439 extra.update(wctx.extra())
2439 2440
2440 2441 user = opts.get('user') or old.user()
2441 2442
2442 2443 datemaydiffer = False # date-only change should be ignored?
2443 2444 if opts.get('date') and opts.get('currentdate'):
2444 2445 raise error.Abort(_('--date and --currentdate are mutually '
2445 2446 'exclusive'))
2446 2447 if opts.get('date'):
2447 2448 date = dateutil.parsedate(opts.get('date'))
2448 2449 elif opts.get('currentdate'):
2449 2450 date = dateutil.makedate()
2450 2451 elif (ui.configbool('rewrite', 'update-timestamp')
2451 2452 and opts.get('currentdate') is None):
2452 2453 date = dateutil.makedate()
2453 2454 datemaydiffer = True
2454 2455 else:
2455 2456 date = old.date()
2456 2457
2457 2458 if len(old.parents()) > 1:
2458 2459 # ctx.files() isn't reliable for merges, so fall back to the
2459 2460 # slower repo.status() method
2460 2461 files = set([fn for st in base.status(old)[:3]
2461 2462 for fn in st])
2462 2463 else:
2463 2464 files = set(old.files())
2464 2465
2465 2466 # add/remove the files to the working copy if the "addremove" option
2466 2467 # was specified.
2467 2468 matcher = scmutil.match(wctx, pats, opts)
2468 2469 if (opts.get('addremove')
2469 2470 and scmutil.addremove(repo, matcher, "", opts)):
2470 2471 raise error.Abort(
2471 2472 _("failed to mark all new/missing files as added/removed"))
2472 2473
2473 2474 # Check subrepos. This depends on in-place wctx._status update in
2474 2475 # subrepo.precommit(). To minimize the risk of this hack, we do
2475 2476 # nothing if .hgsub does not exist.
2476 2477 if '.hgsub' in wctx or '.hgsub' in old:
2477 2478 subs, commitsubs, newsubstate = subrepoutil.precommit(
2478 2479 ui, wctx, wctx._status, matcher)
2479 2480 # amend should abort if commitsubrepos is enabled
2480 2481 assert not commitsubs
2481 2482 if subs:
2482 2483 subrepoutil.writestate(repo, newsubstate)
2483 2484
2484 2485 ms = mergemod.mergestate.read(repo)
2485 2486 mergeutil.checkunresolved(ms)
2486 2487
2487 2488 filestoamend = set(f for f in wctx.files() if matcher(f))
2488 2489
2489 2490 changes = (len(filestoamend) > 0)
2490 2491 if changes:
2491 2492 # Recompute copies (avoid recording a -> b -> a)
2492 2493 copied = copies.pathcopies(base, wctx, matcher)
2493 2494 if old.p2():
2494 2495 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2495 2496
2496 2497 # Prune files which were reverted by the updates: if old
2497 2498 # introduced file X and the file was renamed in the working
2498 2499 # copy, then those two files are the same and
2499 2500 # we can discard X from our list of files. Likewise if X
2500 2501 # was removed, it's no longer relevant. If X is missing (aka
2501 2502 # deleted), old X must be preserved.
2502 2503 files.update(filestoamend)
2503 2504 files = [f for f in files if (not samefile(f, wctx, base)
2504 2505 or f in wctx.deleted())]
2505 2506
2506 2507 def filectxfn(repo, ctx_, path):
2507 2508 try:
2508 2509 # If the file being considered is not amongst the files
2509 2510 # to be amended, we should return the file context from the
2510 2511 # old changeset. This avoids issues when only some files in
2511 2512 # the working copy are being amended but there are also
2512 2513 # changes to other files from the old changeset.
2513 2514 if path not in filestoamend:
2514 2515 return old.filectx(path)
2515 2516
2516 2517 # Return None for removed files.
2517 2518 if path in wctx.removed():
2518 2519 return None
2519 2520
2520 2521 fctx = wctx[path]
2521 2522 flags = fctx.flags()
2522 2523 mctx = context.memfilectx(repo, ctx_,
2523 2524 fctx.path(), fctx.data(),
2524 2525 islink='l' in flags,
2525 2526 isexec='x' in flags,
2526 2527 copied=copied.get(path))
2527 2528 return mctx
2528 2529 except KeyError:
2529 2530 return None
2530 2531 else:
2531 2532 ui.note(_('copying changeset %s to %s\n') % (old, base))
2532 2533
2533 2534 # Use version of files as in the old cset
2534 2535 def filectxfn(repo, ctx_, path):
2535 2536 try:
2536 2537 return old.filectx(path)
2537 2538 except KeyError:
2538 2539 return None
2539 2540
2540 2541 # See if we got a message from -m or -l, if not, open the editor with
2541 2542 # the message of the changeset to amend.
2542 2543 message = logmessage(ui, opts)
2543 2544
2544 2545 editform = mergeeditform(old, 'commit.amend')
2545 2546 editor = getcommiteditor(editform=editform,
2546 2547 **pycompat.strkwargs(opts))
2547 2548
2548 2549 if not message:
2549 2550 editor = getcommiteditor(edit=True, editform=editform)
2550 2551 message = old.description()
2551 2552
2552 2553 pureextra = extra.copy()
2553 2554 extra['amend_source'] = old.hex()
2554 2555
2555 2556 new = context.memctx(repo,
2556 2557 parents=[base.node(), old.p2().node()],
2557 2558 text=message,
2558 2559 files=files,
2559 2560 filectxfn=filectxfn,
2560 2561 user=user,
2561 2562 date=date,
2562 2563 extra=extra,
2563 2564 editor=editor)
2564 2565
2565 2566 newdesc = changelog.stripdesc(new.description())
2566 2567 if ((not changes)
2567 2568 and newdesc == old.description()
2568 2569 and user == old.user()
2569 2570 and (date == old.date() or datemaydiffer)
2570 2571 and pureextra == old.extra()):
2571 2572 # nothing changed. continuing here would create a new node
2572 2573 # anyway because of the amend_source noise.
2573 2574 #
2574 2575 # This is not what we expect from amend.
2575 2576 return old.node()
2576 2577
2577 2578 commitphase = None
2578 2579 if opts.get('secret'):
2579 2580 commitphase = phases.secret
2580 2581 newid = repo.commitctx(new)
2581 2582
2582 2583 # Reroute the working copy parent to the new changeset
2583 2584 repo.setparents(newid, nullid)
2584 2585 mapping = {old.node(): (newid,)}
2585 2586 obsmetadata = None
2586 2587 if opts.get('note'):
2587 2588 obsmetadata = {'note': encoding.fromlocal(opts['note'])}
2588 2589 backup = ui.configbool('rewrite', 'backup-bundle')
2589 2590 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
2590 2591 fixphase=True, targetphase=commitphase,
2591 2592 backup=backup)
2592 2593
2593 2594 # Fixing the dirstate because localrepo.commitctx does not update
2594 2595 # it. This is rather convenient because we did not need to update
2595 2596 # the dirstate for all the files in the new commit which commitctx
2596 2597 # could have done if it updated the dirstate. Now, we can
2597 2598 # selectively update the dirstate only for the amended files.
2598 2599 dirstate = repo.dirstate
2599 2600
2600 2601 # Update the state of the files which were added and
2601 2602 # modified in the amend to "normal" in the dirstate.
2602 2603 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2603 2604 for f in normalfiles:
2604 2605 dirstate.normal(f)
2605 2606
2606 2607 # Update the state of files which were removed in the amend
2607 2608 # to "removed" in the dirstate.
2608 2609 removedfiles = set(wctx.removed()) & filestoamend
2609 2610 for f in removedfiles:
2610 2611 dirstate.drop(f)
2611 2612
2612 2613 return newid
2613 2614
2614 2615 def commiteditor(repo, ctx, subs, editform=''):
2615 2616 if ctx.description():
2616 2617 return ctx.description()
2617 2618 return commitforceeditor(repo, ctx, subs, editform=editform,
2618 2619 unchangedmessagedetection=True)
2619 2620
2620 2621 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2621 2622 editform='', unchangedmessagedetection=False):
2622 2623 if not extramsg:
2623 2624 extramsg = _("Leave message empty to abort commit.")
2624 2625
2625 2626 forms = [e for e in editform.split('.') if e]
2626 2627 forms.insert(0, 'changeset')
2627 2628 templatetext = None
2628 2629 while forms:
2629 2630 ref = '.'.join(forms)
2630 2631 if repo.ui.config('committemplate', ref):
2631 2632 templatetext = committext = buildcommittemplate(
2632 2633 repo, ctx, subs, extramsg, ref)
2633 2634 break
2634 2635 forms.pop()
2635 2636 else:
2636 2637 committext = buildcommittext(repo, ctx, subs, extramsg)
2637 2638
2638 2639 # run editor in the repository root
2639 2640 olddir = encoding.getcwd()
2640 2641 os.chdir(repo.root)
2641 2642
2642 2643 # make in-memory changes visible to external process
2643 2644 tr = repo.currenttransaction()
2644 2645 repo.dirstate.write(tr)
2645 2646 pending = tr and tr.writepending() and repo.root
2646 2647
2647 2648 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2648 2649 editform=editform, pending=pending,
2649 2650 repopath=repo.path, action='commit')
2650 2651 text = editortext
2651 2652
2652 2653 # strip away anything below this special string (used for editors that want
2653 2654 # to display the diff)
2654 2655 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2655 2656 if stripbelow:
2656 2657 text = text[:stripbelow.start()]
2657 2658
2658 2659 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2659 2660 os.chdir(olddir)
2660 2661
2661 2662 if finishdesc:
2662 2663 text = finishdesc(text)
2663 2664 if not text.strip():
2664 2665 raise error.Abort(_("empty commit message"))
2665 2666 if unchangedmessagedetection and editortext == templatetext:
2666 2667 raise error.Abort(_("commit message unchanged"))
2667 2668
2668 2669 return text
2669 2670
2670 2671 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2671 2672 ui = repo.ui
2672 2673 spec = formatter.templatespec(ref, None, None)
2673 2674 t = logcmdutil.changesettemplater(ui, repo, spec)
2674 2675 t.t.cache.update((k, templater.unquotestring(v))
2675 2676 for k, v in repo.ui.configitems('committemplate'))
2676 2677
2677 2678 if not extramsg:
2678 2679 extramsg = '' # ensure that extramsg is string
2679 2680
2680 2681 ui.pushbuffer()
2681 2682 t.show(ctx, extramsg=extramsg)
2682 2683 return ui.popbuffer()
2683 2684
2684 2685 def hgprefix(msg):
2685 2686 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2686 2687
2687 2688 def buildcommittext(repo, ctx, subs, extramsg):
2688 2689 edittext = []
2689 2690 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2690 2691 if ctx.description():
2691 2692 edittext.append(ctx.description())
2692 2693 edittext.append("")
2693 2694 edittext.append("") # Empty line between message and comments.
2694 2695 edittext.append(hgprefix(_("Enter commit message."
2695 2696 " Lines beginning with 'HG:' are removed.")))
2696 2697 edittext.append(hgprefix(extramsg))
2697 2698 edittext.append("HG: --")
2698 2699 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2699 2700 if ctx.p2():
2700 2701 edittext.append(hgprefix(_("branch merge")))
2701 2702 if ctx.branch():
2702 2703 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2703 2704 if bookmarks.isactivewdirparent(repo):
2704 2705 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2705 2706 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2706 2707 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2707 2708 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2708 2709 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2709 2710 if not added and not modified and not removed:
2710 2711 edittext.append(hgprefix(_("no files changed")))
2711 2712 edittext.append("")
2712 2713
2713 2714 return "\n".join(edittext)
2714 2715
2715 2716 def commitstatus(repo, node, branch, bheads=None, opts=None):
2716 2717 if opts is None:
2717 2718 opts = {}
2718 2719 ctx = repo[node]
2719 2720 parents = ctx.parents()
2720 2721
2721 2722 if (not opts.get('amend') and bheads and node not in bheads and not
2722 2723 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2723 2724 repo.ui.status(_('created new head\n'))
2724 2725 # The message is not printed for initial roots. For the other
2725 2726 # changesets, it is printed in the following situations:
2726 2727 #
2727 2728 # Par column: for the 2 parents with ...
2728 2729 # N: null or no parent
2729 2730 # B: parent is on another named branch
2730 2731 # C: parent is a regular non head changeset
2731 2732 # H: parent was a branch head of the current branch
2732 2733 # Msg column: whether we print "created new head" message
2733 2734 # In the following, it is assumed that there already exists some
2734 2735 # initial branch heads of the current branch, otherwise nothing is
2735 2736 # printed anyway.
2736 2737 #
2737 2738 # Par Msg Comment
2738 2739 # N N y additional topo root
2739 2740 #
2740 2741 # B N y additional branch root
2741 2742 # C N y additional topo head
2742 2743 # H N n usual case
2743 2744 #
2744 2745 # B B y weird additional branch root
2745 2746 # C B y branch merge
2746 2747 # H B n merge with named branch
2747 2748 #
2748 2749 # C C y additional head from merge
2749 2750 # C H n merge with a head
2750 2751 #
2751 2752 # H H n head merge: head count decreases
2752 2753
2753 2754 if not opts.get('close_branch'):
2754 2755 for r in parents:
2755 2756 if r.closesbranch() and r.branch() == branch:
2756 2757 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2757 2758
2758 2759 if repo.ui.debugflag:
2759 2760 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2760 2761 elif repo.ui.verbose:
2761 2762 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2762 2763
2763 2764 def postcommitstatus(repo, pats, opts):
2764 2765 return repo.status(match=scmutil.match(repo[None], pats, opts))
2765 2766
2766 2767 def revert(ui, repo, ctx, parents, *pats, **opts):
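# Core of 'hg revert': restore the matched files to their state in ctx,
# backing up local modifications as needed.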
2767 2768 opts = pycompat.byteskwargs(opts)
2768 2769 parent, p2 = parents
2769 2770 node = ctx.node()
2770 2771
2771 2772 mf = ctx.manifest()
2772 2773 if node == p2:
2773 2774 parent = p2
2774 2775
2775 2776 # need all matching names in dirstate and manifest of target rev,
2776 2777 # so have to walk both. do not print errors if files exist in one
2777 2778 # but not the other. in both cases, filesets should be evaluated against
2778 2779 # workingctx to get consistent result (issue4497). this means 'set:**'
2779 2780 # cannot be used to select missing files from target rev.
2780 2781
2781 2782 # `names` is a mapping for all elements in working copy and target revision
2782 2783 # The mapping is in the form:
2783 2784 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2784 2785 names = {}
2785 2786 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2786 2787
2787 2788 with repo.wlock():
2788 2789 ## filling of the `names` mapping
2789 2790 # walk dirstate to fill `names`
2790 2791
2791 2792 interactive = opts.get('interactive', False)
2792 2793 wctx = repo[None]
2793 2794 m = scmutil.match(wctx, pats, opts)
2794 2795
2795 2796 # we'll need this later
2796 2797 targetsubs = sorted(s for s in wctx.substate if m(s))
2797 2798
2798 2799 if not m.always():
2799 2800 matcher = matchmod.badmatch(m, lambda x, y: False)
2800 2801 for abs in wctx.walk(matcher):
2801 2802 names[abs] = m.exact(abs)
2802 2803
2803 2804 # walk target manifest to fill `names`
2804 2805
2805 2806 def badfn(path, msg):
2806 2807 if path in names:
2807 2808 return
2808 2809 if path in ctx.substate:
2809 2810 return
2810 2811 path_ = path + '/'
2811 2812 for f in names:
2812 2813 if f.startswith(path_):
2813 2814 return
2814 2815 ui.warn("%s: %s\n" % (m.rel(path), msg))
2815 2816
2816 2817 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2817 2818 if abs not in names:
2818 2819 names[abs] = m.exact(abs)
2819 2820
2820 2821 # Find status of all file in `names`.
2821 2822 m = scmutil.matchfiles(repo, names)
2822 2823
2823 2824 changes = repo.status(node1=node, match=m,
2824 2825 unknown=True, ignored=True, clean=True)
2825 2826 else:
2826 2827 changes = repo.status(node1=node, match=m)
2827 2828 for kind in changes:
2828 2829 for abs in kind:
2829 2830 names[abs] = m.exact(abs)
2830 2831
2831 2832 m = scmutil.matchfiles(repo, names)
2832 2833
2833 2834 modified = set(changes.modified)
2834 2835 added = set(changes.added)
2835 2836 removed = set(changes.removed)
2836 2837 _deleted = set(changes.deleted)
2837 2838 unknown = set(changes.unknown)
2838 2839 unknown.update(changes.ignored)
2839 2840 clean = set(changes.clean)
2840 2841 modadded = set()
2841 2842
2842 2843 # We need to account for the state of the file in the dirstate,
2843 2844 # even when we revert against something other than the parent. This will
2844 2845 # slightly alter the behavior of revert (doing a backup or not, deleting
2845 2846 # or just forgetting, etc.).
2846 2847 if parent == node:
2847 2848 dsmodified = modified
2848 2849 dsadded = added
2849 2850 dsremoved = removed
2850 2851 # store all local modifications, useful later for rename detection
2851 2852 localchanges = dsmodified | dsadded
2852 2853 modified, added, removed = set(), set(), set()
2853 2854 else:
2854 2855 changes = repo.status(node1=parent, match=m)
2855 2856 dsmodified = set(changes.modified)
2856 2857 dsadded = set(changes.added)
2857 2858 dsremoved = set(changes.removed)
2858 2859 # store all local modifications, useful later for rename detection
2859 2860 localchanges = dsmodified | dsadded
2860 2861
2861 2862 # only take removes between wc and target into account
2862 2863 clean |= dsremoved - removed
2863 2864 dsremoved &= removed
2864 2865 # distinguish between dirstate removes and other removes
2865 2866 removed -= dsremoved
2866 2867
2867 2868 modadded = added & dsmodified
2868 2869 added -= modadded
2869 2870
2870 2871 # tell newly modified files apart.
2871 2872 dsmodified &= modified
2872 2873 dsmodified |= modified & dsadded # dirstate added may need backup
2873 2874 modified -= dsmodified
2874 2875
2875 2876 # We need to wait for some post-processing to update this set
2876 2877 # before making the distinction. The dirstate will be used for
2877 2878 # that purpose.
2878 2879 dsadded = added
2879 2880
2880 2881 # in case of merge, files that are actually added can be reported as
2881 2882 # modified, so we need to post-process the result
2882 2883 if p2 != nullid:
2883 2884 mergeadd = set(dsmodified)
2884 2885 for path in dsmodified:
2885 2886 if path in mf:
2886 2887 mergeadd.remove(path)
2887 2888 dsadded |= mergeadd
2888 2889 dsmodified -= mergeadd
2889 2890
2890 2891 # if f is a rename, update `names` to also revert the source
2891 2892 for f in localchanges:
2892 2893 src = repo.dirstate.copied(f)
2893 2894 # XXX should we check for rename down to target node?
2894 2895 if src and src not in names and repo.dirstate[src] == 'r':
2895 2896 dsremoved.add(src)
2896 2897 names[src] = True
2897 2898
2898 2899 # determine the exact nature of the deleted files
2899 2900 deladded = set(_deleted)
2900 2901 for path in _deleted:
2901 2902 if path in mf:
2902 2903 deladded.remove(path)
2903 2904 deleted = _deleted - deladded
2904 2905
2905 2906 # distinguish between files to forget and the others
2906 2907 added = set()
2907 2908 for abs in dsadded:
2908 2909 if repo.dirstate[abs] != 'a':
2909 2910 added.add(abs)
2910 2911 dsadded -= added
2911 2912
2912 2913 for abs in deladded:
2913 2914 if repo.dirstate[abs] == 'a':
2914 2915 dsadded.add(abs)
2915 2916 deladded -= dsadded
2916 2917
2917 2918 # For files marked as removed, we check if an unknown file is present at
2918 2919 # the same path. If such a file exists, it may need to be backed up.
2919 2920 # Making the distinction at this stage helps keep the backup
2920 2921 # logic simpler.
2921 2922 removunk = set()
2922 2923 for abs in removed:
2923 2924 target = repo.wjoin(abs)
2924 2925 if os.path.lexists(target):
2925 2926 removunk.add(abs)
2926 2927 removed -= removunk
2927 2928
2928 2929 dsremovunk = set()
2929 2930 for abs in dsremoved:
2930 2931 target = repo.wjoin(abs)
2931 2932 if os.path.lexists(target):
2932 2933 dsremovunk.add(abs)
2933 2934 dsremoved -= dsremovunk
2934 2935
2936 2937 # actions to be actually performed by revert
2937 2938 # (<list of files>, <message>) tuple
2937 2938 actions = {'revert': ([], _('reverting %s\n')),
2938 2939 'add': ([], _('adding %s\n')),
2939 2940 'remove': ([], _('removing %s\n')),
2940 2941 'drop': ([], _('removing %s\n')),
2941 2942 'forget': ([], _('forgetting %s\n')),
2942 2943 'undelete': ([], _('undeleting %s\n')),
2943 2944 'noop': (None, _('no changes needed to %s\n')),
2944 2945 'unknown': (None, _('file not managed: %s\n')),
2945 2946 }
2946 2947
2947 2948 # "constant" that convey the backup strategy.
2948 2949 # All set to `discard` if `no-backup` is set do avoid checking
2949 2950 # no_backup lower in the code.
2950 2951 # These values are ordered for comparison purposes
2951 2952 backupinteractive = 3 # do backup if interactively modified
2952 2953 backup = 2 # unconditionally do backup
2953 2954 check = 1 # check if the existing file differs from target
2954 2955 discard = 0 # never do backup
2955 2956 if opts.get('no_backup'):
2956 2957 backupinteractive = backup = check = discard
2957 2958 if interactive:
2958 2959 dsmodifiedbackup = backupinteractive
2959 2960 else:
2960 2961 dsmodifiedbackup = backup
2961 2962 tobackup = set()
2962 2963
2963 2964 backupanddel = actions['remove']
2964 2965 if not opts.get('no_backup'):
2965 2966 backupanddel = actions['drop']
2966 2967
2967 2968 disptable = (
2968 2969 # dispatch table:
2969 2970 # file state
2970 2971 # action
2971 2972 # make backup
2972 2973
2973 2974 ## Sets that will result in changes to files on disk
2974 2975 # Modified compared to target, no local change
2975 2976 (modified, actions['revert'], discard),
2976 2977 # Modified compared to target, but local file is deleted
2977 2978 (deleted, actions['revert'], discard),
2978 2979 # Modified compared to target, local change
2979 2980 (dsmodified, actions['revert'], dsmodifiedbackup),
2980 2981 # Added since target
2981 2982 (added, actions['remove'], discard),
2982 2983 # Added in working directory
2983 2984 (dsadded, actions['forget'], discard),
2984 2985 # Added since target, have local modification
2985 2986 (modadded, backupanddel, backup),
2986 2987 # Added since target but file is missing in working directory
2987 2988 (deladded, actions['drop'], discard),
2988 2989 # Removed since target, before working copy parent
2989 2990 (removed, actions['add'], discard),
2990 2991 # Same as `removed` but an unknown file exists at the same path
2991 2992 (removunk, actions['add'], check),
2992 2993 # Removed since target, marked as such in working copy parent
2993 2994 (dsremoved, actions['undelete'], discard),
2994 2995 # Same as `dsremoved` but an unknown file exists at the same path
2995 2996 (dsremovunk, actions['undelete'], check),
2996 2997 ## the following sets do not result in any file changes
2997 2998 # File with no modification
2998 2999 (clean, actions['noop'], discard),
2999 3000 # Existing file, not tracked anywhere
3000 3001 (unknown, actions['unknown'], discard),
3001 3002 )
3002 3003
3003 3004 for abs, exact in sorted(names.items()):
3004 3005 # target file to be touched on disk (relative to cwd)
3005 3006 target = repo.wjoin(abs)
3006 3007 # search the entry in the dispatch table.
3007 3008 # if the file is in any of these sets, it was touched in the working
3008 3009 # directory parent and we are sure it needs to be reverted.
3009 3010 for table, (xlist, msg), dobackup in disptable:
3010 3011 if abs not in table:
3011 3012 continue
3012 3013 if xlist is not None:
3013 3014 xlist.append(abs)
3014 3015 if dobackup:
3015 3016 # If in interactive mode, don't automatically create
3016 3017 # .orig files (issue4793)
3017 3018 if dobackup == backupinteractive:
3018 3019 tobackup.add(abs)
3019 3020 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3020 3021 absbakname = scmutil.backuppath(ui, repo, abs)
3021 3022 bakname = os.path.relpath(absbakname,
3022 3023 start=repo.root)
3023 3024 ui.note(_('saving current version of %s as %s\n') %
3024 3025 (uipathfn(abs), uipathfn(bakname)))
3025 3026 if not opts.get('dry_run'):
3026 3027 if interactive:
3027 3028 util.copyfile(target, absbakname)
3028 3029 else:
3029 3030 util.rename(target, absbakname)
3030 3031 if opts.get('dry_run'):
3031 3032 if ui.verbose or not exact:
3032 3033 ui.status(msg % uipathfn(abs))
3033 3034 elif exact:
3034 3035 ui.warn(msg % uipathfn(abs))
3035 3036 break
3036 3037
3037 3038 if not opts.get('dry_run'):
3038 3039 needdata = ('revert', 'add', 'undelete')
3039 3040 oplist = [actions[name][0] for name in needdata]
3040 3041 prefetch = scmutil.prefetchfiles
3041 3042 matchfiles = scmutil.matchfiles
3042 3043 prefetch(repo, [ctx.rev()],
3043 3044 matchfiles(repo,
3044 3045 [f for sublist in oplist for f in sublist]))
3045 3046 _performrevert(repo, parents, ctx, names, uipathfn, actions,
3046 3047 interactive, tobackup)
3047 3048
3048 3049 if targetsubs:
3049 3050 # Revert the subrepos on the revert list
3050 3051 for sub in targetsubs:
3051 3052 try:
3052 3053 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3053 3054 **pycompat.strkwargs(opts))
3054 3055 except KeyError:
3055 3056 raise error.Abort("subrepository '%s' does not exist in %s!"
3056 3057 % (sub, short(ctx.node())))
3057 3058
3058 3059 def _performrevert(repo, parents, ctx, names, uipathfn, actions,
3059 3060 interactive=False, tobackup=None):
3060 3061 """function that actually perform all the actions computed for revert
3061 3062
3062 3063 This is an independent function to let extensions plug in and react to
3063 3064 the imminent revert.
3064 3065
3065 3066 Make sure you have the working directory locked when calling this function.
3066 3067 """
3067 3068 parent, p2 = parents
3068 3069 node = ctx.node()
3069 3070 excluded_files = []
3070 3071
3071 3072 def checkout(f):
3072 3073 fc = ctx[f]
3073 3074 repo.wwrite(f, fc.data(), fc.flags())
3074 3075
3075 3076 def doremove(f):
3076 3077 try:
3077 3078 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3078 3079 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3079 3080 except OSError:
3080 3081 pass
3081 3082 repo.dirstate.remove(f)
3082 3083
3083 3084 def prntstatusmsg(action, f):
3084 3085 exact = names[f]
3085 3086 if repo.ui.verbose or not exact:
3086 3087 repo.ui.status(actions[action][1] % uipathfn(f))
3087 3088
3088 3089 audit_path = pathutil.pathauditor(repo.root, cached=True)
3089 3090 for f in actions['forget'][0]:
3090 3091 if interactive:
3091 3092 choice = repo.ui.promptchoice(
3092 3093 _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3093 3094 if choice == 0:
3094 3095 prntstatusmsg('forget', f)
3095 3096 repo.dirstate.drop(f)
3096 3097 else:
3097 3098 excluded_files.append(f)
3098 3099 else:
3099 3100 prntstatusmsg('forget', f)
3100 3101 repo.dirstate.drop(f)
3101 3102 for f in actions['remove'][0]:
3102 3103 audit_path(f)
3103 3104 if interactive:
3104 3105 choice = repo.ui.promptchoice(
3105 3106 _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3106 3107 if choice == 0:
3107 3108 prntstatusmsg('remove', f)
3108 3109 doremove(f)
3109 3110 else:
3110 3111 excluded_files.append(f)
3111 3112 else:
3112 3113 prntstatusmsg('remove', f)
3113 3114 doremove(f)
3114 3115 for f in actions['drop'][0]:
3115 3116 audit_path(f)
3116 3117 prntstatusmsg('drop', f)
3117 3118 repo.dirstate.remove(f)
3118 3119
3119 3120 normal = None
3120 3121 if node == parent:
3121 3122 # We're reverting to our parent. If possible, we'd like status
3122 3123 # to report the file as clean. We have to use normallookup for
3123 3124 # merges to avoid losing information about merged/dirty files.
3124 3125 if p2 != nullid:
3125 3126 normal = repo.dirstate.normallookup
3126 3127 else:
3127 3128 normal = repo.dirstate.normal
3128 3129
3129 3130 newlyaddedandmodifiedfiles = set()
3130 3131 if interactive:
3131 3132 # Prompt the user for changes to revert
3132 3133 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3133 3134 m = scmutil.matchfiles(repo, torevert)
3134 3135 diffopts = patch.difffeatureopts(repo.ui, whitespace=True,
3135 3136 section='commands',
3136 3137 configprefix='revert.interactive.')
3137 3138 diffopts.nodates = True
3138 3139 diffopts.git = True
3139 3140 operation = 'discard'
3140 3141 reversehunks = True
3141 3142 if node != parent:
3142 3143 operation = 'apply'
3143 3144 reversehunks = False
3144 3145 if reversehunks:
3145 3146 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3146 3147 else:
3147 3148 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3148 3149 originalchunks = patch.parsepatch(diff)
3149 3150
3150 3151 try:
3151 3152
3152 3153 chunks, opts = recordfilter(repo.ui, originalchunks,
3153 3154 operation=operation)
3154 3155 if reversehunks:
3155 3156 chunks = patch.reversehunks(chunks)
3156 3157
3157 3158 except error.PatchError as err:
3158 3159 raise error.Abort(_('error parsing patch: %s') % err)
3159 3160
3160 3161 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3161 3162 if tobackup is None:
3162 3163 tobackup = set()
3163 3164 # Apply changes
3164 3165 fp = stringio()
3165 3166 # chunks are serialized per file, but files aren't sorted
3166 3167 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3167 3168 prntstatusmsg('revert', f)
3168 3169 for c in chunks:
3169 3170 if ishunk(c):
3170 3171 abs = c.header.filename()
3171 3172 # Create a backup file only if this hunk should be backed up
3172 3173 if c.header.filename() in tobackup:
3173 3174 target = repo.wjoin(abs)
3174 3175 bakname = scmutil.backuppath(repo.ui, repo, abs)
3175 3176 util.copyfile(target, bakname)
3176 3177 tobackup.remove(abs)
3177 3178 c.write(fp)
3178 3179 dopatch = fp.tell()
3179 3180 fp.seek(0)
3180 3181 if dopatch:
3181 3182 try:
3182 3183 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3183 3184 except error.PatchError as err:
3184 3185 raise error.Abort(pycompat.bytestr(err))
3185 3186 del fp
3186 3187 else:
3187 3188 for f in actions['revert'][0]:
3188 3189 prntstatusmsg('revert', f)
3189 3190 checkout(f)
3190 3191 if normal:
3191 3192 normal(f)
3192 3193
3193 3194 for f in actions['add'][0]:
3194 3195 # Don't checkout modified files, they are already created by the diff
3195 3196 if f not in newlyaddedandmodifiedfiles:
3196 3197 prntstatusmsg('add', f)
3197 3198 checkout(f)
3198 3199 repo.dirstate.add(f)
3199 3200
3200 3201 normal = repo.dirstate.normallookup
3201 3202 if node == parent and p2 == nullid:
3202 3203 normal = repo.dirstate.normal
3203 3204 for f in actions['undelete'][0]:
3204 3205 if interactive:
3205 3206 choice = repo.ui.promptchoice(
3206 3207 _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f)
3207 3208 if choice == 0:
3208 3209 prntstatusmsg('undelete', f)
3209 3210 checkout(f)
3210 3211 normal(f)
3211 3212 else:
3212 3213 excluded_files.append(f)
3213 3214 else:
3214 3215 prntstatusmsg('undelete', f)
3215 3216 checkout(f)
3216 3217 normal(f)
3217 3218
3218 3219 copied = copies.pathcopies(repo[parent], ctx)
3219 3220
3220 3221 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3221 3222 if f in copied:
3222 3223 repo.dirstate.copy(copied[f], f)
3223 3224
3224 3225 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3225 3226 # commands.outgoing. "missing" is "missing" of the result of
3226 3227 # "findcommonoutgoing()"
3227 3228 outgoinghooks = util.hooks()
3228 3229
3229 3230 # a list of (ui, repo) functions called by commands.summary
3230 3231 summaryhooks = util.hooks()
3231 3232
3232 3233 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3233 3234 #
3234 3235 # functions should return tuple of booleans below, if 'changes' is None:
3235 3236 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3236 3237 #
3237 3238 # otherwise, 'changes' is a tuple of tuples below:
3238 3239 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3239 3240 # - (desturl, destbranch, destpeer, outgoing)
3240 3241 summaryremotehooks = util.hooks()
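
As an illustrative sketch (not part of this changeset), an extension could register a callback matching the contract described above; the extension name and function below are hypothetical:

    def examplesummaryhook(ui, repo, opts, changes):
        if changes is None:
            # first pass: state whether incoming/outgoing data is needed
            return (True, True)
        sourceurl, sourcebranch, sourcepeer, incoming = changes[0]
        desturl, destbranch, destpeer, outgoing = changes[1]
        ui.note('example summary hook: %s -> %s\n' % (sourceurl, desturl))

    summaryremotehooks.add('exampleext', examplesummaryhook)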
3241 3242
3242 3243 # A list of state files kept by multistep operations like graft.
3243 3244 # Since graft cannot be aborted, it is considered 'clearable' by update.
3244 3245 # note: bisect is intentionally excluded
3245 3246 # (state file, clearable, allowcommit, error, hint)
3246 3247 unfinishedstates = [
3247 3248 ('graftstate', True, False, _('graft in progress'),
3248 3249 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3249 3250 ('updatestate', True, False, _('last update was interrupted'),
3250 3251 _("use 'hg update' to get a consistent checkout"))
3251 3252 ]
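
For illustration only (not from this changeset), an extension that tracks its own multistep state file would append an entry using the same (state file, clearable, allowcommit, error, hint) layout; the file name and messages below are hypothetical:

    unfinishedstates.append(
        ('examplestate', True, False, _('example operation in progress'),
         _("use 'hg example --continue' to resume")))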
3252 3253
3253 3254 def checkunfinished(repo, commit=False):
3254 3255 '''Look for an unfinished multistep operation, like graft, and abort
3255 3256 if found. It's probably good to check this right before
3256 3257 bailifchanged().
3257 3258 '''
3258 3259 # Check for non-clearable states first, so things like rebase will take
3259 3260 # precedence over update.
3260 3261 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3261 3262 if clearable or (commit and allowcommit):
3262 3263 continue
3263 3264 if repo.vfs.exists(f):
3264 3265 raise error.Abort(msg, hint=hint)
3265 3266
3266 3267 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3267 3268 if not clearable or (commit and allowcommit):
3268 3269 continue
3269 3270 if repo.vfs.exists(f):
3270 3271 raise error.Abort(msg, hint=hint)
3271 3272
3272 3273 def clearunfinished(repo):
3273 3274 '''Check for unfinished operations (as above), and clear the ones
3274 3275 that are clearable.
3275 3276 '''
3276 3277 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3277 3278 if not clearable and repo.vfs.exists(f):
3278 3279 raise error.Abort(msg, hint=hint)
3279 3280 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3280 3281 if clearable and repo.vfs.exists(f):
3281 3282 util.unlink(repo.vfs.join(f))
3282 3283
3283 3284 afterresolvedstates = [
3284 3285 ('graftstate',
3285 3286 _('hg graft --continue')),
3286 3287 ]
3287 3288
3288 3289 def howtocontinue(repo):
3289 3290 '''Check for an unfinished operation and return the command to finish
3290 3291 it.
3291 3292
3292 3293 afterresolvedstates tuples define a .hg/{file} and the corresponding
3293 3294 command needed to finish it.
3294 3295
3295 3296 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3296 3297 a boolean.
3297 3298 '''
3298 3299 contmsg = _("continue: %s")
3299 3300 for f, msg in afterresolvedstates:
3300 3301 if repo.vfs.exists(f):
3301 3302 return contmsg % msg, True
3302 3303 if repo[None].dirty(missing=True, merge=False, branch=False):
3303 3304 return contmsg % _("hg commit"), False
3304 3305 return None, None
3305 3306
3306 3307 def checkafterresolved(repo):
3307 3308 '''Inform the user about the next action after completing hg resolve
3308 3309
3309 3310 If there's a matching entry in afterresolvedstates, the message is
3310 3311 reported via repo.ui.warn.
3311 3312
3312 3313 Otherwise, it is reported via repo.ui.note.
3313 3314 '''
3314 3315 msg, warning = howtocontinue(repo)
3315 3316 if msg is not None:
3316 3317 if warning:
3317 3318 repo.ui.warn("%s\n" % msg)
3318 3319 else:
3319 3320 repo.ui.note("%s\n" % msg)
3320 3321
3321 3322 def wrongtooltocontinue(repo, task):
3322 3323 '''Raise an abort suggesting how to properly continue if there is an
3323 3324 active task.
3324 3325
3325 3326 Uses howtocontinue() to find the active task.
3326 3327
3327 3328 If there's no active task (including when the only suggestion would be
3328 3329 a plain 'hg commit'), it does not offer a hint.
3329 3330 '''
3330 3331 after = howtocontinue(repo)
3331 3332 hint = None
3332 3333 if after[1]:
3333 3334 hint = after[0]
3334 3335 raise error.Abort(_('no %s in progress') % task, hint=hint)
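
Taken together with bailifchanged() (mentioned in checkunfinished()'s docstring), a command that must not run while a multistep operation is pending would typically gate itself like this; a hedged sketch, not code from this changeset:

    def examplecommand(ui, repo, **opts):
        cmdutil.checkunfinished(repo)  # abort if e.g. a graft is in progress
        cmdutil.bailifchanged(repo)    # abort on uncommitted local changes
        # actual command work goes here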
@@ -1,1846 +1,1844 b''
1 1 # subrepo.py - sub-repository classes and factory
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22 from .i18n import _
23 23 from . import (
24 24 cmdutil,
25 25 encoding,
26 26 error,
27 27 exchange,
28 28 logcmdutil,
29 29 match as matchmod,
30 30 node,
31 31 pathutil,
32 32 phases,
33 33 pycompat,
34 34 scmutil,
35 35 subrepoutil,
36 36 util,
37 37 vfs as vfsmod,
38 38 )
39 39 from .utils import (
40 40 dateutil,
41 41 procutil,
42 42 stringutil,
43 43 )
44 44
45 45 hg = None
46 46 reporelpath = subrepoutil.reporelpath
47 47 subrelpath = subrepoutil.subrelpath
48 48 _abssource = subrepoutil._abssource
49 49 propertycache = util.propertycache
50 50
51 51 def _expandedabspath(path):
52 52 '''
53 53 get a path or URL; if it is a path, expand it and return an absolute path
54 54 '''
55 55 expandedpath = util.urllocalpath(util.expandpath(path))
56 56 u = util.url(expandedpath)
57 57 if not u.scheme:
58 58 path = util.normpath(os.path.abspath(u.path))
59 59 return path
60 60
61 61 def _getstorehashcachename(remotepath):
62 62 '''get a unique filename for the store hash cache of a remote repository'''
63 63 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64 64
65 65 class SubrepoAbort(error.Abort):
66 66 """Exception class used to avoid handling a subrepo error more than once"""
67 67 def __init__(self, *args, **kw):
68 68 self.subrepo = kw.pop(r'subrepo', None)
69 69 self.cause = kw.pop(r'cause', None)
70 70 error.Abort.__init__(self, *args, **kw)
71 71
72 72 def annotatesubrepoerror(func):
73 73 def decoratedmethod(self, *args, **kargs):
74 74 try:
75 75 res = func(self, *args, **kargs)
76 76 except SubrepoAbort as ex:
77 77 # This exception has already been handled
78 78 raise ex
79 79 except error.Abort as ex:
80 80 subrepo = subrelpath(self)
81 81 errormsg = (stringutil.forcebytestr(ex) + ' '
82 82 + _('(in subrepository "%s")') % subrepo)
83 83 # avoid handling this exception by raising a SubrepoAbort exception
84 84 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
85 85 cause=sys.exc_info())
86 86 return res
87 87 return decoratedmethod
88 88
89 89 def _updateprompt(ui, sub, dirty, local, remote):
90 90 if dirty:
91 91 msg = (_(' subrepository sources for %s differ\n'
92 92 'use (l)ocal source (%s) or (r)emote source (%s)?'
93 93 '$$ &Local $$ &Remote')
94 94 % (subrelpath(sub), local, remote))
95 95 else:
96 96 msg = (_(' subrepository sources for %s differ (in checked out '
97 97 'version)\n'
98 98 'use (l)ocal source (%s) or (r)emote source (%s)?'
99 99 '$$ &Local $$ &Remote')
100 100 % (subrelpath(sub), local, remote))
101 101 return ui.promptchoice(msg, 0)
102 102
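The prompt above relies on ui.promptchoice(): the text before the first '$$' is the question, each following '$$'-separated item is one choice, '&' marks the response key, and the zero-based index of the chosen item is returned (the second argument is the default index). A minimal sketch in the same idiom, with a made-up prompt:

    # 0 means '&Yes', 1 means '&No'; 0 is also the default here
    choice = ui.promptchoice(_('apply the example change (Yn)?'
                               '$$ &Yes $$ &No'), 0)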
103 103 def _sanitize(ui, vfs, ignore):
104 104 for dirname, dirs, names in vfs.walk():
105 105 for i, d in enumerate(dirs):
106 106 if d.lower() == ignore:
107 107 del dirs[i]
108 108 break
109 109 if vfs.basename(dirname).lower() != '.hg':
110 110 continue
111 111 for f in names:
112 112 if f.lower() == 'hgrc':
113 113 ui.warn(_("warning: removing potentially hostile 'hgrc' "
114 114 "in '%s'\n") % vfs.join(dirname))
115 115 vfs.unlink(vfs.reljoin(dirname, f))
116 116
117 117 def _auditsubrepopath(repo, path):
118 118 # sanity check for potentially unsafe paths such as '~' and '$FOO'
119 119 if path.startswith('~') or '$' in path or util.expandpath(path) != path:
120 120 raise error.Abort(_('subrepo path contains illegal component: %s')
121 121 % path)
122 122 # auditor doesn't check if the path itself is a symlink
123 123 pathutil.pathauditor(repo.root)(path)
124 124 if repo.wvfs.islink(path):
125 125 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
126 126
127 127 SUBREPO_ALLOWED_DEFAULTS = {
128 128 'hg': True,
129 129 'git': False,
130 130 'svn': False,
131 131 }
132 132
133 133 def _checktype(ui, kind):
134 134 # subrepos.allowed is a master kill switch. If disabled, subrepos are
135 135 # disabled period.
136 136 if not ui.configbool('subrepos', 'allowed', True):
137 137 raise error.Abort(_('subrepos not enabled'),
138 138 hint=_("see 'hg help config.subrepos' for details"))
139 139
140 140 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
141 141 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
142 142 raise error.Abort(_('%s subrepos not allowed') % kind,
143 143 hint=_("see 'hg help config.subrepos' for details"))
144 144
145 145 if kind not in types:
146 146 raise error.Abort(_('unknown subrepo type %s') % kind)
147 147
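These checks read the [subrepos] section of the user's configuration; enabling Git subrepositories in addition to the default Mercurial ones would look roughly like this in an hgrc (shown only as an illustration; see 'hg help config.subrepos'):

    [subrepos]
    allowed = true
    hg:allowed = true
    git:allowed = true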
148 148 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
149 149 """return instance of the right subrepo class for subrepo in path"""
150 150 # subrepo inherently violates our import layering rules
151 151 # because it wants to make repo objects from deep inside the stack
152 152 # so we manually delay the circular imports to not break
153 153 # scripts that don't use our demand-loading
154 154 global hg
155 155 from . import hg as h
156 156 hg = h
157 157
158 158 repo = ctx.repo()
159 159 _auditsubrepopath(repo, path)
160 160 state = ctx.substate[path]
161 161 _checktype(repo.ui, state[2])
162 162 if allowwdir:
163 163 state = (state[0], ctx.subrev(path), state[2])
164 164 return types[state[2]](ctx, path, state[:2], allowcreate)
165 165
166 166 def nullsubrepo(ctx, path, pctx):
167 167 """return an empty subrepo in pctx for the extant subrepo in ctx"""
168 168 # subrepo inherently violates our import layering rules
169 169 # because it wants to make repo objects from deep inside the stack
170 170 # so we manually delay the circular imports to not break
171 171 # scripts that don't use our demand-loading
172 172 global hg
173 173 from . import hg as h
174 174 hg = h
175 175
176 176 repo = ctx.repo()
177 177 _auditsubrepopath(repo, path)
178 178 state = ctx.substate[path]
179 179 _checktype(repo.ui, state[2])
180 180 subrev = ''
181 181 if state[2] == 'hg':
182 182 subrev = "0" * 40
183 183 return types[state[2]](pctx, path, (state[0], subrev), True)
184 184
185 185 # subrepo classes need to implement the following abstract class:
186 186
187 187 class abstractsubrepo(object):
188 188
189 189 def __init__(self, ctx, path):
190 190 """Initialize abstractsubrepo part
191 191
192 192 ``ctx`` is the context referring to this subrepository in the
193 193 parent repository.
194 194
195 195 ``path`` is the path to this subrepository as seen from
196 196 innermost repository.
197 197 """
198 198 self.ui = ctx.repo().ui
199 199 self._ctx = ctx
200 200 self._path = path
201 201
202 202 def addwebdirpath(self, serverpath, webconf):
203 203 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
204 204
205 205 ``serverpath`` is the path component of the URL for this repo.
206 206
207 207 ``webconf`` is the dictionary of hgwebdir entries.
208 208 """
209 209 pass
210 210
211 211 def storeclean(self, path):
212 212 """
213 213 returns true if the repository has not changed since it was last
214 214 cloned from or pushed to a given repository.
215 215 """
216 216 return False
217 217
218 218 def dirty(self, ignoreupdate=False, missing=False):
219 219 """returns true if the dirstate of the subrepo is dirty or does not
220 220 match current stored state. If ignoreupdate is true, only check
221 221 whether the subrepo has uncommitted changes in its dirstate. If missing
222 222 is true, check for deleted files.
223 223 """
224 224 raise NotImplementedError
225 225
226 226 def dirtyreason(self, ignoreupdate=False, missing=False):
227 227 """return reason string if it is ``dirty()``
228 228
229 229 The returned string should have enough information for the
230 230 exception message.
231 231
232 232 Otherwise, this returns None.
233 233 """
234 234 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
235 235 return _('uncommitted changes in subrepository "%s"'
236 236 ) % subrelpath(self)
237 237
238 238 def bailifchanged(self, ignoreupdate=False, hint=None):
239 239 """raise Abort if subrepository is ``dirty()``
240 240 """
241 241 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
242 242 missing=True)
243 243 if dirtyreason:
244 244 raise error.Abort(dirtyreason, hint=hint)
245 245
246 246 def basestate(self):
247 247 """current working directory base state, disregarding .hgsubstate
248 248 state and working directory modifications"""
249 249 raise NotImplementedError
250 250
251 251 def checknested(self, path):
252 252 """check if path is a subrepository within this repository"""
253 253 return False
254 254
255 255 def commit(self, text, user, date):
256 256 """commit the current changes to the subrepo with the given
257 257 log message. Use given user and date if possible. Return the
258 258 new state of the subrepo.
259 259 """
260 260 raise NotImplementedError
261 261
262 262 def phase(self, state):
263 263 """returns phase of specified state in the subrepository.
264 264 """
265 265 return phases.public
266 266
267 267 def remove(self):
268 268 """remove the subrepo
269 269
270 270 (should verify the dirstate is not dirty first)
271 271 """
272 272 raise NotImplementedError
273 273
274 274 def get(self, state, overwrite=False):
275 275 """run whatever commands are needed to put the subrepo into
276 276 this state
277 277 """
278 278 raise NotImplementedError
279 279
280 280 def merge(self, state):
281 281 """merge currently-saved state with the new state."""
282 282 raise NotImplementedError
283 283
284 284 def push(self, opts):
285 285 """perform whatever action is analogous to 'hg push'
286 286
287 287 This may be a no-op on some systems.
288 288 """
289 289 raise NotImplementedError
290 290
291 291 def add(self, ui, match, prefix, explicitonly, **opts):
292 292 return []
293 293
294 294 def addremove(self, matcher, prefix, opts):
295 295 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
296 296 return 1
297 297
298 298 def cat(self, match, fm, fntemplate, prefix, **opts):
299 299 return 1
300 300
301 301 def status(self, rev2, **opts):
302 302 return scmutil.status([], [], [], [], [], [], [])
303 303
304 304 def diff(self, ui, diffopts, node2, match, prefix, **opts):
305 305 pass
306 306
307 307 def outgoing(self, ui, dest, opts):
308 308 return 1
309 309
310 310 def incoming(self, ui, source, opts):
311 311 return 1
312 312
313 313 def files(self):
314 314 """return filename iterator"""
315 315 raise NotImplementedError
316 316
317 317 def filedata(self, name, decode):
318 318 """return file data, optionally passed through repo decoders"""
319 319 raise NotImplementedError
320 320
321 321 def fileflags(self, name):
322 322 """return file flags"""
323 323 return ''
324 324
325 325 def matchfileset(self, expr, badfn=None):
326 326 """Resolve the fileset expression for this repo"""
327 327 return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn)
328 328
329 329 def printfiles(self, ui, m, fm, fmt, subrepos):
330 330 """handle the files command for this subrepo"""
331 331 return 1
332 332
333 333 def archive(self, archiver, prefix, match=None, decode=True):
334 334 if match is not None:
335 335 files = [f for f in self.files() if match(f)]
336 336 else:
337 337 files = self.files()
338 338 total = len(files)
339 339 relpath = subrelpath(self)
340 340 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
341 341 unit=_('files'), total=total)
342 342 progress.update(0)
343 343 for name in files:
344 344 flags = self.fileflags(name)
345 345 mode = 'x' in flags and 0o755 or 0o644
346 346 symlink = 'l' in flags
347 347 archiver.addfile(prefix + self._path + '/' + name,
348 348 mode, symlink, self.filedata(name, decode))
349 349 progress.increment()
350 350 progress.complete()
351 351 return total
352 352
353 353 def walk(self, match):
354 354 '''
355 355 walk recursively through the directory tree, finding all files
356 356 matched by the match function
357 357 '''
358 358
359 359 def forget(self, match, prefix, dryrun, interactive):
360 360 return ([], [])
361 361
362 362 def removefiles(self, matcher, prefix, after, force, subrepos,
363 363 dryrun, warnings):
364 364 """remove the matched files from the subrepository and the filesystem,
365 365 possibly by force and/or after the file has been removed from the
366 366 filesystem. Return 0 on success, 1 on any warning.
367 367 """
368 368 warnings.append(_("warning: removefiles not implemented (%s)")
369 369 % self._path)
370 370 return 1
371 371
372 372 def revert(self, substate, *pats, **opts):
373 373 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
374 374 % (substate[0], substate[2]))
375 375 return []
376 376
377 377 def shortid(self, revid):
378 378 return revid
379 379
380 380 def unshare(self):
381 381 '''
382 382 convert this repository from shared to normal storage.
383 383 '''
384 384
385 385 def verify(self):
386 386 '''verify the integrity of the repository. Return 0 on success or
387 387 warning, 1 on any error.
388 388 '''
389 389 return 0
390 390
391 391 @propertycache
392 392 def wvfs(self):
393 393 """return vfs to access the working directory of this subrepository
394 394 """
395 395 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
396 396
397 397 @propertycache
398 398 def _relpath(self):
399 399 """return path to this subrepository as seen from outermost repository
400 400 """
401 401 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
402 402
403 403 class hgsubrepo(abstractsubrepo):
404 404 def __init__(self, ctx, path, state, allowcreate):
405 405 super(hgsubrepo, self).__init__(ctx, path)
406 406 self._state = state
407 407 r = ctx.repo()
408 408 root = r.wjoin(path)
409 409 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
410 410 # repository constructor does expand variables in path, which is
411 411 # unsafe since subrepo path might come from untrusted source.
412 412 if os.path.realpath(util.expandpath(root)) != root:
413 413 raise error.Abort(_('subrepo path contains illegal component: %s')
414 414 % path)
415 415 self._repo = hg.repository(r.baseui, root, create=create)
416 416 if self._repo.root != root:
417 417 raise error.ProgrammingError('failed to reject unsafe subrepo '
418 418 'path: %s (expanded to %s)'
419 419 % (root, self._repo.root))
420 420
421 421 # Propagate the parent's --hidden option
422 422 if r is r.unfiltered():
423 423 self._repo = self._repo.unfiltered()
424 424
425 425 self.ui = self._repo.ui
426 426 for s, k in [('ui', 'commitsubrepos')]:
427 427 v = r.ui.config(s, k)
428 428 if v:
429 429 self.ui.setconfig(s, k, v, 'subrepo')
430 430 # internal config: ui._usedassubrepo
431 431 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
432 432 self._initrepo(r, state[0], create)
433 433
434 434 @annotatesubrepoerror
435 435 def addwebdirpath(self, serverpath, webconf):
436 436 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
437 437
438 438 def storeclean(self, path):
439 439 with self._repo.lock():
440 440 return self._storeclean(path)
441 441
442 442 def _storeclean(self, path):
443 443 clean = True
444 444 itercache = self._calcstorehash(path)
445 445 for filehash in self._readstorehashcache(path):
446 446 if filehash != next(itercache, None):
447 447 clean = False
448 448 break
449 449 if clean:
450 450 # if the iterator is not exhausted here, the cached and current
451 451 # store hashes have a different number of entries
452 452 clean = next(itercache, None) is None
453 453 return clean
454 454
455 455 def _calcstorehash(self, remotepath):
456 456 '''calculate a unique "store hash"
457 457
458 458 This method is used to detect when there are changes that may
459 459 require a push to a given remote path.'''
460 460 # sort the files that will be hashed in increasing (likely) file size
461 461 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
462 462 yield '# %s\n' % _expandedabspath(remotepath)
463 463 vfs = self._repo.vfs
464 464 for relname in filelist:
465 465 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
466 466 yield '%s = %s\n' % (relname, filehash)
467 467
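Concretely, the cache file written from this generator consists of a comment line naming the remote path followed by one 'name = sha1' line per tracked file; the path and digests below are made up:

    # /path/to/some/remote/repo
    bookmarks = da39a3ee5e6b4b0d3255bfef95601890afd80709
    store/phaseroots = da39a3ee5e6b4b0d3255bfef95601890afd80709
    store/00changelog.i = da39a3ee5e6b4b0d3255bfef95601890afd80709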
468 468 @propertycache
469 469 def _cachestorehashvfs(self):
470 470 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
471 471
472 472 def _readstorehashcache(self, remotepath):
473 473 '''read the store hash cache for a given remote repository'''
474 474 cachefile = _getstorehashcachename(remotepath)
475 475 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
476 476
477 477 def _cachestorehash(self, remotepath):
478 478 '''cache the current store hash
479 479
480 480 Each remote repo requires its own store hash cache, because a subrepo
481 481 store may be "clean" versus a given remote repo, but not versus another
482 482 '''
483 483 cachefile = _getstorehashcachename(remotepath)
484 484 with self._repo.lock():
485 485 storehash = list(self._calcstorehash(remotepath))
486 486 vfs = self._cachestorehashvfs
487 487 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
488 488
489 489 def _getctx(self):
490 490 '''fetch the context for this subrepo revision, possibly a workingctx
491 491 '''
492 492 if self._ctx.rev() is None:
493 493 return self._repo[None] # workingctx if parent is workingctx
494 494 else:
495 495 rev = self._state[1]
496 496 return self._repo[rev]
497 497
498 498 @annotatesubrepoerror
499 499 def _initrepo(self, parentrepo, source, create):
500 500 self._repo._subparent = parentrepo
501 501 self._repo._subsource = source
502 502
503 503 if create:
504 504 lines = ['[paths]\n']
505 505
506 506 def addpathconfig(key, value):
507 507 if value:
508 508 lines.append('%s = %s\n' % (key, value))
509 509 self.ui.setconfig('paths', key, value, 'subrepo')
510 510
511 511 defpath = _abssource(self._repo, abort=False)
512 512 defpushpath = _abssource(self._repo, True, abort=False)
513 513 addpathconfig('default', defpath)
514 514 if defpath != defpushpath:
515 515 addpathconfig('default-push', defpushpath)
516 516
517 517 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
518 518
519 519 @annotatesubrepoerror
520 520 def add(self, ui, match, prefix, explicitonly, **opts):
521 return cmdutil.add(ui, self._repo, match,
522 self.wvfs.reljoin(prefix, self._path),
523 explicitonly, **opts)
521 return cmdutil.add(ui, self._repo, match, prefix, explicitonly, **opts)
524 522
525 523 @annotatesubrepoerror
526 524 def addremove(self, m, prefix, opts):
527 525 # In the same way as sub directories are processed, once in a subrepo,
528 526 # always enter any of its subrepos. Don't corrupt the options that will
529 527 # be used to process sibling subrepos however.
530 528 opts = copy.copy(opts)
531 529 opts['subrepos'] = True
532 530 return scmutil.addremove(self._repo, m,
533 531 self.wvfs.reljoin(prefix, self._path), opts)
534 532
535 533 @annotatesubrepoerror
536 534 def cat(self, match, fm, fntemplate, prefix, **opts):
537 535 rev = self._state[1]
538 536 ctx = self._repo[rev]
539 537 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
540 538 prefix, **opts)
541 539
542 540 @annotatesubrepoerror
543 541 def status(self, rev2, **opts):
544 542 try:
545 543 rev1 = self._state[1]
546 544 ctx1 = self._repo[rev1]
547 545 ctx2 = self._repo[rev2]
548 546 return self._repo.status(ctx1, ctx2, **opts)
549 547 except error.RepoLookupError as inst:
550 548 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
551 549 % (inst, subrelpath(self)))
552 550 return scmutil.status([], [], [], [], [], [], [])
553 551
554 552 @annotatesubrepoerror
555 553 def diff(self, ui, diffopts, node2, match, prefix, **opts):
556 554 try:
557 555 node1 = node.bin(self._state[1])
558 556 # We currently expect node2 to come from substate and be
559 557 # in hex format
560 558 if node2 is not None:
561 559 node2 = node.bin(node2)
562 560 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
563 561 node1, node2, match,
564 562 prefix=posixpath.join(prefix, self._path),
565 563 listsubrepos=True, **opts)
566 564 except error.RepoLookupError as inst:
567 565 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
568 566 % (inst, subrelpath(self)))
569 567
570 568 @annotatesubrepoerror
571 569 def archive(self, archiver, prefix, match=None, decode=True):
572 570 self._get(self._state + ('hg',))
573 571 files = self.files()
574 572 if match:
575 573 files = [f for f in files if match(f)]
576 574 rev = self._state[1]
577 575 ctx = self._repo[rev]
578 576 scmutil.prefetchfiles(self._repo, [ctx.rev()],
579 577 scmutil.matchfiles(self._repo, files))
580 578 total = abstractsubrepo.archive(self, archiver, prefix, match)
581 579 for subpath in ctx.substate:
582 580 s = subrepo(ctx, subpath, True)
583 581 submatch = matchmod.subdirmatcher(subpath, match)
584 582 total += s.archive(archiver, prefix + self._path + '/', submatch,
585 583 decode)
586 584 return total
587 585
588 586 @annotatesubrepoerror
589 587 def dirty(self, ignoreupdate=False, missing=False):
590 588 r = self._state[1]
591 589 if r == '' and not ignoreupdate: # no state recorded
592 590 return True
593 591 w = self._repo[None]
594 592 if r != w.p1().hex() and not ignoreupdate:
595 593 # different version checked out
596 594 return True
597 595 return w.dirty(missing=missing) # working directory changed
598 596
599 597 def basestate(self):
600 598 return self._repo['.'].hex()
601 599
602 600 def checknested(self, path):
603 601 return self._repo._checknested(self._repo.wjoin(path))
604 602
605 603 @annotatesubrepoerror
606 604 def commit(self, text, user, date):
607 605 # don't bother committing in the subrepo if it's only been
608 606 # updated
609 607 if not self.dirty(True):
610 608 return self._repo['.'].hex()
611 609 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
612 610 n = self._repo.commit(text, user, date)
613 611 if not n:
614 612 return self._repo['.'].hex() # different version checked out
615 613 return node.hex(n)
616 614
617 615 @annotatesubrepoerror
618 616 def phase(self, state):
619 617 return self._repo[state or '.'].phase()
620 618
621 619 @annotatesubrepoerror
622 620 def remove(self):
623 621 # we can't fully delete the repository as it may contain
624 622 # local-only history
625 623 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
626 624 hg.clean(self._repo, node.nullid, False)
627 625
628 626 def _get(self, state):
629 627 source, revision, kind = state
630 628 parentrepo = self._repo._subparent
631 629
632 630 if revision in self._repo.unfiltered():
633 631 # Allow shared subrepos tracked at null to setup the sharedpath
634 632 if len(self._repo) != 0 or not parentrepo.shared():
635 633 return True
636 634 self._repo._subsource = source
637 635 srcurl = _abssource(self._repo)
638 636
639 637 # Defer creating the peer until after the status message is logged, in
640 638 # case there are network problems.
641 639 getpeer = lambda: hg.peer(self._repo, {}, srcurl)
642 640
643 641 if len(self._repo) == 0:
644 642 # use self._repo.vfs instead of self.wvfs to remove .hg only
645 643 self._repo.vfs.rmtree()
646 644
647 645 # A remote subrepo could be shared if there is a local copy
648 646 # relative to the parent's share source. But clone pooling doesn't
649 647 # assemble the repos in a tree, so that can't be consistently done.
650 648 # A simpler option is for the user to configure clone pooling, and
651 649 # work with that.
652 650 if parentrepo.shared() and hg.islocal(srcurl):
653 651 self.ui.status(_('sharing subrepo %s from %s\n')
654 652 % (subrelpath(self), srcurl))
655 653 shared = hg.share(self._repo._subparent.baseui,
656 654 getpeer(), self._repo.root,
657 655 update=False, bookmarks=False)
658 656 self._repo = shared.local()
659 657 else:
660 658 # TODO: find a common place for this code and the code in the
661 659 # share.py wrap of the clone command.
662 660 if parentrepo.shared():
663 661 pool = self.ui.config('share', 'pool')
664 662 if pool:
665 663 pool = util.expandpath(pool)
666 664
667 665 shareopts = {
668 666 'pool': pool,
669 667 'mode': self.ui.config('share', 'poolnaming'),
670 668 }
671 669 else:
672 670 shareopts = {}
673 671
674 672 self.ui.status(_('cloning subrepo %s from %s\n')
675 673 % (subrelpath(self), util.hidepassword(srcurl)))
676 674 other, cloned = hg.clone(self._repo._subparent.baseui, {},
677 675 getpeer(), self._repo.root,
678 676 update=False, shareopts=shareopts)
679 677 self._repo = cloned.local()
680 678 self._initrepo(parentrepo, source, create=True)
681 679 self._cachestorehash(srcurl)
682 680 else:
683 681 self.ui.status(_('pulling subrepo %s from %s\n')
684 682 % (subrelpath(self), util.hidepassword(srcurl)))
685 683 cleansub = self.storeclean(srcurl)
686 684 exchange.pull(self._repo, getpeer())
687 685 if cleansub:
688 686 # keep the repo clean after pull
689 687 self._cachestorehash(srcurl)
690 688 return False
691 689
692 690 @annotatesubrepoerror
693 691 def get(self, state, overwrite=False):
694 692 inrepo = self._get(state)
695 693 source, revision, kind = state
696 694 repo = self._repo
697 695 repo.ui.debug("getting subrepo %s\n" % self._path)
698 696 if inrepo:
699 697 urepo = repo.unfiltered()
700 698 ctx = urepo[revision]
701 699 if ctx.hidden():
702 700 urepo.ui.warn(
703 701 _('revision %s in subrepository "%s" is hidden\n') \
704 702 % (revision[0:12], self._path))
705 703 repo = urepo
706 704 hg.updaterepo(repo, revision, overwrite)
707 705
708 706 @annotatesubrepoerror
709 707 def merge(self, state):
710 708 self._get(state)
711 709 cur = self._repo['.']
712 710 dst = self._repo[state[1]]
713 711 anc = dst.ancestor(cur)
714 712
715 713 def mergefunc():
716 714 if anc == cur and dst.branch() == cur.branch():
717 715 self.ui.debug('updating subrepository "%s"\n'
718 716 % subrelpath(self))
719 717 hg.update(self._repo, state[1])
720 718 elif anc == dst:
721 719 self.ui.debug('skipping subrepository "%s"\n'
722 720 % subrelpath(self))
723 721 else:
724 722 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
725 723 hg.merge(self._repo, state[1], remind=False)
726 724
727 725 wctx = self._repo[None]
728 726 if self.dirty():
729 727 if anc != dst:
730 728 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
731 729 mergefunc()
732 730 else:
733 731 mergefunc()
734 732 else:
735 733 mergefunc()
736 734
737 735 @annotatesubrepoerror
738 736 def push(self, opts):
739 737 force = opts.get('force')
740 738 newbranch = opts.get('new_branch')
741 739 ssh = opts.get('ssh')
742 740
743 741 # push subrepos depth-first for coherent ordering
744 742 c = self._repo['.']
745 743 subs = c.substate # only repos that are committed
746 744 for s in sorted(subs):
747 745 if c.sub(s).push(opts) == 0:
748 746 return False
749 747
750 748 dsturl = _abssource(self._repo, True)
751 749 if not force:
752 750 if self.storeclean(dsturl):
753 751 self.ui.status(
754 752 _('no changes made to subrepo %s since last push to %s\n')
755 753 % (subrelpath(self), util.hidepassword(dsturl)))
756 754 return None
757 755 self.ui.status(_('pushing subrepo %s to %s\n') %
758 756 (subrelpath(self), util.hidepassword(dsturl)))
759 757 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
760 758 res = exchange.push(self._repo, other, force, newbranch=newbranch)
761 759
762 760 # the repo is now clean
763 761 self._cachestorehash(dsturl)
764 762 return res.cgresult
765 763
766 764 @annotatesubrepoerror
767 765 def outgoing(self, ui, dest, opts):
768 766 if 'rev' in opts or 'branch' in opts:
769 767 opts = copy.copy(opts)
770 768 opts.pop('rev', None)
771 769 opts.pop('branch', None)
772 770 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
773 771
774 772 @annotatesubrepoerror
775 773 def incoming(self, ui, source, opts):
776 774 if 'rev' in opts or 'branch' in opts:
777 775 opts = copy.copy(opts)
778 776 opts.pop('rev', None)
779 777 opts.pop('branch', None)
780 778 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
781 779
782 780 @annotatesubrepoerror
783 781 def files(self):
784 782 rev = self._state[1]
785 783 ctx = self._repo[rev]
786 784 return ctx.manifest().keys()
787 785
788 786 def filedata(self, name, decode):
789 787 rev = self._state[1]
790 788 data = self._repo[rev][name].data()
791 789 if decode:
792 790 data = self._repo.wwritedata(name, data)
793 791 return data
794 792
795 793 def fileflags(self, name):
796 794 rev = self._state[1]
797 795 ctx = self._repo[rev]
798 796 return ctx.flags(name)
799 797
800 798 @annotatesubrepoerror
801 799 def printfiles(self, ui, m, fm, fmt, subrepos):
802 800 # If the parent context is a workingctx, use the workingctx here for
803 801 # consistency.
804 802 if self._ctx.rev() is None:
805 803 ctx = self._repo[None]
806 804 else:
807 805 rev = self._state[1]
808 806 ctx = self._repo[rev]
809 807 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
810 808
811 809 @annotatesubrepoerror
812 810 def matchfileset(self, expr, badfn=None):
813 811 repo = self._repo
814 812 if self._ctx.rev() is None:
815 813 ctx = repo[None]
816 814 else:
817 815 rev = self._state[1]
818 816 ctx = repo[rev]
819 817
820 818 matchers = [ctx.matchfileset(expr, badfn=badfn)]
821 819
822 820 for subpath in ctx.substate:
823 821 sub = ctx.sub(subpath)
824 822
825 823 try:
826 824 sm = sub.matchfileset(expr, badfn=badfn)
827 825 pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(),
828 826 subpath, sm, badfn=badfn)
829 827 matchers.append(pm)
830 828 except error.LookupError:
831 829 self.ui.status(_("skipping missing subrepository: %s\n")
832 830 % self.wvfs.reljoin(reporelpath(self), subpath))
833 831 if len(matchers) == 1:
834 832 return matchers[0]
835 833 return matchmod.unionmatcher(matchers)
836 834
837 835 def walk(self, match):
838 836 ctx = self._repo[None]
839 837 return ctx.walk(match)
840 838
841 839 @annotatesubrepoerror
842 840 def forget(self, match, prefix, dryrun, interactive):
843 841 return cmdutil.forget(self.ui, self._repo, match, prefix,
844 842 True, dryrun=dryrun, interactive=interactive)
845 843
846 844 @annotatesubrepoerror
847 845 def removefiles(self, matcher, prefix, after, force, subrepos,
848 846 dryrun, warnings):
849 847 return cmdutil.remove(self.ui, self._repo, matcher, prefix,
850 848 after, force, subrepos, dryrun)
851 849
852 850 @annotatesubrepoerror
853 851 def revert(self, substate, *pats, **opts):
854 852 # reverting a subrepo is a 2-step process:
855 853 # 1. if no_backup is not set, revert all modified
856 854 # files inside the subrepo
857 855 # 2. update the subrepo to the revision specified in
858 856 # the corresponding substate dictionary
859 857 self.ui.status(_('reverting subrepo %s\n') % substate[0])
860 858 if not opts.get(r'no_backup'):
861 859 # Revert all files on the subrepo, creating backups
862 860 # Note that this will not recursively revert subrepos
863 861 # We could do it if there was a set:subrepos() predicate
864 862 opts = opts.copy()
865 863 opts[r'date'] = None
866 864 opts[r'rev'] = substate[1]
867 865
868 866 self.filerevert(*pats, **opts)
869 867
870 868 # Update the repo to the revision specified in the given substate
871 869 if not opts.get(r'dry_run'):
872 870 self.get(substate, overwrite=True)
873 871
874 872 def filerevert(self, *pats, **opts):
875 873 ctx = self._repo[opts[r'rev']]
876 874 parents = self._repo.dirstate.parents()
877 875 if opts.get(r'all'):
878 876 pats = ['set:modified()']
879 877 else:
880 878 pats = []
881 879 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
882 880
883 881 def shortid(self, revid):
884 882 return revid[:12]
885 883
886 884 @annotatesubrepoerror
887 885 def unshare(self):
888 886 # subrepo inherently violates our import layering rules
889 887 # because it wants to make repo objects from deep inside the stack
890 888 # so we manually delay the circular imports to not break
891 889 # scripts that don't use our demand-loading
892 890 global hg
893 891 from . import hg as h
894 892 hg = h
895 893
896 894 # Nothing prevents a user from sharing in a repo, and then making that a
897 895 # subrepo. Alternately, the previous unshare attempt may have failed
898 896 # part way through. So recurse whether or not this layer is shared.
899 897 if self._repo.shared():
900 898 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
901 899
902 900 hg.unshare(self.ui, self._repo)
903 901
904 902 def verify(self):
905 903 try:
906 904 rev = self._state[1]
907 905 ctx = self._repo.unfiltered()[rev]
908 906 if ctx.hidden():
909 907 # Since hidden revisions aren't pushed/pulled, it seems worth an
910 908 # explicit warning.
911 909 ui = self._repo.ui
912 910 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
913 911 (self._relpath, node.short(self._ctx.node())))
914 912 return 0
915 913 except error.RepoLookupError:
916 914 # A missing subrepo revision may be a case of needing to pull it, so
917 915 # don't treat this as an error.
918 916 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
919 917 (self._relpath, node.short(self._ctx.node())))
920 918 return 0
921 919
922 920 @propertycache
923 921 def wvfs(self):
924 922 """return own wvfs for efficiency and consistency
925 923 """
926 924 return self._repo.wvfs
927 925
928 926 @propertycache
929 927 def _relpath(self):
930 928 """return path to this subrepository as seen from outermost repository
931 929 """
932 930 # Keep consistent dir separators by avoiding vfs.join(self._path)
933 931 return reporelpath(self._repo)
934 932
935 933 class svnsubrepo(abstractsubrepo):
936 934 def __init__(self, ctx, path, state, allowcreate):
937 935 super(svnsubrepo, self).__init__(ctx, path)
938 936 self._state = state
939 937 self._exe = procutil.findexe('svn')
940 938 if not self._exe:
941 939 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
942 940 % self._path)
943 941
944 942 def _svncommand(self, commands, filename='', failok=False):
945 943 cmd = [self._exe]
946 944 extrakw = {}
947 945 if not self.ui.interactive():
948 946 # Making stdin be a pipe should prevent svn from behaving
949 947 # interactively even if we can't pass --non-interactive.
950 948 extrakw[r'stdin'] = subprocess.PIPE
951 949 # Starting in svn 1.5 --non-interactive is a global flag
952 950 # instead of being per-command, but we need to support 1.4 so
953 951 # we have to be intelligent about what commands take
954 952 # --non-interactive.
955 953 if commands[0] in ('update', 'checkout', 'commit'):
956 954 cmd.append('--non-interactive')
957 955 cmd.extend(commands)
958 956 if filename is not None:
959 957 path = self.wvfs.reljoin(self._ctx.repo().origroot,
960 958 self._path, filename)
961 959 cmd.append(path)
962 960 env = dict(encoding.environ)
963 961 # Avoid localized output, preserve current locale for everything else.
964 962 lc_all = env.get('LC_ALL')
965 963 if lc_all:
966 964 env['LANG'] = lc_all
967 965 del env['LC_ALL']
968 966 env['LC_MESSAGES'] = 'C'
969 967 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
970 968 bufsize=-1, close_fds=procutil.closefds,
971 969 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
972 970 env=procutil.tonativeenv(env), **extrakw)
973 971 stdout, stderr = map(util.fromnativeeol, p.communicate())
974 972 stderr = stderr.strip()
975 973 if not failok:
976 974 if p.returncode:
977 975 raise error.Abort(stderr or 'exited with code %d'
978 976 % p.returncode)
979 977 if stderr:
980 978 self.ui.warn(stderr + '\n')
981 979 return stdout, stderr
982 980
983 981 @propertycache
984 982 def _svnversion(self):
985 983 output, err = self._svncommand(['--version', '--quiet'], filename=None)
986 984 m = re.search(br'^(\d+)\.(\d+)', output)
987 985 if not m:
988 986 raise error.Abort(_('cannot retrieve svn tool version'))
989 987 return (int(m.group(1)), int(m.group(2)))
990 988
991 989 def _svnmissing(self):
992 990 return not self.wvfs.exists('.svn')
993 991
994 992 def _wcrevs(self):
995 993 # Get the working directory revision as well as the last
996 994 # commit revision so we can compare the subrepo state with
997 995 # both. We used to store the working directory one.
998 996 output, err = self._svncommand(['info', '--xml'])
999 997 doc = xml.dom.minidom.parseString(output)
1000 998 entries = doc.getElementsByTagName(r'entry')
1001 999 lastrev, rev = '0', '0'
1002 1000 if entries:
1003 1001 rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
1004 1002 commits = entries[0].getElementsByTagName(r'commit')
1005 1003 if commits:
1006 1004 lastrev = pycompat.bytestr(
1007 1005 commits[0].getAttribute(r'revision')) or '0'
1008 1006 return (lastrev, rev)
1009 1007
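For reference, the fragment of 'svn info --xml' output that this method consumes looks roughly like the following (heavily trimmed, revision numbers made up); the entry's 'revision' attribute is the working copy revision and the nested commit's 'revision' is the last committed one:

    <info>
      <entry kind="dir" path="." revision="42">
        <commit revision="40"/>
      </entry>
    </info>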
1010 1008 def _wcrev(self):
1011 1009 return self._wcrevs()[0]
1012 1010
1013 1011 def _wcchanged(self):
1014 1012 """Return (changes, extchanges, missing) where changes is True
1015 1013 if the working directory was changed, extchanges is
1016 1014 True if any of these changes concern an external entry and missing
1017 1015 is True if any change is a missing entry.
1018 1016 """
1019 1017 output, err = self._svncommand(['status', '--xml'])
1020 1018 externals, changes, missing = [], [], []
1021 1019 doc = xml.dom.minidom.parseString(output)
1022 1020 for e in doc.getElementsByTagName(r'entry'):
1023 1021 s = e.getElementsByTagName(r'wc-status')
1024 1022 if not s:
1025 1023 continue
1026 1024 item = s[0].getAttribute(r'item')
1027 1025 props = s[0].getAttribute(r'props')
1028 1026 path = e.getAttribute(r'path').encode('utf8')
1029 1027 if item == r'external':
1030 1028 externals.append(path)
1031 1029 elif item == r'missing':
1032 1030 missing.append(path)
1033 1031 if (item not in (r'', r'normal', r'unversioned', r'external')
1034 1032 or props not in (r'', r'none', r'normal')):
1035 1033 changes.append(path)
1036 1034 for path in changes:
1037 1035 for ext in externals:
1038 1036 if path == ext or path.startswith(ext + pycompat.ossep):
1039 1037 return True, True, bool(missing)
1040 1038 return bool(changes), False, bool(missing)
1041 1039
1042 1040 @annotatesubrepoerror
1043 1041 def dirty(self, ignoreupdate=False, missing=False):
1044 1042 if self._svnmissing():
1045 1043 return self._state[1] != ''
1046 1044 wcchanged = self._wcchanged()
1047 1045 changed = wcchanged[0] or (missing and wcchanged[2])
1048 1046 if not changed:
1049 1047 if self._state[1] in self._wcrevs() or ignoreupdate:
1050 1048 return False
1051 1049 return True
1052 1050
1053 1051 def basestate(self):
1054 1052 lastrev, rev = self._wcrevs()
1055 1053 if lastrev != rev:
1056 1054 # Last committed rev is not the same as rev. We would
1057 1055 # like to take lastrev but we do not know if the subrepo
1058 1056 # URL exists at lastrev. Test it and fall back to rev if it
1059 1057 # is not there.
1060 1058 try:
1061 1059 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1062 1060 return lastrev
1063 1061 except error.Abort:
1064 1062 pass
1065 1063 return rev
1066 1064
1067 1065 @annotatesubrepoerror
1068 1066 def commit(self, text, user, date):
1069 1067 # user and date are out of our hands since svn is centralized
1070 1068 changed, extchanged, missing = self._wcchanged()
1071 1069 if not changed:
1072 1070 return self.basestate()
1073 1071 if extchanged:
1074 1072 # Do not try to commit externals
1075 1073 raise error.Abort(_('cannot commit svn externals'))
1076 1074 if missing:
1077 1075 # svn can commit with missing entries but aborting like hg
1078 1076 # seems a better approach.
1079 1077 raise error.Abort(_('cannot commit missing svn entries'))
1080 1078 commitinfo, err = self._svncommand(['commit', '-m', text])
1081 1079 self.ui.status(commitinfo)
1082 1080 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1083 1081 if not newrev:
1084 1082 if not commitinfo.strip():
1085 1083 # Sometimes, our definition of "changed" differs from
1086 1084 # the svn one. For instance, svn ignores missing files
1087 1085 # when committing. If there are only missing files, no
1088 1086 # commit is made, no output and no error code.
1089 1087 raise error.Abort(_('failed to commit svn changes'))
1090 1088 raise error.Abort(commitinfo.splitlines()[-1])
1091 1089 newrev = newrev.groups()[0]
1092 1090 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1093 1091 return newrev
1094 1092
1095 1093 @annotatesubrepoerror
1096 1094 def remove(self):
1097 1095 if self.dirty():
1098 1096 self.ui.warn(_('not removing repo %s because '
1099 1097 'it has changes.\n') % self._path)
1100 1098 return
1101 1099 self.ui.note(_('removing subrepo %s\n') % self._path)
1102 1100
1103 1101 self.wvfs.rmtree(forcibly=True)
1104 1102 try:
1105 1103 pwvfs = self._ctx.repo().wvfs
1106 1104 pwvfs.removedirs(pwvfs.dirname(self._path))
1107 1105 except OSError:
1108 1106 pass
1109 1107
1110 1108 @annotatesubrepoerror
1111 1109 def get(self, state, overwrite=False):
1112 1110 if overwrite:
1113 1111 self._svncommand(['revert', '--recursive'])
1114 1112 args = ['checkout']
1115 1113 if self._svnversion >= (1, 5):
1116 1114 args.append('--force')
1117 1115 # The revision must be specified at the end of the URL to properly
1118 1116 # update to a directory which has since been deleted and recreated.
1119 1117 args.append('%s@%s' % (state[0], state[1]))
1120 1118
1121 1119 # SEC: check that the ssh url is safe
1122 1120 util.checksafessh(state[0])
1123 1121
1124 1122 status, err = self._svncommand(args, failok=True)
1125 1123 _sanitize(self.ui, self.wvfs, '.svn')
1126 1124 if not re.search('Checked out revision [0-9]+.', status):
1127 1125 if ('is already a working copy for a different URL' in err
1128 1126 and (self._wcchanged()[:2] == (False, False))):
1129 1127 # obstructed but clean working copy, so just blow it away.
1130 1128 self.remove()
1131 1129 self.get(state, overwrite=False)
1132 1130 return
1133 1131 raise error.Abort((status or err).splitlines()[-1])
1134 1132 self.ui.status(status)
1135 1133
1136 1134 @annotatesubrepoerror
1137 1135 def merge(self, state):
1138 1136 old = self._state[1]
1139 1137 new = state[1]
1140 1138 wcrev = self._wcrev()
1141 1139 if new != wcrev:
1142 1140 dirty = old == wcrev or self._wcchanged()[0]
1143 1141 if _updateprompt(self.ui, self, dirty, wcrev, new):
1144 1142 self.get(state, False)
1145 1143
1146 1144 def push(self, opts):
1147 1145 # push is a no-op for SVN
1148 1146 return True
1149 1147
1150 1148 @annotatesubrepoerror
1151 1149 def files(self):
1152 1150 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1153 1151 doc = xml.dom.minidom.parseString(output)
1154 1152 paths = []
1155 1153 for e in doc.getElementsByTagName(r'entry'):
1156 1154 kind = pycompat.bytestr(e.getAttribute(r'kind'))
1157 1155 if kind != 'file':
1158 1156 continue
1159 1157 name = r''.join(c.data for c
1160 1158 in e.getElementsByTagName(r'name')[0].childNodes
1161 1159 if c.nodeType == c.TEXT_NODE)
1162 1160 paths.append(name.encode('utf8'))
1163 1161 return paths
1164 1162
1165 1163 def filedata(self, name, decode):
1166 1164 return self._svncommand(['cat'], name)[0]
1167 1165
1168 1166
1169 1167 class gitsubrepo(abstractsubrepo):
1170 1168 def __init__(self, ctx, path, state, allowcreate):
1171 1169 super(gitsubrepo, self).__init__(ctx, path)
1172 1170 self._state = state
1173 1171 self._abspath = ctx.repo().wjoin(path)
1174 1172 self._subparent = ctx.repo()
1175 1173 self._ensuregit()
1176 1174
1177 1175 def _ensuregit(self):
1178 1176 try:
1179 1177 self._gitexecutable = 'git'
1180 1178 out, err = self._gitnodir(['--version'])
1181 1179 except OSError as e:
1182 1180 genericerror = _("error executing git for subrepo '%s': %s")
1183 1181 notfoundhint = _("check git is installed and in your PATH")
1184 1182 if e.errno != errno.ENOENT:
1185 1183 raise error.Abort(genericerror % (
1186 1184 self._path, encoding.strtolocal(e.strerror)))
1187 1185 elif pycompat.iswindows:
1188 1186 try:
1189 1187 self._gitexecutable = 'git.cmd'
1190 1188 out, err = self._gitnodir(['--version'])
1191 1189 except OSError as e2:
1192 1190 if e2.errno == errno.ENOENT:
1193 1191 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1194 1192 " for subrepo '%s'") % self._path,
1195 1193 hint=notfoundhint)
1196 1194 else:
1197 1195 raise error.Abort(genericerror % (self._path,
1198 1196 encoding.strtolocal(e2.strerror)))
1199 1197 else:
1200 1198 raise error.Abort(_("couldn't find git for subrepo '%s'")
1201 1199 % self._path, hint=notfoundhint)
1202 1200 versionstatus = self._checkversion(out)
1203 1201 if versionstatus == 'unknown':
1204 1202 self.ui.warn(_('cannot retrieve git version\n'))
1205 1203 elif versionstatus == 'abort':
1206 1204 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1207 1205 elif versionstatus == 'warning':
1208 1206 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1209 1207
1210 1208 @staticmethod
1211 1209 def _gitversion(out):
1212 1210 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1213 1211 if m:
1214 1212 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1215 1213
1216 1214 m = re.search(br'^git version (\d+)\.(\d+)', out)
1217 1215 if m:
1218 1216 return (int(m.group(1)), int(m.group(2)), 0)
1219 1217
1220 1218 return -1
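# Illustrative examples of the parsing above, assuming typical
# `git --version` output (not part of any doctest):
#   b'git version 2.24.1'  -> (2, 24, 1)
#   b'git version 1.9-rc0' -> (1, 9, 0)
#   anything unparseable   -> -1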
1221 1219
1222 1220 @staticmethod
1223 1221 def _checkversion(out):
1224 1222 '''ensure git version is new enough
1225 1223
1226 1224 >>> _checkversion = gitsubrepo._checkversion
1227 1225 >>> _checkversion(b'git version 1.6.0')
1228 1226 'ok'
1229 1227 >>> _checkversion(b'git version 1.8.5')
1230 1228 'ok'
1231 1229 >>> _checkversion(b'git version 1.4.0')
1232 1230 'abort'
1233 1231 >>> _checkversion(b'git version 1.5.0')
1234 1232 'warning'
1235 1233 >>> _checkversion(b'git version 1.9-rc0')
1236 1234 'ok'
1237 1235 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1238 1236 'ok'
1239 1237 >>> _checkversion(b'git version 1.9.0.GIT')
1240 1238 'ok'
1241 1239 >>> _checkversion(b'git version 12345')
1242 1240 'unknown'
1243 1241 >>> _checkversion(b'no')
1244 1242 'unknown'
1245 1243 '''
1246 1244 version = gitsubrepo._gitversion(out)
1247 1245 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1248 1246 # despite the docstring comment. For now, error on 1.4.0, warn on
1249 1247 # 1.5.0 but attempt to continue.
1250 1248 if version == -1:
1251 1249 return 'unknown'
1252 1250 if version < (1, 5, 0):
1253 1251 return 'abort'
1254 1252 elif version < (1, 6, 0):
1255 1253 return 'warning'
1256 1254 return 'ok'
1257 1255
1258 1256 def _gitcommand(self, commands, env=None, stream=False):
1259 1257 return self._gitdir(commands, env=env, stream=stream)[0]
1260 1258
1261 1259 def _gitdir(self, commands, env=None, stream=False):
1262 1260 return self._gitnodir(commands, env=env, stream=stream,
1263 1261 cwd=self._abspath)
1264 1262
1265 1263 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1266 1264 """Calls the git command
1267 1265
1268 1266 The method tries to call the git command. Versions prior to 1.6.0
1269 1267 are not supported and will very probably fail.
1270 1268 """
1271 1269 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1272 1270 if env is None:
1273 1271 env = encoding.environ.copy()
1274 1272 # disable localization for Git output (issue5176)
1275 1273 env['LC_ALL'] = 'C'
1276 1274 # fix for Git CVE-2015-7545
1277 1275 if 'GIT_ALLOW_PROTOCOL' not in env:
1278 1276 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1279 1277 # unless ui.quiet is set, print git's stderr,
1280 1278 # which is mostly progress and useful info
1281 1279 errpipe = None
1282 1280 if self.ui.quiet:
1283 1281 errpipe = open(os.devnull, 'w')
1284 1282 if self.ui._colormode and len(commands) and commands[0] == "diff":
1285 1283 # insert the argument in the front,
1286 1284 # the end of git diff arguments is used for paths
1287 1285 commands.insert(1, '--color')
1288 1286 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
1289 1287 [self._gitexecutable] + commands),
1290 1288 bufsize=-1,
1291 1289 cwd=pycompat.rapply(procutil.tonativestr, cwd),
1292 1290 env=procutil.tonativeenv(env),
1293 1291 close_fds=procutil.closefds,
1294 1292 stdout=subprocess.PIPE, stderr=errpipe)
1295 1293 if stream:
1296 1294 return p.stdout, None
1297 1295
1298 1296 retdata = p.stdout.read().strip()
1299 1297 # wait for the child to exit to avoid race condition.
1300 1298 p.wait()
1301 1299
1302 1300 if p.returncode != 0 and p.returncode != 1:
1303 1301 # there are certain error codes that are ok
1304 1302 command = commands[0]
1305 1303 if command in ('cat-file', 'symbolic-ref'):
1306 1304 return retdata, p.returncode
1307 1305 # for all others, abort
1308 1306 raise error.Abort(_('git %s error %d in %s') %
1309 1307 (command, p.returncode, self._relpath))
1310 1308
1311 1309 return retdata, p.returncode
1312 1310
1313 1311 def _gitmissing(self):
1314 1312 return not self.wvfs.exists('.git')
1315 1313
1316 1314 def _gitstate(self):
1317 1315 return self._gitcommand(['rev-parse', 'HEAD'])
1318 1316
1319 1317 def _gitcurrentbranch(self):
1320 1318 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1321 1319 if err:
1322 1320 current = None
1323 1321 return current
1324 1322
1325 1323 def _gitremote(self, remote):
1326 1324 out = self._gitcommand(['remote', 'show', '-n', remote])
1327 1325 line = out.split('\n')[1]
1328 1326 i = line.index('URL: ') + len('URL: ')
1329 1327 return line[i:]
1330 1328
1331 1329 def _githavelocally(self, revision):
1332 1330 out, code = self._gitdir(['cat-file', '-e', revision])
1333 1331 return code == 0
1334 1332
1335 1333 def _gitisancestor(self, r1, r2):
1336 1334 base = self._gitcommand(['merge-base', r1, r2])
1337 1335 return base == r1
1338 1336
1339 1337 def _gitisbare(self):
1340 1338 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1341 1339
1342 1340 def _gitupdatestat(self):
1343 1341 """This must be run before git diff-index.
1344 1342 diff-index only looks at changes to file stat;
1345 1343 this command looks at file contents and updates the stat."""
1346 1344 self._gitcommand(['update-index', '-q', '--refresh'])
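# Typical pairing (as used in dirty() and merge() below):
#   self._gitupdatestat()
#   out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
# where code == 1 indicates real content changes rather than stat noise.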
1347 1345
1348 1346 def _gitbranchmap(self):
1349 1347 '''returns 2 things:
1350 1348 a map from git branch to revision
1351 1349 a map from revision to branches'''
1352 1350 branch2rev = {}
1353 1351 rev2branch = {}
1354 1352
1355 1353 out = self._gitcommand(['for-each-ref', '--format',
1356 1354 '%(objectname) %(refname)'])
1357 1355 for line in out.split('\n'):
1358 1356 revision, ref = line.split(' ')
1359 1357 if (not ref.startswith('refs/heads/') and
1360 1358 not ref.startswith('refs/remotes/')):
1361 1359 continue
1362 1360 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1363 1361 continue # ignore remote/HEAD redirects
1364 1362 branch2rev[ref] = revision
1365 1363 rev2branch.setdefault(revision, []).append(ref)
1366 1364 return branch2rev, rev2branch
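# Illustrative shapes of the two maps (hypothetical revision hashes):
#   branch2rev = {'refs/heads/master': 'deadbeef...',
#                 'refs/remotes/origin/master': 'deadbeef...'}
#   rev2branch = {'deadbeef...': ['refs/heads/master',
#                                 'refs/remotes/origin/master']}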
1367 1365
1368 1366 def _gittracking(self, branches):
1369 1367 'return map of remote branch to local tracking branch'
1370 1368 # assumes no more than one local tracking branch for each remote
1371 1369 tracking = {}
1372 1370 for b in branches:
1373 1371 if b.startswith('refs/remotes/'):
1374 1372 continue
1375 1373 bname = b.split('/', 2)[2]
1376 1374 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1377 1375 if remote:
1378 1376 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1379 1377 tracking['refs/remotes/%s/%s' %
1380 1378 (remote, ref.split('/', 2)[2])] = b
1381 1379 return tracking
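# Illustrative result (hypothetical config): a local 'refs/heads/master'
# with branch.master.remote = origin and
# branch.master.merge = refs/heads/master yields
#   {'refs/remotes/origin/master': 'refs/heads/master'}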
1382 1380
1383 1381 def _abssource(self, source):
1384 1382 if '://' not in source:
1385 1383 # recognize the scp syntax as an absolute source
1386 1384 colon = source.find(':')
1387 1385 if colon != -1 and '/' not in source[:colon]:
1388 1386 return source
1389 1387 self._subsource = source
1390 1388 return _abssource(self)
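# Illustrative behaviour: scp-like sources such as 'git@host:proj.git'
# are returned unchanged, while relative sources such as '../proj' fall
# through to _abssource(self) and are resolved against the parent repo.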
1391 1389
1392 1390 def _fetch(self, source, revision):
1393 1391 if self._gitmissing():
1394 1392 # SEC: check for safe ssh url
1395 1393 util.checksafessh(source)
1396 1394
1397 1395 source = self._abssource(source)
1398 1396 self.ui.status(_('cloning subrepo %s from %s\n') %
1399 1397 (self._relpath, source))
1400 1398 self._gitnodir(['clone', source, self._abspath])
1401 1399 if self._githavelocally(revision):
1402 1400 return
1403 1401 self.ui.status(_('pulling subrepo %s from %s\n') %
1404 1402 (self._relpath, self._gitremote('origin')))
1405 1403 # try only origin: the originally cloned repo
1406 1404 self._gitcommand(['fetch'])
1407 1405 if not self._githavelocally(revision):
1408 1406 raise error.Abort(_('revision %s does not exist in subrepository '
1409 1407 '"%s"\n') % (revision, self._relpath))
1410 1408
1411 1409 @annotatesubrepoerror
1412 1410 def dirty(self, ignoreupdate=False, missing=False):
1413 1411 if self._gitmissing():
1414 1412 return self._state[1] != ''
1415 1413 if self._gitisbare():
1416 1414 return True
1417 1415 if not ignoreupdate and self._state[1] != self._gitstate():
1418 1416 # different version checked out
1419 1417 return True
1420 1418 # check for staged changes or modified files; ignore untracked files
1421 1419 self._gitupdatestat()
1422 1420 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1423 1421 return code == 1
1424 1422
1425 1423 def basestate(self):
1426 1424 return self._gitstate()
1427 1425
1428 1426 @annotatesubrepoerror
1429 1427 def get(self, state, overwrite=False):
1430 1428 source, revision, kind = state
1431 1429 if not revision:
1432 1430 self.remove()
1433 1431 return
1434 1432 self._fetch(source, revision)
1435 1433 # if the repo was set to be bare, unbare it
1436 1434 if self._gitisbare():
1437 1435 self._gitcommand(['config', 'core.bare', 'false'])
1438 1436 if self._gitstate() == revision:
1439 1437 self._gitcommand(['reset', '--hard', 'HEAD'])
1440 1438 return
1441 1439 elif self._gitstate() == revision:
1442 1440 if overwrite:
1443 1441 # first reset the index to unmark new files for commit, because
1444 1442 # reset --hard will otherwise throw away files added for commit,
1445 1443 # not just unmark them.
1446 1444 self._gitcommand(['reset', 'HEAD'])
1447 1445 self._gitcommand(['reset', '--hard', 'HEAD'])
1448 1446 return
1449 1447 branch2rev, rev2branch = self._gitbranchmap()
1450 1448
1451 1449 def checkout(args):
1452 1450 cmd = ['checkout']
1453 1451 if overwrite:
1454 1452 # first reset the index to unmark new files for commit, because
1455 1453 # the -f option will otherwise throw away files added for
1456 1454 # commit, not just unmark them.
1457 1455 self._gitcommand(['reset', 'HEAD'])
1458 1456 cmd.append('-f')
1459 1457 self._gitcommand(cmd + args)
1460 1458 _sanitize(self.ui, self.wvfs, '.git')
1461 1459
1462 1460 def rawcheckout():
1463 1461 # no branch points at this revision, so check out the bare revision
1464 1462 self.ui.warn(_('checking out detached HEAD in '
1465 1463 'subrepository "%s"\n') % self._relpath)
1466 1464 self.ui.warn(_('check out a git branch if you intend '
1467 1465 'to make changes\n'))
1468 1466 checkout(['-q', revision])
1469 1467
1470 1468 if revision not in rev2branch:
1471 1469 rawcheckout()
1472 1470 return
1473 1471 branches = rev2branch[revision]
1474 1472 firstlocalbranch = None
1475 1473 for b in branches:
1476 1474 if b == 'refs/heads/master':
1477 1475 # master trumps all other branches
1478 1476 checkout(['refs/heads/master'])
1479 1477 return
1480 1478 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1481 1479 firstlocalbranch = b
1482 1480 if firstlocalbranch:
1483 1481 checkout([firstlocalbranch])
1484 1482 return
1485 1483
1486 1484 tracking = self._gittracking(branch2rev.keys())
1487 1485 # choose a remote branch already tracked if possible
1488 1486 remote = branches[0]
1489 1487 if remote not in tracking:
1490 1488 for b in branches:
1491 1489 if b in tracking:
1492 1490 remote = b
1493 1491 break
1494 1492
1495 1493 if remote not in tracking:
1496 1494 # create a new local tracking branch
1497 1495 local = remote.split('/', 3)[3]
1498 1496 checkout(['-b', local, remote])
1499 1497 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1500 1498 # When updating to a tracked remote branch,
1501 1499 # if the local tracking branch is downstream of it,
1502 1500 # a normal `git pull` would have performed a "fast-forward merge"
1503 1501 # which is equivalent to updating the local branch to the remote.
1504 1502 # Since we are only looking at branching at update, we need to
1505 1503 # detect this situation and perform this action lazily.
1506 1504 if tracking[remote] != self._gitcurrentbranch():
1507 1505 checkout([tracking[remote]])
1508 1506 self._gitcommand(['merge', '--ff', remote])
1509 1507 _sanitize(self.ui, self.wvfs, '.git')
1510 1508 else:
1511 1509 # a real merge would be required, just checkout the revision
1512 1510 rawcheckout()
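# Informal summary of the checkout strategy above: prefer a local branch
# at the target revision (master first), then a remote branch that is
# already tracked (creating or fast-forwarding the local tracking branch
# when it is an ancestor), and fall back to a detached checkout when a
# real merge would be needed or no branch points at the revision.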
1513 1511
1514 1512 @annotatesubrepoerror
1515 1513 def commit(self, text, user, date):
1516 1514 if self._gitmissing():
1517 1515 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1518 1516 cmd = ['commit', '-a', '-m', text]
1519 1517 env = encoding.environ.copy()
1520 1518 if user:
1521 1519 cmd += ['--author', user]
1522 1520 if date:
1523 1521 # git's date parser silently ignores timestamps whose seconds value
1524 1522 # is < 1e9, so convert the date to ISO 8601
1525 1523 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1526 1524 '%Y-%m-%dT%H:%M:%S %1%2')
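# e.g. (illustrative) a Mercurial date tuple (1234567890, 0) would be
# rendered roughly as GIT_AUTHOR_DATE = '2009-02-13T23:31:30 +0000'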
1527 1525 self._gitcommand(cmd, env=env)
1528 1526 # reading back HEAD verifies that the commit worked; otherwise HEAD
1529 1527 # might not exist under certain circumstances
1530 1528 return self._gitstate()
1531 1529
1532 1530 @annotatesubrepoerror
1533 1531 def merge(self, state):
1534 1532 source, revision, kind = state
1535 1533 self._fetch(source, revision)
1536 1534 base = self._gitcommand(['merge-base', revision, self._state[1]])
1537 1535 self._gitupdatestat()
1538 1536 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1539 1537
1540 1538 def mergefunc():
1541 1539 if base == revision:
1542 1540 self.get(state) # fast forward merge
1543 1541 elif base != self._state[1]:
1544 1542 self._gitcommand(['merge', '--no-commit', revision])
1545 1543 _sanitize(self.ui, self.wvfs, '.git')
1546 1544
1547 1545 if self.dirty():
1548 1546 if self._gitstate() != revision:
1549 1547 dirty = self._gitstate() == self._state[1] or code != 0
1550 1548 if _updateprompt(self.ui, self, dirty,
1551 1549 self._state[1][:7], revision[:7]):
1552 1550 mergefunc()
1553 1551 else:
1554 1552 mergefunc()
1555 1553
1556 1554 @annotatesubrepoerror
1557 1555 def push(self, opts):
1558 1556 force = opts.get('force')
1559 1557
1560 1558 if not self._state[1]:
1561 1559 return True
1562 1560 if self._gitmissing():
1563 1561 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1564 1562 # if a branch in origin contains the revision, nothing to do
1565 1563 branch2rev, rev2branch = self._gitbranchmap()
1566 1564 if self._state[1] in rev2branch:
1567 1565 for b in rev2branch[self._state[1]]:
1568 1566 if b.startswith('refs/remotes/origin/'):
1569 1567 return True
1570 1568 for b, revision in branch2rev.iteritems():
1571 1569 if b.startswith('refs/remotes/origin/'):
1572 1570 if self._gitisancestor(self._state[1], revision):
1573 1571 return True
1574 1572 # otherwise, try to push the currently checked out branch
1575 1573 cmd = ['push']
1576 1574 if force:
1577 1575 cmd.append('--force')
1578 1576
1579 1577 current = self._gitcurrentbranch()
1580 1578 if current:
1581 1579 # determine if the current branch is even useful
1582 1580 if not self._gitisancestor(self._state[1], current):
1583 1581 self.ui.warn(_('unrelated git branch checked out '
1584 1582 'in subrepository "%s"\n') % self._relpath)
1585 1583 return False
1586 1584 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1587 1585 (current.split('/', 2)[2], self._relpath))
1588 1586 ret = self._gitdir(cmd + ['origin', current])
1589 1587 return ret[1] == 0
1590 1588 else:
1591 1589 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1592 1590 'cannot push revision %s\n') %
1593 1591 (self._relpath, self._state[1]))
1594 1592 return False
1595 1593
1596 1594 @annotatesubrepoerror
1597 1595 def add(self, ui, match, prefix, explicitonly, **opts):
1598 1596 if self._gitmissing():
1599 1597 return []
1600 1598
1601 1599 s = self.status(None, unknown=True, clean=True)
1602 1600
1603 1601 tracked = set()
1604 1602 # already-tracked files ('a', 'm', 'n' in dirstate terms) warn; 'r' is added again
1605 1603 for l in (s.modified, s.added, s.deleted, s.clean):
1606 1604 tracked.update(l)
1607 1605
1608 1606 # Unknown files not of interest will be rejected by the matcher
1609 1607 files = s.unknown
1610 1608 files.extend(match.files())
1611 1609
1612 1610 rejected = []
1613 1611
1614 1612 files = [f for f in sorted(set(files)) if match(f)]
1615 1613 for f in files:
1616 1614 exact = match.exact(f)
1617 1615 command = ["add"]
1618 1616 if exact:
1619 1617 command.append("-f") #should be added, even if ignored
1620 1618 if ui.verbose or not exact:
1621 1619 ui.status(_('adding %s\n') % match.rel(f))
1622 1620
1623 1621 if f in tracked: # hg prints 'adding' even if already tracked
1624 1622 if exact:
1625 1623 rejected.append(f)
1626 1624 continue
1627 1625 if not opts.get(r'dry_run'):
1628 1626 self._gitcommand(command + [f])
1629 1627
1630 1628 for f in rejected:
1631 1629 ui.warn(_("%s already tracked!\n") % match.abs(f))
1632 1630
1633 1631 return rejected
1634 1632
1635 1633 @annotatesubrepoerror
1636 1634 def remove(self):
1637 1635 if self._gitmissing():
1638 1636 return
1639 1637 if self.dirty():
1640 1638 self.ui.warn(_('not removing repo %s because '
1641 1639 'it has changes.\n') % self._relpath)
1642 1640 return
1643 1641 # we can't fully delete the repository as it may contain
1644 1642 # local-only history
1645 1643 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1646 1644 self._gitcommand(['config', 'core.bare', 'true'])
1647 1645 for f, kind in self.wvfs.readdir():
1648 1646 if f == '.git':
1649 1647 continue
1650 1648 if kind == stat.S_IFDIR:
1651 1649 self.wvfs.rmtree(f)
1652 1650 else:
1653 1651 self.wvfs.unlink(f)
1654 1652
1655 1653 def archive(self, archiver, prefix, match=None, decode=True):
1656 1654 total = 0
1657 1655 source, revision = self._state
1658 1656 if not revision:
1659 1657 return total
1660 1658 self._fetch(source, revision)
1661 1659
1662 1660 # Stream and parse the output of git's native archive command.
1663 1661 # This should be much faster than manually traversing the trees
1664 1662 # and objects with many subprocess calls.
1665 1663 tarstream = self._gitcommand(['archive', revision], stream=True)
1666 1664 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1667 1665 relpath = subrelpath(self)
1668 1666 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1669 1667 unit=_('files'))
1670 1668 progress.update(0)
1671 1669 for info in tar:
1672 1670 if info.isdir():
1673 1671 continue
1674 1672 bname = pycompat.fsencode(info.name)
1675 1673 if match and not match(bname):
1676 1674 continue
1677 1675 if info.issym():
1678 1676 data = info.linkname
1679 1677 else:
1680 1678 data = tar.extractfile(info).read()
1681 1679 archiver.addfile(prefix + self._path + '/' + bname,
1682 1680 info.mode, info.issym(), data)
1683 1681 total += 1
1684 1682 progress.increment()
1685 1683 progress.complete()
1686 1684 return total
1687 1685
1688 1686
1689 1687 @annotatesubrepoerror
1690 1688 def cat(self, match, fm, fntemplate, prefix, **opts):
1691 1689 rev = self._state[1]
1692 1690 if match.anypats():
1693 1691 return 1 # no support for include/exclude yet
1694 1692
1695 1693 if not match.files():
1696 1694 return 1
1697 1695
1698 1696 # TODO: add support for non-plain formatter (see cmdutil.cat())
1699 1697 for f in match.files():
1700 1698 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1701 1699 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1702 1700 pathname=self.wvfs.reljoin(prefix, f))
1703 1701 fp.write(output)
1704 1702 fp.close()
1705 1703 return 0
1706 1704
1707 1705
1708 1706 @annotatesubrepoerror
1709 1707 def status(self, rev2, **opts):
1710 1708 rev1 = self._state[1]
1711 1709 if self._gitmissing() or not rev1:
1712 1710 # if the repo is missing, return no results
1713 1711 return scmutil.status([], [], [], [], [], [], [])
1714 1712 modified, added, removed = [], [], []
1715 1713 self._gitupdatestat()
1716 1714 if rev2:
1717 1715 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1718 1716 else:
1719 1717 command = ['diff-index', '--no-renames', rev1]
1720 1718 out = self._gitcommand(command)
1721 1719 for line in out.split('\n'):
1722 1720 tab = line.find('\t')
1723 1721 if tab == -1:
1724 1722 continue
1725 1723 status, f = line[tab - 1:tab], line[tab + 1:]
1726 1724 if status == 'M':
1727 1725 modified.append(f)
1728 1726 elif status == 'A':
1729 1727 added.append(f)
1730 1728 elif status == 'D':
1731 1729 removed.append(f)
1732 1730
1733 1731 deleted, unknown, ignored, clean = [], [], [], []
1734 1732
1735 1733 command = ['status', '--porcelain', '-z']
1736 1734 if opts.get(r'unknown'):
1737 1735 command += ['--untracked-files=all']
1738 1736 if opts.get(r'ignored'):
1739 1737 command += ['--ignored']
1740 1738 out = self._gitcommand(command)
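# Sample --porcelain -z output (illustrative), NUL-separated entries:
#   '?? untracked.txt\0!! ignored.txt\0'
# '??' entries are reported as unknown, '!!' entries as ignored.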
1741 1739
1742 1740 changedfiles = set()
1743 1741 changedfiles.update(modified)
1744 1742 changedfiles.update(added)
1745 1743 changedfiles.update(removed)
1746 1744 for line in out.split('\0'):
1747 1745 if not line:
1748 1746 continue
1749 1747 st = line[0:2]
1750 1748 # moves and copies show 2 files on one line
1751 1749 if line.find('\0') >= 0:
1752 1750 filename1, filename2 = line[3:].split('\0')
1753 1751 else:
1754 1752 filename1 = line[3:]
1755 1753 filename2 = None
1756 1754
1757 1755 changedfiles.add(filename1)
1758 1756 if filename2:
1759 1757 changedfiles.add(filename2)
1760 1758
1761 1759 if st == '??':
1762 1760 unknown.append(filename1)
1763 1761 elif st == '!!':
1764 1762 ignored.append(filename1)
1765 1763
1766 1764 if opts.get(r'clean'):
1767 1765 out = self._gitcommand(['ls-files'])
1768 1766 for f in out.split('\n'):
1769 1767 if f not in changedfiles:
1770 1768 clean.append(f)
1771 1769
1772 1770 return scmutil.status(modified, added, removed, deleted,
1773 1771 unknown, ignored, clean)
1774 1772
1775 1773 @annotatesubrepoerror
1776 1774 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1777 1775 node1 = self._state[1]
1778 1776 cmd = ['diff', '--no-renames']
1779 1777 if opts[r'stat']:
1780 1778 cmd.append('--stat')
1781 1779 else:
1782 1780 # for Git, this also implies '-p'
1783 1781 cmd.append('-U%d' % diffopts.context)
1784 1782
1785 1783 gitprefix = self.wvfs.reljoin(prefix, self._path)
1786 1784
1787 1785 if diffopts.noprefix:
1788 1786 cmd.extend(['--src-prefix=%s/' % gitprefix,
1789 1787 '--dst-prefix=%s/' % gitprefix])
1790 1788 else:
1791 1789 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1792 1790 '--dst-prefix=b/%s/' % gitprefix])
1793 1791
1794 1792 if diffopts.ignorews:
1795 1793 cmd.append('--ignore-all-space')
1796 1794 if diffopts.ignorewsamount:
1797 1795 cmd.append('--ignore-space-change')
1798 1796 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1799 1797 and diffopts.ignoreblanklines:
1800 1798 cmd.append('--ignore-blank-lines')
1801 1799
1802 1800 cmd.append(node1)
1803 1801 if node2:
1804 1802 cmd.append(node2)
1805 1803
1806 1804 output = ""
1807 1805 if match.always():
1808 1806 output += self._gitcommand(cmd) + '\n'
1809 1807 else:
1810 1808 st = self.status(node2)[:3]
1811 1809 files = [f for sublist in st for f in sublist]
1812 1810 for f in files:
1813 1811 if match(f):
1814 1812 output += self._gitcommand(cmd + ['--', f]) + '\n'
1815 1813
1816 1814 if output.strip():
1817 1815 ui.write(output)
1818 1816
1819 1817 @annotatesubrepoerror
1820 1818 def revert(self, substate, *pats, **opts):
1821 1819 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1822 1820 if not opts.get(r'no_backup'):
1823 1821 status = self.status(None)
1824 1822 names = status.modified
1825 1823 for name in names:
1826 1824 # backuppath() expects a path relative to the parent repo (the
1827 1825 # repo that ui.origbackuppath is relative to)
1828 1826 parentname = os.path.join(self._path, name)
1829 1827 bakname = scmutil.backuppath(self.ui, self._subparent,
1830 1828 parentname)
1831 1829 self.ui.note(_('saving current version of %s as %s\n') %
1832 1830 (name, os.path.relpath(bakname)))
1833 1831 util.rename(self.wvfs.join(name), bakname)
1834 1832
1835 1833 if not opts.get(r'dry_run'):
1836 1834 self.get(substate, overwrite=True)
1837 1835 return []
1838 1836
1839 1837 def shortid(self, revid):
1840 1838 return revid[:7]
1841 1839
1842 1840 types = {
1843 1841 'hg': hgsubrepo,
1844 1842 'svn': svnsubrepo,
1845 1843 'git': gitsubrepo,
1846 1844 }
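# Illustrative dispatch on this table (simplified sketch; the actual
# factory function is defined earlier in this module): a substate entry
# of kind 'git' is instantiated roughly as
#   types['git'](ctx, path, (source, revision), allowcreate)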