merge: tell _checkunknownfiles about whether this was merge --force...
Siddharth Agarwal
r28020:cffa46cb default
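Context for the hunks below: in this changeset core's calculateupdates gains a mergeforce argument recording whether the user explicitly ran "hg merge --force", and that flag is what gets handed down to _checkunknownfiles. Extension wrappers therefore have to accept the new keyword, which is all the largefiles change in this diff does. A minimal sketch of that wrapping pattern, with placeholder names (wrapper, origfn) that are not taken from the source:

    def wrapper(origfn, repo, p1, p2, pas, branchmerge, force,
                acceptremote, followcopies, matcher=None, mergeforce=False):
        # Accept the new keyword so core can pass it without breaking the
        # override; the extension-specific post-processing does not need it,
        # so it is simply swallowed, mirroring the diff below.
        return origfn(repo, p1, p2, pas, branchmerge, force, acceptremote,
                      followcopies, matcher=matcher)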
@@ -1,1405 +1,1406 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10
11 11 import os
12 12 import copy
13 13
14 14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 15 archival, pathutil, revset, error
16 16 from mercurial.i18n import _
17 17
18 18 import lfutil
19 19 import lfcommands
20 20 import basestore
21 21
22 22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 23
24 24 def composelargefilematcher(match, manifest):
25 25 '''create a matcher that matches only the largefiles in the original
26 26 matcher'''
27 27 m = copy.copy(match)
28 28 lfile = lambda f: lfutil.standin(f) in manifest
29 29 m._files = filter(lfile, m._files)
30 30 m._fileroots = set(m._files)
31 31 m._always = False
32 32 origmatchfn = m.matchfn
33 33 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
34 34 return m
35 35
36 36 def composenormalfilematcher(match, manifest, exclude=None):
37 37 excluded = set()
38 38 if exclude is not None:
39 39 excluded.update(exclude)
40 40
41 41 m = copy.copy(match)
42 42 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
43 43 manifest or f in excluded)
44 44 m._files = filter(notlfile, m._files)
45 45 m._fileroots = set(m._files)
46 46 m._always = False
47 47 origmatchfn = m.matchfn
48 48 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
49 49 return m
50 50
51 51 def installnormalfilesmatchfn(manifest):
52 52 '''installmatchfn with a matchfn that ignores all largefiles'''
53 53 def overridematch(ctx, pats=(), opts=None, globbed=False,
54 54 default='relpath', badfn=None):
55 55 if opts is None:
56 56 opts = {}
57 57 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
58 58 return composenormalfilematcher(match, manifest)
59 59 oldmatch = installmatchfn(overridematch)
60 60
61 61 def installmatchfn(f):
62 62 '''monkey patch the scmutil module with a custom match function.
63 63 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
64 64 oldmatch = scmutil.match
65 65 setattr(f, 'oldmatch', oldmatch)
66 66 scmutil.match = f
67 67 return oldmatch
68 68
69 69 def restorematchfn():
70 70 '''restores scmutil.match to what it was before installmatchfn
71 71 was called. no-op if scmutil.match is its original function.
72 72
73 73 Note that n calls to installmatchfn will require n calls to
74 74 restore the original matchfn.'''
75 75 scmutil.match = getattr(scmutil.match, 'oldmatch')
76 76
77 77 def installmatchandpatsfn(f):
78 78 oldmatchandpats = scmutil.matchandpats
79 79 setattr(f, 'oldmatchandpats', oldmatchandpats)
80 80 scmutil.matchandpats = f
81 81 return oldmatchandpats
82 82
83 83 def restorematchandpatsfn():
84 84 '''restores scmutil.matchandpats to what it was before
85 85 installmatchandpatsfn was called. No-op if scmutil.matchandpats
86 86 is its original function.
87 87
88 88 Note that n calls to installmatchandpatsfn will require n calls
89 89 to restore the original matchfn.'''
90 90 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
91 91 scmutil.matchandpats)
92 92
93 93 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
94 94 large = opts.get('large')
95 95 lfsize = lfutil.getminsize(
96 96 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
97 97
98 98 lfmatcher = None
99 99 if lfutil.islfilesrepo(repo):
100 100 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
101 101 if lfpats:
102 102 lfmatcher = match_.match(repo.root, '', list(lfpats))
103 103
104 104 lfnames = []
105 105 m = matcher
106 106
107 107 wctx = repo[None]
108 108 for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
109 109 exact = m.exact(f)
110 110 lfile = lfutil.standin(f) in wctx
111 111 nfile = f in wctx
112 112 exists = lfile or nfile
113 113
114 114 # addremove in core gets fancy with the name, add doesn't
115 115 if isaddremove:
116 116 name = m.uipath(f)
117 117 else:
118 118 name = m.rel(f)
119 119
120 120 # Don't warn the user when they attempt to add a normal tracked file.
121 121 # The normal add code will do that for us.
122 122 if exact and exists:
123 123 if lfile:
124 124 ui.warn(_('%s already a largefile\n') % name)
125 125 continue
126 126
127 127 if (exact or not exists) and not lfutil.isstandin(f):
128 128 # In case the file was removed previously, but not committed
129 129 # (issue3507)
130 130 if not repo.wvfs.exists(f):
131 131 continue
132 132
133 133 abovemin = (lfsize and
134 134 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
135 135 if large or abovemin or (lfmatcher and lfmatcher(f)):
136 136 lfnames.append(f)
137 137 if ui.verbose or not exact:
138 138 ui.status(_('adding %s as a largefile\n') % name)
139 139
140 140 bad = []
141 141
142 142 # Need to lock, otherwise there could be a race condition between
143 143 # when standins are created and added to the repo.
144 144 with repo.wlock():
145 145 if not opts.get('dry_run'):
146 146 standins = []
147 147 lfdirstate = lfutil.openlfdirstate(ui, repo)
148 148 for f in lfnames:
149 149 standinname = lfutil.standin(f)
150 150 lfutil.writestandin(repo, standinname, hash='',
151 151 executable=lfutil.getexecutable(repo.wjoin(f)))
152 152 standins.append(standinname)
153 153 if lfdirstate[f] == 'r':
154 154 lfdirstate.normallookup(f)
155 155 else:
156 156 lfdirstate.add(f)
157 157 lfdirstate.write()
158 158 bad += [lfutil.splitstandin(f)
159 159 for f in repo[None].add(standins)
160 160 if f in m.files()]
161 161
162 162 added = [f for f in lfnames if f not in bad]
163 163 return added, bad
164 164
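The size and pattern checks above are driven by the [largefiles] section of the configuration. An hgrc sketch with illustrative values (the 2 MB threshold and these patterns are examples, not defaults taken from this code):

    [largefiles]
    # files larger than this many megabytes are added as largefiles
    minsize = 2
    # files matching any of these patterns are added as largefiles
    # regardless of size
    patterns =
      *.zip
      re:.*\.(png|bmp)$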
165 165 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
166 166 after = opts.get('after')
167 167 m = composelargefilematcher(matcher, repo[None].manifest())
168 168 try:
169 169 repo.lfstatus = True
170 170 s = repo.status(match=m, clean=not isaddremove)
171 171 finally:
172 172 repo.lfstatus = False
173 173 manifest = repo[None].manifest()
174 174 modified, added, deleted, clean = [[f for f in list
175 175 if lfutil.standin(f) in manifest]
176 176 for list in (s.modified, s.added,
177 177 s.deleted, s.clean)]
178 178
179 179 def warn(files, msg):
180 180 for f in files:
181 181 ui.warn(msg % m.rel(f))
182 182 return int(len(files) > 0)
183 183
184 184 result = 0
185 185
186 186 if after:
187 187 remove = deleted
188 188 result = warn(modified + added + clean,
189 189 _('not removing %s: file still exists\n'))
190 190 else:
191 191 remove = deleted + clean
192 192 result = warn(modified, _('not removing %s: file is modified (use -f'
193 193 ' to force removal)\n'))
194 194 result = warn(added, _('not removing %s: file has been marked for add'
195 195 ' (use forget to undo)\n')) or result
196 196
197 197 # Need to lock because standin files are deleted then removed from the
198 198 # repository and we could race in-between.
199 199 with repo.wlock():
200 200 lfdirstate = lfutil.openlfdirstate(ui, repo)
201 201 for f in sorted(remove):
202 202 if ui.verbose or not m.exact(f):
203 203 # addremove in core gets fancy with the name, remove doesn't
204 204 if isaddremove:
205 205 name = m.uipath(f)
206 206 else:
207 207 name = m.rel(f)
208 208 ui.status(_('removing %s\n') % name)
209 209
210 210 if not opts.get('dry_run'):
211 211 if not after:
212 212 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
213 213
214 214 if opts.get('dry_run'):
215 215 return result
216 216
217 217 remove = [lfutil.standin(f) for f in remove]
218 218 # If this is being called by addremove, let the original addremove
219 219 # function handle this.
220 220 if not isaddremove:
221 221 for f in remove:
222 222 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
223 223 repo[None].forget(remove)
224 224
225 225 for f in remove:
226 226 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
227 227 False)
228 228
229 229 lfdirstate.write()
230 230
231 231 return result
232 232
233 233 # For overriding mercurial.hgweb.webcommands so that largefiles will
234 234 # appear at their right place in the manifests.
235 235 def decodepath(orig, path):
236 236 return lfutil.splitstandin(path) or path
237 237
238 238 # -- Wrappers: modify existing commands --------------------------------
239 239
240 240 def overrideadd(orig, ui, repo, *pats, **opts):
241 241 if opts.get('normal') and opts.get('large'):
242 242 raise error.Abort(_('--normal cannot be used with --large'))
243 243 return orig(ui, repo, *pats, **opts)
244 244
245 245 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
246 246 # The --normal flag short circuits this override
247 247 if opts.get('normal'):
248 248 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
249 249
250 250 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
251 251 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
252 252 ladded)
253 253 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
254 254
255 255 bad.extend(f for f in lbad)
256 256 return bad
257 257
258 258 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
259 259 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
260 260 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
261 261 return removelargefiles(ui, repo, False, matcher, after=after,
262 262 force=force) or result
263 263
264 264 def overridestatusfn(orig, repo, rev2, **opts):
265 265 try:
266 266 repo._repo.lfstatus = True
267 267 return orig(repo, rev2, **opts)
268 268 finally:
269 269 repo._repo.lfstatus = False
270 270
271 271 def overridestatus(orig, ui, repo, *pats, **opts):
272 272 try:
273 273 repo.lfstatus = True
274 274 return orig(ui, repo, *pats, **opts)
275 275 finally:
276 276 repo.lfstatus = False
277 277
278 278 def overridedirty(orig, repo, ignoreupdate=False):
279 279 try:
280 280 repo._repo.lfstatus = True
281 281 return orig(repo, ignoreupdate)
282 282 finally:
283 283 repo._repo.lfstatus = False
284 284
285 285 def overridelog(orig, ui, repo, *pats, **opts):
286 286 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
287 287 default='relpath', badfn=None):
288 288 """Matcher that merges root directory with .hglf, suitable for log.
289 289 It is still possible to match .hglf directly.
290 290 For any listed files run log on the standin too.
291 291 matchfn tries both the given filename and with .hglf stripped.
292 292 """
293 293 if opts is None:
294 294 opts = {}
295 295 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
296 296 badfn=badfn)
297 297 m, p = copy.copy(matchandpats)
298 298
299 299 if m.always():
300 300 # We want to match everything anyway, so there's no benefit trying
301 301 # to add standins.
302 302 return matchandpats
303 303
304 304 pats = set(p)
305 305
306 306 def fixpats(pat, tostandin=lfutil.standin):
307 307 if pat.startswith('set:'):
308 308 return pat
309 309
310 310 kindpat = match_._patsplit(pat, None)
311 311
312 312 if kindpat[0] is not None:
313 313 return kindpat[0] + ':' + tostandin(kindpat[1])
314 314 return tostandin(kindpat[1])
315 315
316 316 if m._cwd:
317 317 hglf = lfutil.shortname
318 318 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
319 319
320 320 def tostandin(f):
321 321 # The file may already be a standin, so truncate the back
322 322 # prefix and test before mangling it. This avoids turning
323 323 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
324 324 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
325 325 return f
326 326
327 327 # An absolute path is from outside the repo, so truncate the
328 328 # path to the root before building the standin. Otherwise cwd
329 329 # is somewhere in the repo, relative to root, and needs to be
330 330 # prepended before building the standin.
331 331 if os.path.isabs(m._cwd):
332 332 f = f[len(back):]
333 333 else:
334 334 f = m._cwd + '/' + f
335 335 return back + lfutil.standin(f)
336 336
337 337 pats.update(fixpats(f, tostandin) for f in p)
338 338 else:
339 339 def tostandin(f):
340 340 if lfutil.splitstandin(f):
341 341 return f
342 342 return lfutil.standin(f)
343 343 pats.update(fixpats(f, tostandin) for f in p)
344 344
345 345 for i in range(0, len(m._files)):
346 346 # Don't add '.hglf' to m.files, since that is already covered by '.'
347 347 if m._files[i] == '.':
348 348 continue
349 349 standin = lfutil.standin(m._files[i])
350 350 # If the "standin" is a directory, append instead of replace to
351 351 # support naming a directory on the command line with only
352 352 # largefiles. The original directory is kept to support normal
353 353 # files.
354 354 if standin in repo[ctx.node()]:
355 355 m._files[i] = standin
356 356 elif m._files[i] not in repo[ctx.node()] \
357 357 and repo.wvfs.isdir(standin):
358 358 m._files.append(standin)
359 359
360 360 m._fileroots = set(m._files)
361 361 m._always = False
362 362 origmatchfn = m.matchfn
363 363 def lfmatchfn(f):
364 364 lf = lfutil.splitstandin(f)
365 365 if lf is not None and origmatchfn(lf):
366 366 return True
367 367 r = origmatchfn(f)
368 368 return r
369 369 m.matchfn = lfmatchfn
370 370
371 371 ui.debug('updated patterns: %s\n' % sorted(pats))
372 372 return m, pats
373 373
374 374 # For hg log --patch, the match object is used in two different senses:
375 375 # (1) to determine what revisions should be printed out, and
376 376 # (2) to determine what files to print out diffs for.
377 377 # The magic matchandpats override should be used for case (1) but not for
378 378 # case (2).
379 379 def overridemakelogfilematcher(repo, pats, opts, badfn=None):
380 380 wctx = repo[None]
381 381 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
382 382 return lambda rev: match
383 383
384 384 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
385 385 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
386 386 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
387 387
388 388 try:
389 389 return orig(ui, repo, *pats, **opts)
390 390 finally:
391 391 restorematchandpatsfn()
392 392 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
393 393
394 394 def overrideverify(orig, ui, repo, *pats, **opts):
395 395 large = opts.pop('large', False)
396 396 all = opts.pop('lfa', False)
397 397 contents = opts.pop('lfc', False)
398 398
399 399 result = orig(ui, repo, *pats, **opts)
400 400 if large or all or contents:
401 401 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
402 402 return result
403 403
404 404 def overridedebugstate(orig, ui, repo, *pats, **opts):
405 405 large = opts.pop('large', False)
406 406 if large:
407 407 class fakerepo(object):
408 408 dirstate = lfutil.openlfdirstate(ui, repo)
409 409 orig(ui, fakerepo, *pats, **opts)
410 410 else:
411 411 orig(ui, repo, *pats, **opts)
412 412
413 413 # Before starting the manifest merge, merge.update will call
414 414 # _checkunknownfile to check if there are any files in the merged-in
415 415 # changeset that collide with unknown files in the working copy.
416 416 #
417 417 # The largefiles are seen as unknown, so this prevents us from merging
418 418 # in a file 'foo' if we already have a largefile with the same name.
419 419 #
420 420 # The overridden function filters the unknown files by removing any
421 421 # largefiles. This makes the merge proceed and we can then handle this
422 422 # case further in the overridden calculateupdates function below.
423 423 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
424 424 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
425 425 return False
426 426 return origfn(repo, wctx, mctx, f, f2)
427 427
428 428 # The manifest merge handles conflicts on the manifest level. We want
429 429 # to handle changes in largefile-ness of files at this level too.
430 430 #
431 431 # The strategy is to run the original calculateupdates and then process
432 432 # the action list it outputs. There are two cases we need to deal with:
433 433 #
434 434 # 1. Normal file in p1, largefile in p2. Here the largefile is
435 435 # detected via its standin file, which will enter the working copy
436 436 # with a "get" action. It is not "merge" since the standin is all
437 437 # Mercurial is concerned with at this level -- the link to the
438 438 # existing normal file is not relevant here.
439 439 #
440 440 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
441 441 # since the largefile will be present in the working copy and
442 442 # different from the normal file in p2. Mercurial therefore
443 443 # triggers a merge action.
444 444 #
445 445 # In both cases, we prompt the user and emit new actions to either
446 446 # remove the standin (if the normal file was kept) or to remove the
447 447 # normal file and get the standin (if the largefile was kept). The
448 448 # default prompt answer is to use the largefile version since it was
449 449 # presumably changed on purpose.
450 450 #
451 451 # Finally, the merge.applyupdates function will then take care of
452 452 # writing the files into the working copy and lfcommands.updatelfiles
453 453 # will update the largefiles.
454 454 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
455 acceptremote, followcopies, matcher=None):
455 acceptremote, followcopies, matcher=None,
456 mergeforce=False):
456 457 overwrite = force and not branchmerge
457 458 actions, diverge, renamedelete = origfn(
458 459 repo, p1, p2, pas, branchmerge, force, acceptremote,
459 460 followcopies, matcher=matcher)
460 461
461 462 if overwrite:
462 463 return actions, diverge, renamedelete
463 464
464 465 # Convert to dictionary with filename as key and action as value.
465 466 lfiles = set()
466 467 for f in actions:
467 468 splitstandin = lfutil.splitstandin(f)
468 469 if splitstandin in p1:
469 470 lfiles.add(splitstandin)
470 471 elif lfutil.standin(f) in p1:
471 472 lfiles.add(f)
472 473
473 474 for lfile in sorted(lfiles):
474 475 standin = lfutil.standin(lfile)
475 476 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
476 477 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
477 478 if sm in ('g', 'dc') and lm != 'r':
478 479 if sm == 'dc':
479 480 f1, f2, fa, move, anc = sargs
480 481 sargs = (p2[f2].flags(), False)
481 482 # Case 1: normal file in the working copy, largefile in
482 483 # the second parent
483 484 usermsg = _('remote turned local normal file %s into a largefile\n'
484 485 'use (l)argefile or keep (n)ormal file?'
485 486 '$$ &Largefile $$ &Normal file') % lfile
486 487 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
487 488 actions[lfile] = ('r', None, 'replaced by standin')
488 489 actions[standin] = ('g', sargs, 'replaces standin')
489 490 else: # keep local normal file
490 491 actions[lfile] = ('k', None, 'replaces standin')
491 492 if branchmerge:
492 493 actions[standin] = ('k', None, 'replaced by non-standin')
493 494 else:
494 495 actions[standin] = ('r', None, 'replaced by non-standin')
495 496 elif lm in ('g', 'dc') and sm != 'r':
496 497 if lm == 'dc':
497 498 f1, f2, fa, move, anc = largs
498 499 largs = (p2[f2].flags(), False)
499 500 # Case 2: largefile in the working copy, normal file in
500 501 # the second parent
501 502 usermsg = _('remote turned local largefile %s into a normal file\n'
502 503 'keep (l)argefile or use (n)ormal file?'
503 504 '$$ &Largefile $$ &Normal file') % lfile
504 505 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
505 506 if branchmerge:
506 507 # largefile can be restored from standin safely
507 508 actions[lfile] = ('k', None, 'replaced by standin')
508 509 actions[standin] = ('k', None, 'replaces standin')
509 510 else:
510 511 # "lfile" should be marked as "removed" without
511 512 # removal of itself
512 513 actions[lfile] = ('lfmr', None,
513 514 'forget non-standin largefile')
514 515
515 516 # linear-merge should treat this largefile as 're-added'
516 517 actions[standin] = ('a', None, 'keep standin')
517 518 else: # pick remote normal file
518 519 actions[lfile] = ('g', largs, 'replaces standin')
519 520 actions[standin] = ('r', None, 'replaced by non-standin')
520 521
521 522 return actions, diverge, renamedelete
522 523
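To make the action rewriting above concrete, a worked example (the file name foo is assumed, not taken from the source): if foo is a normal file in the working copy and a largefile in the other parent, calculateupdates first emits a 'g' (get) action for the standin .hglf/foo. Answering "largefile" at the prompt makes the override rewrite the action dictionary roughly as

    actions['foo']       = ('r', None, 'replaced by standin')
    actions['.hglf/foo'] = ('g', sargs, 'replaces standin')

so that merge.applyupdates removes the normal file and fetches the standin, and lfcommands.updatelfiles then materialises the largefile in the working copy.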
523 524 def mergerecordupdates(orig, repo, actions, branchmerge):
524 525 if 'lfmr' in actions:
525 526 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
526 527 for lfile, args, msg in actions['lfmr']:
527 528 # this should be executed before 'orig', to execute 'remove'
528 529 # before all other actions
529 530 repo.dirstate.remove(lfile)
530 531 # make sure lfile doesn't get synclfdirstate'd as normal
531 532 lfdirstate.add(lfile)
532 533 lfdirstate.write()
533 534
534 535 return orig(repo, actions, branchmerge)
535 536
536 537
537 538 # Override filemerge to prompt the user about how they wish to merge
538 539 # largefiles. This will handle identical edits without prompting the user.
539 540 def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
540 541 labels=None):
541 542 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
542 543 return origfn(premerge, repo, mynode, orig, fcd, fco, fca,
543 544 labels=labels)
544 545
545 546 ahash = fca.data().strip().lower()
546 547 dhash = fcd.data().strip().lower()
547 548 ohash = fco.data().strip().lower()
548 549 if (ohash != ahash and
549 550 ohash != dhash and
550 551 (dhash == ahash or
551 552 repo.ui.promptchoice(
552 553 _('largefile %s has a merge conflict\nancestor was %s\n'
553 554 'keep (l)ocal %s or\ntake (o)ther %s?'
554 555 '$$ &Local $$ &Other') %
555 556 (lfutil.splitstandin(orig), ahash, dhash, ohash),
556 557 0) == 1)):
557 558 repo.wwrite(fcd.path(), fco.data(), fco.flags())
558 559 return True, 0, False
559 560
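The condition above prompts only when both sides really changed the largefile in different ways. A worked illustration with made-up hashes A, B and C standing in for the values stored in the standins:

    ancestor  local  other   outcome
    A         A      B       take other silently (local side unchanged)
    A         B      A       keep local, no prompt (other side unchanged)
    A         B      B       keep local, no prompt (identical edits)
    A         B      C       prompt: keep (l)ocal or take (o)ther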
560 561 def copiespathcopies(orig, ctx1, ctx2, match=None):
561 562 copies = orig(ctx1, ctx2, match=match)
562 563 updated = {}
563 564
564 565 for k, v in copies.iteritems():
565 566 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
566 567
567 568 return updated
568 569
569 570 # Copy first changes the matchers to match standins instead of
570 571 # largefiles. Then it overrides util.copyfile; in that function it
571 572 # checks if the destination largefile already exists. It also keeps a
572 573 # list of copied files so that the largefiles can be copied and the
573 574 # dirstate updated.
574 575 def overridecopy(orig, ui, repo, pats, opts, rename=False):
575 576 # doesn't remove largefile on rename
576 577 if len(pats) < 2:
577 578 # this isn't legal, let the original function deal with it
578 579 return orig(ui, repo, pats, opts, rename)
579 580
580 581 # This could copy both lfiles and normal files in one command,
581 582 # but we don't want to do that. First replace their matcher to
582 583 # only match normal files and run it, then replace it to just
583 584 # match largefiles and run it again.
584 585 nonormalfiles = False
585 586 nolfiles = False
586 587 installnormalfilesmatchfn(repo[None].manifest())
587 588 try:
588 589 result = orig(ui, repo, pats, opts, rename)
589 590 except error.Abort as e:
590 591 if str(e) != _('no files to copy'):
591 592 raise e
592 593 else:
593 594 nonormalfiles = True
594 595 result = 0
595 596 finally:
596 597 restorematchfn()
597 598
598 599 # The first rename can cause our current working directory to be removed.
599 600 # In that case there is nothing left to copy/rename so just quit.
600 601 try:
601 602 repo.getcwd()
602 603 except OSError:
603 604 return result
604 605
605 606 def makestandin(relpath):
606 607 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
607 608 return os.path.join(repo.wjoin(lfutil.standin(path)))
608 609
609 610 fullpats = scmutil.expandpats(pats)
610 611 dest = fullpats[-1]
611 612
612 613 if os.path.isdir(dest):
613 614 if not os.path.isdir(makestandin(dest)):
614 615 os.makedirs(makestandin(dest))
615 616
616 617 try:
617 618 # When we call orig below it creates the standins, but we don't add
618 619 # them to the dirstate until later, so lock during that time.
619 620 wlock = repo.wlock()
620 621
621 622 manifest = repo[None].manifest()
622 623 def overridematch(ctx, pats=(), opts=None, globbed=False,
623 624 default='relpath', badfn=None):
624 625 if opts is None:
625 626 opts = {}
626 627 newpats = []
627 628 # The patterns were previously mangled to add the standin
628 629 # directory; we need to remove that now
629 630 for pat in pats:
630 631 if match_.patkind(pat) is None and lfutil.shortname in pat:
631 632 newpats.append(pat.replace(lfutil.shortname, ''))
632 633 else:
633 634 newpats.append(pat)
634 635 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
635 636 m = copy.copy(match)
636 637 lfile = lambda f: lfutil.standin(f) in manifest
637 638 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
638 639 m._fileroots = set(m._files)
639 640 origmatchfn = m.matchfn
640 641 m.matchfn = lambda f: (lfutil.isstandin(f) and
641 642 (f in manifest) and
642 643 origmatchfn(lfutil.splitstandin(f)) or
643 644 None)
644 645 return m
645 646 oldmatch = installmatchfn(overridematch)
646 647 listpats = []
647 648 for pat in pats:
648 649 if match_.patkind(pat) is not None:
649 650 listpats.append(pat)
650 651 else:
651 652 listpats.append(makestandin(pat))
652 653
653 654 try:
654 655 origcopyfile = util.copyfile
655 656 copiedfiles = []
656 657 def overridecopyfile(src, dest):
657 658 if (lfutil.shortname in src and
658 659 dest.startswith(repo.wjoin(lfutil.shortname))):
659 660 destlfile = dest.replace(lfutil.shortname, '')
660 661 if not opts['force'] and os.path.exists(destlfile):
661 662 raise IOError('',
662 663 _('destination largefile already exists'))
663 664 copiedfiles.append((src, dest))
664 665 origcopyfile(src, dest)
665 666
666 667 util.copyfile = overridecopyfile
667 668 result += orig(ui, repo, listpats, opts, rename)
668 669 finally:
669 670 util.copyfile = origcopyfile
670 671
671 672 lfdirstate = lfutil.openlfdirstate(ui, repo)
672 673 for (src, dest) in copiedfiles:
673 674 if (lfutil.shortname in src and
674 675 dest.startswith(repo.wjoin(lfutil.shortname))):
675 676 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
676 677 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
677 678 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
678 679 if not os.path.isdir(destlfiledir):
679 680 os.makedirs(destlfiledir)
680 681 if rename:
681 682 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
682 683
683 684 # The file is gone, but this deletes any empty parent
684 685 # directories as a side-effect.
685 686 util.unlinkpath(repo.wjoin(srclfile), True)
686 687 lfdirstate.remove(srclfile)
687 688 else:
688 689 util.copyfile(repo.wjoin(srclfile),
689 690 repo.wjoin(destlfile))
690 691
691 692 lfdirstate.add(destlfile)
692 693 lfdirstate.write()
693 694 except error.Abort as e:
694 695 if str(e) != _('no files to copy'):
695 696 raise e
696 697 else:
697 698 nolfiles = True
698 699 finally:
699 700 restorematchfn()
700 701 wlock.release()
701 702
702 703 if nolfiles and nonormalfiles:
703 704 raise error.Abort(_('no files to copy'))
704 705
705 706 return result
706 707
707 708 # When the user calls revert, we have to be careful to not revert any
708 709 # changes to other largefiles accidentally. This means we have to keep
709 710 # track of the largefiles that are being reverted so we only pull down
710 711 # the necessary largefiles.
711 712 #
712 713 # Standins are only updated (to match the hash of largefiles) before
713 714 # commits. Update the standins then run the original revert, changing
714 715 # the matcher to hit standins instead of largefiles. Based on the
715 716 # resulting standins update the largefiles.
716 717 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
717 718 # Because we put the standins in a bad state (by updating them)
718 719 # and then return them to a correct state we need to lock to
719 720 # prevent others from changing them in their incorrect state.
720 721 with repo.wlock():
721 722 lfdirstate = lfutil.openlfdirstate(ui, repo)
722 723 s = lfutil.lfdirstatestatus(lfdirstate, repo)
723 724 lfdirstate.write()
724 725 for lfile in s.modified:
725 726 lfutil.updatestandin(repo, lfutil.standin(lfile))
726 727 for lfile in s.deleted:
727 728 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
728 729 os.unlink(repo.wjoin(lfutil.standin(lfile)))
729 730
730 731 oldstandins = lfutil.getstandinsstate(repo)
731 732
732 733 def overridematch(mctx, pats=(), opts=None, globbed=False,
733 734 default='relpath', badfn=None):
734 735 if opts is None:
735 736 opts = {}
736 737 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
737 738 m = copy.copy(match)
738 739
739 740 # revert supports recursing into subrepos, and though largefiles
740 741 # currently doesn't work correctly in that case, this match is
741 742 # called, so the lfdirstate above may not be the correct one for
742 743 # this invocation of match.
743 744 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
744 745 False)
745 746
746 747 def tostandin(f):
747 748 standin = lfutil.standin(f)
748 749 if standin in ctx or standin in mctx:
749 750 return standin
750 751 elif standin in repo[None] or lfdirstate[f] == 'r':
751 752 return None
752 753 return f
753 754 m._files = [tostandin(f) for f in m._files]
754 755 m._files = [f for f in m._files if f is not None]
755 756 m._fileroots = set(m._files)
756 757 origmatchfn = m.matchfn
757 758 def matchfn(f):
758 759 if lfutil.isstandin(f):
759 760 return (origmatchfn(lfutil.splitstandin(f)) and
760 761 (f in ctx or f in mctx))
761 762 return origmatchfn(f)
762 763 m.matchfn = matchfn
763 764 return m
764 765 oldmatch = installmatchfn(overridematch)
765 766 try:
766 767 orig(ui, repo, ctx, parents, *pats, **opts)
767 768 finally:
768 769 restorematchfn()
769 770
770 771 newstandins = lfutil.getstandinsstate(repo)
771 772 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
772 773 # lfdirstate should be 'normallookup'-ed for updated files,
773 774 # because reverting doesn't touch dirstate for 'normal' files
774 775 # when target revision is explicitly specified: in such a case,
775 776 # 'n' and a valid timestamp in dirstate do not ensure that the
776 777 # target (standin) file is 'clean'.
777 778 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
778 779 normallookup=True)
779 780
780 781 # after pulling changesets, we need to take some extra care to get
781 782 # largefiles updated remotely
782 783 def overridepull(orig, ui, repo, source=None, **opts):
783 784 revsprepull = len(repo)
784 785 if not source:
785 786 source = 'default'
786 787 repo.lfpullsource = source
787 788 result = orig(ui, repo, source, **opts)
788 789 revspostpull = len(repo)
789 790 lfrevs = opts.get('lfrev', [])
790 791 if opts.get('all_largefiles'):
791 792 lfrevs.append('pulled()')
792 793 if lfrevs and revspostpull > revsprepull:
793 794 numcached = 0
794 795 repo.firstpulled = revsprepull # for pulled() revset expression
795 796 try:
796 797 for rev in scmutil.revrange(repo, lfrevs):
797 798 ui.note(_('pulling largefiles for revision %s\n') % rev)
798 799 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
799 800 numcached += len(cached)
800 801 finally:
801 802 del repo.firstpulled
802 803 ui.status(_("%d largefiles cached\n") % numcached)
803 804 return result
804 805
805 806 revsetpredicate = revset.extpredicate()
806 807
807 808 @revsetpredicate('pulled()')
808 809 def pulledrevsetsymbol(repo, subset, x):
809 810 """Changesets that have just been pulled.
810 811
811 812 Only available with largefiles from pull --lfrev expressions.
812 813
813 814 .. container:: verbose
814 815
815 816 Some examples:
816 817
817 818 - pull largefiles for all new changesets::
818 819
819 820 hg pull --lfrev "pulled()"
820 821
821 822 - pull largefiles for all new branch heads::
822 823
823 824 hg pull --lfrev "head(pulled()) and not closed()"
824 825
825 826 """
826 827
827 828 try:
828 829 firstpulled = repo.firstpulled
829 830 except AttributeError:
830 831 raise error.Abort(_("pulled() only available in --lfrev"))
831 832 return revset.baseset([r for r in subset if r >= firstpulled])
832 833
833 834 def overrideclone(orig, ui, source, dest=None, **opts):
834 835 d = dest
835 836 if d is None:
836 837 d = hg.defaultdest(source)
837 838 if opts.get('all_largefiles') and not hg.islocal(d):
838 839 raise error.Abort(_(
839 840 '--all-largefiles is incompatible with non-local destination %s') %
840 841 d)
841 842
842 843 return orig(ui, source, dest, **opts)
843 844
844 845 def hgclone(orig, ui, opts, *args, **kwargs):
845 846 result = orig(ui, opts, *args, **kwargs)
846 847
847 848 if result is not None:
848 849 sourcerepo, destrepo = result
849 850 repo = destrepo.local()
850 851
851 852 # When cloning to a remote repo (like through SSH), no repo is available
852 853 # from the peer. Therefore the largefiles can't be downloaded and the
853 854 # hgrc can't be updated.
854 855 if not repo:
855 856 return result
856 857
857 858 # If largefiles is required for this repo, permanently enable it locally
858 859 if 'largefiles' in repo.requirements:
859 860 fp = repo.vfs('hgrc', 'a', text=True)
860 861 try:
861 862 fp.write('\n[extensions]\nlargefiles=\n')
862 863 finally:
863 864 fp.close()
864 865
865 866 # Caching is implicitly limited to 'rev' option, since the dest repo was
866 867 # truncated at that point. The user may expect a download count with
867 868 # this option, so attempt it whether or not this is a largefile repo.
868 869 if opts.get('all_largefiles'):
869 870 success, missing = lfcommands.downloadlfiles(ui, repo, None)
870 871
871 872 if missing != 0:
872 873 return None
873 874
874 875 return result
875 876
876 877 def overriderebase(orig, ui, repo, **opts):
877 878 if not util.safehasattr(repo, '_largefilesenabled'):
878 879 return orig(ui, repo, **opts)
879 880
880 881 resuming = opts.get('continue')
881 882 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
882 883 repo._lfstatuswriters.append(lambda *msg, **opts: None)
883 884 try:
884 885 return orig(ui, repo, **opts)
885 886 finally:
886 887 repo._lfstatuswriters.pop()
887 888 repo._lfcommithooks.pop()
888 889
889 890 def overridearchivecmd(orig, ui, repo, dest, **opts):
890 891 repo.unfiltered().lfstatus = True
891 892
892 893 try:
893 894 return orig(ui, repo.unfiltered(), dest, **opts)
894 895 finally:
895 896 repo.unfiltered().lfstatus = False
896 897
897 898 def hgwebarchive(orig, web, req, tmpl):
898 899 web.repo.lfstatus = True
899 900
900 901 try:
901 902 return orig(web, req, tmpl)
902 903 finally:
903 904 web.repo.lfstatus = False
904 905
905 906 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
906 907 prefix='', mtime=None, subrepos=None):
907 908 # For some reason setting repo.lfstatus in hgwebarchive only changes the
908 909 # unfiltered repo's attr, so check that as well.
909 910 if not repo.lfstatus and not repo.unfiltered().lfstatus:
910 911 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
911 912 subrepos)
912 913
913 914 # No need to lock because we are only reading history and
914 915 # largefile caches, neither of which are modified.
915 916 if node is not None:
916 917 lfcommands.cachelfiles(repo.ui, repo, node)
917 918
918 919 if kind not in archival.archivers:
919 920 raise error.Abort(_("unknown archive type '%s'") % kind)
920 921
921 922 ctx = repo[node]
922 923
923 924 if kind == 'files':
924 925 if prefix:
925 926 raise error.Abort(
926 927 _('cannot give prefix when archiving to files'))
927 928 else:
928 929 prefix = archival.tidyprefix(dest, kind, prefix)
929 930
930 931 def write(name, mode, islink, getdata):
931 932 if matchfn and not matchfn(name):
932 933 return
933 934 data = getdata()
934 935 if decode:
935 936 data = repo.wwritedata(name, data)
936 937 archiver.addfile(prefix + name, mode, islink, data)
937 938
938 939 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
939 940
940 941 if repo.ui.configbool("ui", "archivemeta", True):
941 942 write('.hg_archival.txt', 0o644, False,
942 943 lambda: archival.buildmetadata(ctx))
943 944
944 945 for f in ctx:
945 946 ff = ctx.flags(f)
946 947 getdata = ctx[f].data
947 948 if lfutil.isstandin(f):
948 949 if node is not None:
949 950 path = lfutil.findfile(repo, getdata().strip())
950 951
951 952 if path is None:
952 953 raise error.Abort(
953 954 _('largefile %s not found in repo store or system cache')
954 955 % lfutil.splitstandin(f))
955 956 else:
956 957 path = lfutil.splitstandin(f)
957 958
958 959 f = lfutil.splitstandin(f)
959 960
960 961 getdata = lambda: util.readfile(path)
961 962 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
962 963
963 964 if subrepos:
964 965 for subpath in sorted(ctx.substate):
965 966 sub = ctx.workingsub(subpath)
966 967 submatch = match_.subdirmatcher(subpath, matchfn)
967 968 sub._repo.lfstatus = True
968 969 sub.archive(archiver, prefix, submatch)
969 970
970 971 archiver.done()
971 972
972 973 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
973 974 if not repo._repo.lfstatus:
974 975 return orig(repo, archiver, prefix, match)
975 976
976 977 repo._get(repo._state + ('hg',))
977 978 rev = repo._state[1]
978 979 ctx = repo._repo[rev]
979 980
980 981 if ctx.node() is not None:
981 982 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
982 983
983 984 def write(name, mode, islink, getdata):
984 985 # At this point, the standin has been replaced with the largefile name,
985 986 # so the normal matcher works here without the lfutil variants.
986 987 if match and not match(f):
987 988 return
988 989 data = getdata()
989 990
990 991 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
991 992
992 993 for f in ctx:
993 994 ff = ctx.flags(f)
994 995 getdata = ctx[f].data
995 996 if lfutil.isstandin(f):
996 997 if ctx.node() is not None:
997 998 path = lfutil.findfile(repo._repo, getdata().strip())
998 999
999 1000 if path is None:
1000 1001 raise error.Abort(
1001 1002 _('largefile %s not found in repo store or system cache')
1002 1003 % lfutil.splitstandin(f))
1003 1004 else:
1004 1005 path = lfutil.splitstandin(f)
1005 1006
1006 1007 f = lfutil.splitstandin(f)
1007 1008
1008 1009 getdata = lambda: util.readfile(os.path.join(prefix, path))
1009 1010
1010 1011 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1011 1012
1012 1013 for subpath in sorted(ctx.substate):
1013 1014 sub = ctx.workingsub(subpath)
1014 1015 submatch = match_.subdirmatcher(subpath, match)
1015 1016 sub._repo.lfstatus = True
1016 1017 sub.archive(archiver, prefix + repo._path + '/', submatch)
1017 1018
1018 1019 # If a largefile is modified, the change is not reflected in its
1019 1020 # standin until a commit. cmdutil.bailifchanged() raises an exception
1020 1021 # if the repo has uncommitted changes. Wrap it to also check if
1021 1022 # largefiles were changed. This is used by bisect, backout and fetch.
1022 1023 def overridebailifchanged(orig, repo, *args, **kwargs):
1023 1024 orig(repo, *args, **kwargs)
1024 1025 repo.lfstatus = True
1025 1026 s = repo.status()
1026 1027 repo.lfstatus = False
1027 1028 if s.modified or s.added or s.removed or s.deleted:
1028 1029 raise error.Abort(_('uncommitted changes'))
1029 1030
1030 1031 def postcommitstatus(orig, repo, *args, **kwargs):
1031 1032 repo.lfstatus = True
1032 1033 try:
1033 1034 return orig(repo, *args, **kwargs)
1034 1035 finally:
1035 1036 repo.lfstatus = False
1036 1037
1037 1038 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1038 1039 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1039 1040 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1040 1041 m = composelargefilematcher(match, repo[None].manifest())
1041 1042
1042 1043 try:
1043 1044 repo.lfstatus = True
1044 1045 s = repo.status(match=m, clean=True)
1045 1046 finally:
1046 1047 repo.lfstatus = False
1047 1048 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1048 1049 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1049 1050
1050 1051 for f in forget:
1051 1052 if lfutil.standin(f) not in repo.dirstate and not \
1052 1053 repo.wvfs.isdir(lfutil.standin(f)):
1053 1054 ui.warn(_('not removing %s: file is already untracked\n')
1054 1055 % m.rel(f))
1055 1056 bad.append(f)
1056 1057
1057 1058 for f in forget:
1058 1059 if ui.verbose or not m.exact(f):
1059 1060 ui.status(_('removing %s\n') % m.rel(f))
1060 1061
1061 1062 # Need to lock because standin files are deleted then removed from the
1062 1063 # repository and we could race in-between.
1063 1064 with repo.wlock():
1064 1065 lfdirstate = lfutil.openlfdirstate(ui, repo)
1065 1066 for f in forget:
1066 1067 if lfdirstate[f] == 'a':
1067 1068 lfdirstate.drop(f)
1068 1069 else:
1069 1070 lfdirstate.remove(f)
1070 1071 lfdirstate.write()
1071 1072 standins = [lfutil.standin(f) for f in forget]
1072 1073 for f in standins:
1073 1074 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1074 1075 rejected = repo[None].forget(standins)
1075 1076
1076 1077 bad.extend(f for f in rejected if f in m.files())
1077 1078 forgot.extend(f for f in forget if f not in rejected)
1078 1079 return bad, forgot
1079 1080
1080 1081 def _getoutgoings(repo, other, missing, addfunc):
1081 1082 """get pairs of filename and largefile hash in outgoing revisions
1082 1083 in 'missing'.
1083 1084
1084 1085 largefiles already existing on 'other' repository are ignored.
1085 1086
1086 1087 'addfunc' is invoked with each unique pair of filename and
1087 1088 largefile hash value.
1088 1089 """
1089 1090 knowns = set()
1090 1091 lfhashes = set()
1091 1092 def dedup(fn, lfhash):
1092 1093 k = (fn, lfhash)
1093 1094 if k not in knowns:
1094 1095 knowns.add(k)
1095 1096 lfhashes.add(lfhash)
1096 1097 lfutil.getlfilestoupload(repo, missing, dedup)
1097 1098 if lfhashes:
1098 1099 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1099 1100 for fn, lfhash in knowns:
1100 1101 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1101 1102 addfunc(fn, lfhash)
1102 1103
1103 1104 def outgoinghook(ui, repo, other, opts, missing):
1104 1105 if opts.pop('large', None):
1105 1106 lfhashes = set()
1106 1107 if ui.debugflag:
1107 1108 toupload = {}
1108 1109 def addfunc(fn, lfhash):
1109 1110 if fn not in toupload:
1110 1111 toupload[fn] = []
1111 1112 toupload[fn].append(lfhash)
1112 1113 lfhashes.add(lfhash)
1113 1114 def showhashes(fn):
1114 1115 for lfhash in sorted(toupload[fn]):
1115 1116 ui.debug(' %s\n' % (lfhash))
1116 1117 else:
1117 1118 toupload = set()
1118 1119 def addfunc(fn, lfhash):
1119 1120 toupload.add(fn)
1120 1121 lfhashes.add(lfhash)
1121 1122 def showhashes(fn):
1122 1123 pass
1123 1124 _getoutgoings(repo, other, missing, addfunc)
1124 1125
1125 1126 if not toupload:
1126 1127 ui.status(_('largefiles: no files to upload\n'))
1127 1128 else:
1128 1129 ui.status(_('largefiles to upload (%d entities):\n')
1129 1130 % (len(lfhashes)))
1130 1131 for file in sorted(toupload):
1131 1132 ui.status(lfutil.splitstandin(file) + '\n')
1132 1133 showhashes(file)
1133 1134 ui.status('\n')
1134 1135
1135 1136 def summaryremotehook(ui, repo, opts, changes):
1136 1137 largeopt = opts.get('large', False)
1137 1138 if changes is None:
1138 1139 if largeopt:
1139 1140 return (False, True) # only outgoing check is needed
1140 1141 else:
1141 1142 return (False, False)
1142 1143 elif largeopt:
1143 1144 url, branch, peer, outgoing = changes[1]
1144 1145 if peer is None:
1145 1146 # i18n: column positioning for "hg summary"
1146 1147 ui.status(_('largefiles: (no remote repo)\n'))
1147 1148 return
1148 1149
1149 1150 toupload = set()
1150 1151 lfhashes = set()
1151 1152 def addfunc(fn, lfhash):
1152 1153 toupload.add(fn)
1153 1154 lfhashes.add(lfhash)
1154 1155 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1155 1156
1156 1157 if not toupload:
1157 1158 # i18n: column positioning for "hg summary"
1158 1159 ui.status(_('largefiles: (no files to upload)\n'))
1159 1160 else:
1160 1161 # i18n: column positioning for "hg summary"
1161 1162 ui.status(_('largefiles: %d entities for %d files to upload\n')
1162 1163 % (len(lfhashes), len(toupload)))
1163 1164
1164 1165 def overridesummary(orig, ui, repo, *pats, **opts):
1165 1166 try:
1166 1167 repo.lfstatus = True
1167 1168 orig(ui, repo, *pats, **opts)
1168 1169 finally:
1169 1170 repo.lfstatus = False
1170 1171
1171 1172 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1172 1173 similarity=None):
1173 1174 if opts is None:
1174 1175 opts = {}
1175 1176 if not lfutil.islfilesrepo(repo):
1176 1177 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1177 1178 # Get the list of missing largefiles so we can remove them
1178 1179 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1179 1180 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1180 1181 False, False, False)
1181 1182
1182 1183 # Call into the normal remove code, but we want the removal of the standin
1183 1184 # to be handled by the original addremove. Monkey patching here makes sure
1184 1185 # we don't remove the standin in the largefiles code, preventing a very
1185 1186 # confused state later.
1186 1187 if s.deleted:
1187 1188 m = copy.copy(matcher)
1188 1189
1189 1190 # The m._files and m._map attributes are not changed to the deleted list
1190 1191 # because that affects the m.exact() test, which in turn governs whether
1191 1192 # or not the file name is printed, and how. Simply limit the original
1192 1193 # matches to those in the deleted status list.
1193 1194 matchfn = m.matchfn
1194 1195 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1195 1196
1196 1197 removelargefiles(repo.ui, repo, True, m, **opts)
1197 1198 # Call into the normal add code, and any files that *should* be added as
1198 1199 # largefiles will be
1199 1200 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1200 1201 # Now that we've handled largefiles, hand off to the original addremove
1201 1202 # function to take care of the rest. Make sure it doesn't do anything with
1202 1203 # largefiles by passing a matcher that will ignore them.
1203 1204 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1204 1205 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1205 1206
1206 1207 # Calling purge with --all will cause the largefiles to be deleted.
1207 1208 # Override repo.status to prevent this from happening.
1208 1209 def overridepurge(orig, ui, repo, *dirs, **opts):
1209 1210 # XXX Monkey patching a repoview will not work. The assigned attribute will
1210 1211 # be set on the unfiltered repo, but we will only lookup attributes in the
1211 1212 # unfiltered repo if the lookup in the repoview object itself fails. As the
1212 1213 # monkey patched method exists on the repoview class the lookup will not
1213 1214 # fail. As a result, the original version will shadow the monkey patched
1214 1215 # one, defeating the monkey patch.
1215 1216 #
1216 1217 # As a work around we use an unfiltered repo here. We should do something
1217 1218 # cleaner instead.
1218 1219 repo = repo.unfiltered()
1219 1220 oldstatus = repo.status
1220 1221 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1221 1222 clean=False, unknown=False, listsubrepos=False):
1222 1223 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1223 1224 listsubrepos)
1224 1225 lfdirstate = lfutil.openlfdirstate(ui, repo)
1225 1226 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1226 1227 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1227 1228 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1228 1229 unknown, ignored, r.clean)
1229 1230 repo.status = overridestatus
1230 1231 orig(ui, repo, *dirs, **opts)
1231 1232 repo.status = oldstatus
1232 1233 def overriderollback(orig, ui, repo, **opts):
1233 1234 with repo.wlock():
1234 1235 before = repo.dirstate.parents()
1235 1236 orphans = set(f for f in repo.dirstate
1236 1237 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1237 1238 result = orig(ui, repo, **opts)
1238 1239 after = repo.dirstate.parents()
1239 1240 if before == after:
1240 1241 return result # no need to restore standins
1241 1242
1242 1243 pctx = repo['.']
1243 1244 for f in repo.dirstate:
1244 1245 if lfutil.isstandin(f):
1245 1246 orphans.discard(f)
1246 1247 if repo.dirstate[f] == 'r':
1247 1248 repo.wvfs.unlinkpath(f, ignoremissing=True)
1248 1249 elif f in pctx:
1249 1250 fctx = pctx[f]
1250 1251 repo.wwrite(f, fctx.data(), fctx.flags())
1251 1252 else:
1252 1253 # content of standin is not so important in 'a',
1253 1254 # 'm' or 'n' (coming from the 2nd parent) cases
1254 1255 lfutil.writestandin(repo, f, '', False)
1255 1256 for standin in orphans:
1256 1257 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1257 1258
1258 1259 lfdirstate = lfutil.openlfdirstate(ui, repo)
1259 1260 orphans = set(lfdirstate)
1260 1261 lfiles = lfutil.listlfiles(repo)
1261 1262 for file in lfiles:
1262 1263 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1263 1264 orphans.discard(file)
1264 1265 for lfile in orphans:
1265 1266 lfdirstate.drop(lfile)
1266 1267 lfdirstate.write()
1267 1268 return result
1268 1269
1269 1270 def overridetransplant(orig, ui, repo, *revs, **opts):
1270 1271 resuming = opts.get('continue')
1271 1272 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1272 1273 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1273 1274 try:
1274 1275 result = orig(ui, repo, *revs, **opts)
1275 1276 finally:
1276 1277 repo._lfstatuswriters.pop()
1277 1278 repo._lfcommithooks.pop()
1278 1279 return result
1279 1280
1280 1281 def overridecat(orig, ui, repo, file1, *pats, **opts):
1281 1282 ctx = scmutil.revsingle(repo, opts.get('rev'))
1282 1283 err = 1
1283 1284 notbad = set()
1284 1285 m = scmutil.match(ctx, (file1,) + pats, opts)
1285 1286 origmatchfn = m.matchfn
1286 1287 def lfmatchfn(f):
1287 1288 if origmatchfn(f):
1288 1289 return True
1289 1290 lf = lfutil.splitstandin(f)
1290 1291 if lf is None:
1291 1292 return False
1292 1293 notbad.add(lf)
1293 1294 return origmatchfn(lf)
1294 1295 m.matchfn = lfmatchfn
1295 1296 origbadfn = m.bad
1296 1297 def lfbadfn(f, msg):
1297 1298 if not f in notbad:
1298 1299 origbadfn(f, msg)
1299 1300 m.bad = lfbadfn
1300 1301
1301 1302 origvisitdirfn = m.visitdir
1302 1303 def lfvisitdirfn(dir):
1303 1304 if dir == lfutil.shortname:
1304 1305 return True
1305 1306 ret = origvisitdirfn(dir)
1306 1307 if ret:
1307 1308 return ret
1308 1309 lf = lfutil.splitstandin(dir)
1309 1310 if lf is None:
1310 1311 return False
1311 1312 return origvisitdirfn(lf)
1312 1313 m.visitdir = lfvisitdirfn
1313 1314
1314 1315 for f in ctx.walk(m):
1315 1316 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1316 1317 pathname=f)
1317 1318 lf = lfutil.splitstandin(f)
1318 1319 if lf is None or origmatchfn(f):
1319 1320 # duplicating unreachable code from commands.cat
1320 1321 data = ctx[f].data()
1321 1322 if opts.get('decode'):
1322 1323 data = repo.wwritedata(f, data)
1323 1324 fp.write(data)
1324 1325 else:
1325 1326 hash = lfutil.readstandin(repo, lf, ctx.rev())
1326 1327 if not lfutil.inusercache(repo.ui, hash):
1327 1328 store = basestore._openstore(repo)
1328 1329 success, missing = store.get([(lf, hash)])
1329 1330 if len(success) != 1:
1330 1331 raise error.Abort(
1331 1332 _('largefile %s is not in cache and could not be '
1332 1333 'downloaded') % lf)
1333 1334 path = lfutil.usercachepath(repo.ui, hash)
1334 1335 fpin = open(path, "rb")
1335 1336 for chunk in util.filechunkiter(fpin, 128 * 1024):
1336 1337 fp.write(chunk)
1337 1338 fpin.close()
1338 1339 fp.close()
1339 1340 err = 0
1340 1341 return err
1341 1342
1342 1343 def mergeupdate(orig, repo, node, branchmerge, force,
1343 1344 *args, **kwargs):
1344 1345 matcher = kwargs.get('matcher', None)
1345 1346 # note if this is a partial update
1346 1347 partial = matcher and not matcher.always()
1347 1348 with repo.wlock():
1348 1349 # branch | | |
1349 1350 # merge | force | partial | action
1350 1351 # -------+-------+---------+--------------
1351 1352 # x | x | x | linear-merge
1352 1353 # o | x | x | branch-merge
1353 1354 # x | o | x | overwrite (as clean update)
1354 1355 # o | o | x | force-branch-merge (*1)
1355 1356 # x | x | o | (*)
1356 1357 # o | x | o | (*)
1357 1358 # x | o | o | overwrite (as revert)
1358 1359 # o | o | o | (*)
1359 1360 #
1360 1361 # (*) don't care
1361 1362 # (*1) deprecated, but used internally (e.g.: "rebase --collapse")
1362 1363
1363 1364 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1364 1365 unsure, s = lfdirstate.status(match_.always(repo.root,
1365 1366 repo.getcwd()),
1366 1367 [], False, False, False)
1367 1368 pctx = repo['.']
1368 1369 for lfile in unsure + s.modified:
1369 1370 lfileabs = repo.wvfs.join(lfile)
1370 1371 if not os.path.exists(lfileabs):
1371 1372 continue
1372 1373 lfhash = lfutil.hashrepofile(repo, lfile)
1373 1374 standin = lfutil.standin(lfile)
1374 1375 lfutil.writestandin(repo, standin, lfhash,
1375 1376 lfutil.getexecutable(lfileabs))
1376 1377 if (standin in pctx and
1377 1378 lfhash == lfutil.readstandin(repo, lfile, '.')):
1378 1379 lfdirstate.normal(lfile)
1379 1380 for lfile in s.added:
1380 1381 lfutil.updatestandin(repo, lfutil.standin(lfile))
1381 1382 lfdirstate.write()
1382 1383
1383 1384 oldstandins = lfutil.getstandinsstate(repo)
1384 1385
1385 1386 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1386 1387
1387 1388 newstandins = lfutil.getstandinsstate(repo)
1388 1389 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1389 1390 if branchmerge or force or partial:
1390 1391 filelist.extend(s.deleted + s.removed)
1391 1392
1392 1393 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1393 1394 normallookup=partial)
1394 1395
1395 1396 return result
1396 1397
1397 1398 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1398 1399 result = orig(repo, files, *args, **kwargs)
1399 1400
1400 1401 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1401 1402 if filelist:
1402 1403 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1403 1404 printmessage=False, normallookup=True)
1404 1405
1405 1406 return result
@@ -1,7056 +1,7057 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullhex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys, socket
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb
17 17 import extensions
18 18 import merge as mergemod
19 19 import minirst, revset, fileset
20 20 import dagparser, context, simplemerge, graphmod, copies
21 21 import random, operator
22 22 import setdiscovery, treediscovery, dagutil, pvec, localrepo, destutil
23 23 import phases, obsolete, exchange, bundle2, repair, lock as lockmod
24 24 import ui as uimod
25 25 import streamclone
26 26 import commandserver
27 27
28 28 table = {}
29 29
30 30 command = cmdutil.command(table)
31 31
32 32 # Space delimited list of commands that don't require local repositories.
33 33 # This should be populated by passing norepo=True into the @command decorator.
34 34 norepo = ''
35 35 # Space delimited list of commands that optionally require local repositories.
36 36 # This should be populated by passing optionalrepo=True into the @command
37 37 # decorator.
38 38 optionalrepo = ''
39 39 # Space delimited list of commands that will examine arguments looking for
40 40 # a repository. This should be populated by passing inferrepo=True into the
41 41 # @command decorator.
42 42 inferrepo = ''
43 43
44 44 # label constants
45 45 # until 3.5, bookmarks.current was the advertised name, not
46 46 # bookmarks.active, so we must use both to avoid breaking old
47 47 # custom styles
48 48 activebookmarklabel = 'bookmarks.active bookmarks.current'
49 49
50 50 # common command options
51 51
52 52 globalopts = [
53 53 ('R', 'repository', '',
54 54 _('repository root directory or name of overlay bundle file'),
55 55 _('REPO')),
56 56 ('', 'cwd', '',
57 57 _('change working directory'), _('DIR')),
58 58 ('y', 'noninteractive', None,
59 59 _('do not prompt, automatically pick the first choice for all prompts')),
60 60 ('q', 'quiet', None, _('suppress output')),
61 61 ('v', 'verbose', None, _('enable additional output')),
62 62 ('', 'config', [],
63 63 _('set/override config option (use \'section.name=value\')'),
64 64 _('CONFIG')),
65 65 ('', 'debug', None, _('enable debugging output')),
66 66 ('', 'debugger', None, _('start debugger')),
67 67 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
68 68 _('ENCODE')),
69 69 ('', 'encodingmode', encoding.encodingmode,
70 70 _('set the charset encoding mode'), _('MODE')),
71 71 ('', 'traceback', None, _('always print a traceback on exception')),
72 72 ('', 'time', None, _('time how long the command takes')),
73 73 ('', 'profile', None, _('print command execution profile')),
74 74 ('', 'version', None, _('output version information and exit')),
75 75 ('h', 'help', None, _('display help and exit')),
76 76 ('', 'hidden', False, _('consider hidden changesets')),
77 77 ]
78 78
79 79 dryrunopts = [('n', 'dry-run', None,
80 80 _('do not perform actions, just print output'))]
81 81
82 82 remoteopts = [
83 83 ('e', 'ssh', '',
84 84 _('specify ssh command to use'), _('CMD')),
85 85 ('', 'remotecmd', '',
86 86 _('specify hg command to run on the remote side'), _('CMD')),
87 87 ('', 'insecure', None,
88 88 _('do not verify server certificate (ignoring web.cacerts config)')),
89 89 ]
90 90
91 91 walkopts = [
92 92 ('I', 'include', [],
93 93 _('include names matching the given patterns'), _('PATTERN')),
94 94 ('X', 'exclude', [],
95 95 _('exclude names matching the given patterns'), _('PATTERN')),
96 96 ]
97 97
98 98 commitopts = [
99 99 ('m', 'message', '',
100 100 _('use text as commit message'), _('TEXT')),
101 101 ('l', 'logfile', '',
102 102 _('read commit message from file'), _('FILE')),
103 103 ]
104 104
105 105 commitopts2 = [
106 106 ('d', 'date', '',
107 107 _('record the specified date as commit date'), _('DATE')),
108 108 ('u', 'user', '',
109 109 _('record the specified user as committer'), _('USER')),
110 110 ]
111 111
112 112 # hidden for now
113 113 formatteropts = [
114 114 ('T', 'template', '',
115 115 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
116 116 ]
117 117
118 118 templateopts = [
119 119 ('', 'style', '',
120 120 _('display using template map file (DEPRECATED)'), _('STYLE')),
121 121 ('T', 'template', '',
122 122 _('display with template'), _('TEMPLATE')),
123 123 ]
124 124
125 125 logopts = [
126 126 ('p', 'patch', None, _('show patch')),
127 127 ('g', 'git', None, _('use git extended diff format')),
128 128 ('l', 'limit', '',
129 129 _('limit number of changes displayed'), _('NUM')),
130 130 ('M', 'no-merges', None, _('do not show merges')),
131 131 ('', 'stat', None, _('output diffstat-style summary of changes')),
132 132 ('G', 'graph', None, _("show the revision DAG")),
133 133 ] + templateopts
134 134
135 135 diffopts = [
136 136 ('a', 'text', None, _('treat all files as text')),
137 137 ('g', 'git', None, _('use git extended diff format')),
138 138 ('', 'nodates', None, _('omit dates from diff headers'))
139 139 ]
140 140
141 141 diffwsopts = [
142 142 ('w', 'ignore-all-space', None,
143 143 _('ignore white space when comparing lines')),
144 144 ('b', 'ignore-space-change', None,
145 145 _('ignore changes in the amount of white space')),
146 146 ('B', 'ignore-blank-lines', None,
147 147 _('ignore changes whose lines are all blank')),
148 148 ]
149 149
150 150 diffopts2 = [
151 151 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
152 152 ('p', 'show-function', None, _('show which function each change is in')),
153 153 ('', 'reverse', None, _('produce a diff that undoes the changes')),
154 154 ] + diffwsopts + [
155 155 ('U', 'unified', '',
156 156 _('number of lines of context to show'), _('NUM')),
157 157 ('', 'stat', None, _('output diffstat-style summary of changes')),
158 158 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
159 159 ]
160 160
161 161 mergetoolopts = [
162 162 ('t', 'tool', '', _('specify merge tool')),
163 163 ]
164 164
165 165 similarityopts = [
166 166 ('s', 'similarity', '',
167 167 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
168 168 ]
169 169
170 170 subrepoopts = [
171 171 ('S', 'subrepos', None,
172 172 _('recurse into subrepositories'))
173 173 ]
174 174
175 175 debugrevlogopts = [
176 176 ('c', 'changelog', False, _('open changelog')),
177 177 ('m', 'manifest', False, _('open manifest')),
178 178 ('', 'dir', False, _('open directory manifest')),
179 179 ]
180 180
181 181 # Commands start here, listed alphabetically
182 182
183 183 @command('^add',
184 184 walkopts + subrepoopts + dryrunopts,
185 185 _('[OPTION]... [FILE]...'),
186 186 inferrepo=True)
187 187 def add(ui, repo, *pats, **opts):
188 188 """add the specified files on the next commit
189 189
190 190 Schedule files to be version controlled and added to the
191 191 repository.
192 192
193 193 The files will be added to the repository at the next commit. To
194 194 undo an add before that, see :hg:`forget`.
195 195
196 196 If no names are given, add all files to the repository (except
197 197 files matching ``.hgignore``).
198 198
199 199 .. container:: verbose
200 200
201 201 Examples:
202 202
203 203 - New (unknown) files are added
204 204 automatically by :hg:`add`::
205 205
206 206 $ ls
207 207 foo.c
208 208 $ hg status
209 209 ? foo.c
210 210 $ hg add
211 211 adding foo.c
212 212 $ hg status
213 213 A foo.c
214 214
215 215 - Specific files to be added can be specified::
216 216
217 217 $ ls
218 218 bar.c foo.c
219 219 $ hg status
220 220 ? bar.c
221 221 ? foo.c
222 222 $ hg add bar.c
223 223 $ hg status
224 224 A bar.c
225 225 ? foo.c
226 226
227 227 Returns 0 if all files are successfully added.
228 228 """
229 229
230 230 m = scmutil.match(repo[None], pats, opts)
231 231 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
232 232 return rejected and 1 or 0
233 233
234 234 @command('addremove',
235 235 similarityopts + subrepoopts + walkopts + dryrunopts,
236 236 _('[OPTION]... [FILE]...'),
237 237 inferrepo=True)
238 238 def addremove(ui, repo, *pats, **opts):
239 239 """add all new files, delete all missing files
240 240
241 241 Add all new files and remove all missing files from the
242 242 repository.
243 243
244 244 Unless names are given, new files are ignored if they match any of
245 245 the patterns in ``.hgignore``. As with add, these changes take
246 246 effect at the next commit.
247 247
248 248 Use the -s/--similarity option to detect renamed files. This
249 249 option takes a percentage between 0 (disabled) and 100 (files must
250 250 be identical) as its parameter. With a parameter greater than 0,
251 251 this compares every removed file with every added file and records
252 252 those similar enough as renames. Detecting renamed files this way
253 253 can be expensive. After using this option, :hg:`status -C` can be
254 254 used to check which files were identified as moved or renamed. If
255 255 not specified, -s/--similarity defaults to 100 and only renames of
256 256 identical files are detected.
257 257
258 258 .. container:: verbose
259 259
260 260 Examples:
261 261
262 262 - A number of files (bar.c and foo.c) are new,
263 263 while foobar.c has been removed (without using :hg:`remove`)
264 264 from the repository::
265 265
266 266 $ ls
267 267 bar.c foo.c
268 268 $ hg status
269 269 ! foobar.c
270 270 ? bar.c
271 271 ? foo.c
272 272 $ hg addremove
273 273 adding bar.c
274 274 adding foo.c
275 275 removing foobar.c
276 276 $ hg status
277 277 A bar.c
278 278 A foo.c
279 279 R foobar.c
280 280
281 281 - A file foobar.c was moved to foo.c without using :hg:`rename`.
282 282 Afterwards, it was edited slightly::
283 283
284 284 $ ls
285 285 foo.c
286 286 $ hg status
287 287 ! foobar.c
288 288 ? foo.c
289 289 $ hg addremove --similarity 90
290 290 removing foobar.c
291 291 adding foo.c
292 292 recording removal of foobar.c as rename to foo.c (94% similar)
293 293 $ hg status -C
294 294 A foo.c
295 295 foobar.c
296 296 R foobar.c
297 297
298 298 Returns 0 if all files are successfully added.
299 299 """
300 300 try:
301 301 sim = float(opts.get('similarity') or 100)
302 302 except ValueError:
303 303 raise error.Abort(_('similarity must be a number'))
304 304 if sim < 0 or sim > 100:
305 305 raise error.Abort(_('similarity must be between 0 and 100'))
306 306 matcher = scmutil.match(repo[None], pats, opts)
307 307 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
308 308
309 309 @command('^annotate|blame',
310 310 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
311 311 ('', 'follow', None,
312 312 _('follow copies/renames and list the filename (DEPRECATED)')),
313 313 ('', 'no-follow', None, _("don't follow copies and renames")),
314 314 ('a', 'text', None, _('treat all files as text')),
315 315 ('u', 'user', None, _('list the author (long with -v)')),
316 316 ('f', 'file', None, _('list the filename')),
317 317 ('d', 'date', None, _('list the date (short with -q)')),
318 318 ('n', 'number', None, _('list the revision number (default)')),
319 319 ('c', 'changeset', None, _('list the changeset')),
320 320 ('l', 'line-number', None, _('show line number at the first appearance'))
321 321 ] + diffwsopts + walkopts + formatteropts,
322 322 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
323 323 inferrepo=True)
324 324 def annotate(ui, repo, *pats, **opts):
325 325 """show changeset information by line for each file
326 326
327 327 List changes in files, showing the revision id responsible for
328 328 each line.
329 329
330 330 This command is useful for discovering when a change was made and
331 331 by whom.
332 332
333 333 If you include --file, --user, or --date, the revision number is
334 334 suppressed unless you also include --number.
335 335
336 336 Without the -a/--text option, annotate will avoid processing files
337 337 it detects as binary. With -a, annotate will annotate the file
338 338 anyway, although the results will probably be neither useful
339 339 nor desirable.
340 340
341 341 Returns 0 on success.
342 342 """
343 343 if not pats:
344 344 raise error.Abort(_('at least one filename or pattern is required'))
345 345
346 346 if opts.get('follow'):
347 347 # --follow is deprecated and now just an alias for -f/--file
348 348 # to mimic the behavior of Mercurial before version 1.5
349 349 opts['file'] = True
350 350
351 351 ctx = scmutil.revsingle(repo, opts.get('rev'))
352 352
353 353 fm = ui.formatter('annotate', opts)
354 354 if ui.quiet:
355 355 datefunc = util.shortdate
356 356 else:
357 357 datefunc = util.datestr
358 358 if ctx.rev() is None:
359 359 def hexfn(node):
360 360 if node is None:
361 361 return None
362 362 else:
363 363 return fm.hexfunc(node)
364 364 if opts.get('changeset'):
365 365 # omit "+" suffix which is appended to node hex
366 366 def formatrev(rev):
367 367 if rev is None:
368 368 return '%d' % ctx.p1().rev()
369 369 else:
370 370 return '%d' % rev
371 371 else:
372 372 def formatrev(rev):
373 373 if rev is None:
374 374 return '%d+' % ctx.p1().rev()
375 375 else:
376 376 return '%d ' % rev
377 377 def formathex(hex):
378 378 if hex is None:
379 379 return '%s+' % fm.hexfunc(ctx.p1().node())
380 380 else:
381 381 return '%s ' % hex
382 382 else:
383 383 hexfn = fm.hexfunc
384 384 formatrev = formathex = str
385 385
386 386 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
387 387 ('number', ' ', lambda x: x[0].rev(), formatrev),
388 388 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
389 389 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
390 390 ('file', ' ', lambda x: x[0].path(), str),
391 391 ('line_number', ':', lambda x: x[1], str),
392 392 ]
393 393 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
394 394
395 395 if (not opts.get('user') and not opts.get('changeset')
396 396 and not opts.get('date') and not opts.get('file')):
397 397 opts['number'] = True
398 398
399 399 linenumber = opts.get('line_number') is not None
400 400 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
401 401 raise error.Abort(_('at least one of -n/-c is required for -l'))
402 402
403 403 if fm:
404 404 def makefunc(get, fmt):
405 405 return get
406 406 else:
407 407 def makefunc(get, fmt):
408 408 return lambda x: fmt(get(x))
409 409 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
410 410 if opts.get(op)]
411 411 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
412 412 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
413 413 if opts.get(op))
414 414
415 415 def bad(x, y):
416 416 raise error.Abort("%s: %s" % (x, y))
417 417
418 418 m = scmutil.match(ctx, pats, opts, badfn=bad)
419 419
420 420 follow = not opts.get('no_follow')
421 421 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
422 422 whitespace=True)
423 423 for abs in ctx.walk(m):
424 424 fctx = ctx[abs]
425 425 if not opts.get('text') and util.binary(fctx.data()):
426 426 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
427 427 continue
428 428
429 429 lines = fctx.annotate(follow=follow, linenumber=linenumber,
430 430 diffopts=diffopts)
431 431 formats = []
432 432 pieces = []
433 433
434 434 for f, sep in funcmap:
435 435 l = [f(n) for n, dummy in lines]
436 436 if l:
437 437 if fm:
438 438 formats.append(['%s' for x in l])
439 439 else:
440 440 sizes = [encoding.colwidth(x) for x in l]
441 441 ml = max(sizes)
442 442 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
443 443 pieces.append(l)
444 444
445 445 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
446 446 fm.startitem()
447 447 fm.write(fields, "".join(f), *p)
448 448 fm.write('line', ": %s", l[1])
449 449
450 450 if lines and not lines[-1][1].endswith('\n'):
451 451 fm.plain('\n')
452 452
453 453 fm.end()
454 454
455 455 @command('archive',
456 456 [('', 'no-decode', None, _('do not pass files through decoders')),
457 457 ('p', 'prefix', '', _('directory prefix for files in archive'),
458 458 _('PREFIX')),
459 459 ('r', 'rev', '', _('revision to distribute'), _('REV')),
460 460 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
461 461 ] + subrepoopts + walkopts,
462 462 _('[OPTION]... DEST'))
463 463 def archive(ui, repo, dest, **opts):
464 464 '''create an unversioned archive of a repository revision
465 465
466 466 By default, the revision used is the parent of the working
467 467 directory; use -r/--rev to specify a different revision.
468 468
469 469 The archive type is automatically detected based on file
470 470 extension (to override, use -t/--type).
471 471
472 472 .. container:: verbose
473 473
474 474 Examples:
475 475
476 476 - create a zip file containing the 1.0 release::
477 477
478 478 hg archive -r 1.0 project-1.0.zip
479 479
480 480 - create a tarball excluding .hg files::
481 481
482 482 hg archive project.tar.gz -X ".hg*"
483 483
484 484 Valid types are:
485 485
486 486 :``files``: a directory full of files (default)
487 487 :``tar``: tar archive, uncompressed
488 488 :``tbz2``: tar archive, compressed using bzip2
489 489 :``tgz``: tar archive, compressed using gzip
490 490 :``uzip``: zip archive, uncompressed
491 491 :``zip``: zip archive, compressed using deflate
492 492
493 493 The exact name of the destination archive or directory is given
494 494 using a format string; see :hg:`help export` for details.
495 495
496 496 Each member added to an archive file has a directory prefix
497 497 prepended. Use -p/--prefix to specify a format string for the
498 498 prefix. The default is the basename of the archive, with suffixes
499 499 removed.
500 500
501 501 Returns 0 on success.
502 502 '''
503 503
504 504 ctx = scmutil.revsingle(repo, opts.get('rev'))
505 505 if not ctx:
506 506 raise error.Abort(_('no working directory: please specify a revision'))
507 507 node = ctx.node()
508 508 dest = cmdutil.makefilename(repo, dest, node)
509 509 if os.path.realpath(dest) == repo.root:
510 510 raise error.Abort(_('repository root cannot be destination'))
511 511
512 512 kind = opts.get('type') or archival.guesskind(dest) or 'files'
513 513 prefix = opts.get('prefix')
514 514
515 515 if dest == '-':
516 516 if kind == 'files':
517 517 raise error.Abort(_('cannot archive plain files to stdout'))
518 518 dest = cmdutil.makefileobj(repo, dest)
519 519 if not prefix:
520 520 prefix = os.path.basename(repo.root) + '-%h'
521 521
522 522 prefix = cmdutil.makefilename(repo, prefix, node)
523 523 matchfn = scmutil.match(ctx, [], opts)
524 524 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
525 525 matchfn, prefix, subrepos=opts.get('subrepos'))
526 526
527 527 @command('backout',
528 528 [('', 'merge', None, _('merge with old dirstate parent after backout')),
529 529 ('', 'commit', None,
530 530 _('commit if no conflicts were encountered (DEPRECATED)')),
531 531 ('', 'no-commit', None, _('do not commit')),
532 532 ('', 'parent', '',
533 533 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
534 534 ('r', 'rev', '', _('revision to backout'), _('REV')),
535 535 ('e', 'edit', False, _('invoke editor on commit messages')),
536 536 ] + mergetoolopts + walkopts + commitopts + commitopts2,
537 537 _('[OPTION]... [-r] REV'))
538 538 def backout(ui, repo, node=None, rev=None, **opts):
539 539 '''reverse effect of earlier changeset
540 540
541 541 Prepare a new changeset with the effect of REV undone in the
542 542 current working directory. If no conflicts were encountered,
543 543 it will be committed immediately.
544 544
545 545 If REV is the parent of the working directory, then this new changeset
546 546 is committed automatically (unless --no-commit is specified).
547 547
548 548 .. note::
549 549
550 550 :hg:`backout` cannot be used to fix either an unwanted or
551 551 incorrect merge.
552 552
553 553 .. container:: verbose
554 554
555 555 Examples:
556 556
557 557 - Reverse the effect of the parent of the working directory.
558 558 This backout will be committed immediately::
559 559
560 560 hg backout -r .
561 561
562 562 - Reverse the effect of previous bad revision 23::
563 563
564 564 hg backout -r 23
565 565
566 566 - Reverse the effect of previous bad revision 23 and
567 567 leave changes uncommitted::
568 568
569 569 hg backout -r 23 --no-commit
570 570 hg commit -m "Backout revision 23"
571 571
572 572 By default, the pending changeset will have one parent,
573 573 maintaining a linear history. With --merge, the pending
574 574 changeset will instead have two parents: the old parent of the
575 575 working directory and a new child of REV that simply undoes REV.
576 576
577 577 Before version 1.7, the behavior without --merge was equivalent
578 578 to specifying --merge followed by :hg:`update --clean .` to
579 579 cancel the merge and leave the child of REV as a head to be
580 580 merged separately.
581 581
582 582 See :hg:`help dates` for a list of formats valid for -d/--date.
583 583
584 584 See :hg:`help revert` for a way to restore files to the state
585 585 of another revision.
586 586
587 587 Returns 0 on success, 1 if nothing to backout or there are unresolved
588 588 files.
589 589 '''
590 590 wlock = lock = None
591 591 try:
592 592 wlock = repo.wlock()
593 593 lock = repo.lock()
594 594 return _dobackout(ui, repo, node, rev, **opts)
595 595 finally:
596 596 release(lock, wlock)
597 597
598 598 def _dobackout(ui, repo, node=None, rev=None, **opts):
599 599 if opts.get('commit') and opts.get('no_commit'):
600 600 raise error.Abort(_("cannot use --commit with --no-commit"))
601 601 if opts.get('merge') and opts.get('no_commit'):
602 602 raise error.Abort(_("cannot use --merge with --no-commit"))
603 603
604 604 if rev and node:
605 605 raise error.Abort(_("please specify just one revision"))
606 606
607 607 if not rev:
608 608 rev = node
609 609
610 610 if not rev:
611 611 raise error.Abort(_("please specify a revision to backout"))
612 612
613 613 date = opts.get('date')
614 614 if date:
615 615 opts['date'] = util.parsedate(date)
616 616
617 617 cmdutil.checkunfinished(repo)
618 618 cmdutil.bailifchanged(repo)
619 619 node = scmutil.revsingle(repo, rev).node()
620 620
621 621 op1, op2 = repo.dirstate.parents()
622 622 if not repo.changelog.isancestor(node, op1):
623 623 raise error.Abort(_('cannot backout change that is not an ancestor'))
624 624
625 625 p1, p2 = repo.changelog.parents(node)
626 626 if p1 == nullid:
627 627 raise error.Abort(_('cannot backout a change with no parents'))
628 628 if p2 != nullid:
629 629 if not opts.get('parent'):
630 630 raise error.Abort(_('cannot backout a merge changeset'))
631 631 p = repo.lookup(opts['parent'])
632 632 if p not in (p1, p2):
633 633 raise error.Abort(_('%s is not a parent of %s') %
634 634 (short(p), short(node)))
635 635 parent = p
636 636 else:
637 637 if opts.get('parent'):
638 638 raise error.Abort(_('cannot use --parent on non-merge changeset'))
639 639 parent = p1
640 640
641 641 # the backout should appear on the same branch
642 642 branch = repo.dirstate.branch()
643 643 bheads = repo.branchheads(branch)
644 644 rctx = scmutil.revsingle(repo, hex(parent))
645 645 if not opts.get('merge') and op1 != node:
646 646 dsguard = cmdutil.dirstateguard(repo, 'backout')
647 647 try:
648 648 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
649 649 'backout')
650 650 stats = mergemod.update(repo, parent, True, True, node, False)
651 651 repo.setparents(op1, op2)
652 652 dsguard.close()
653 653 hg._showstats(repo, stats)
654 654 if stats[3]:
655 655 repo.ui.status(_("use 'hg resolve' to retry unresolved "
656 656 "file merges\n"))
657 657 return 1
658 658 finally:
659 659 ui.setconfig('ui', 'forcemerge', '', '')
660 660 lockmod.release(dsguard)
661 661 else:
662 662 hg.clean(repo, node, show_stats=False)
663 663 repo.dirstate.setbranch(branch)
664 664 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
665 665
666 666 if opts.get('no_commit'):
667 667 msg = _("changeset %s backed out, "
668 668 "don't forget to commit.\n")
669 669 ui.status(msg % short(node))
670 670 return 0
671 671
672 672 def commitfunc(ui, repo, message, match, opts):
673 673 editform = 'backout'
674 674 e = cmdutil.getcommiteditor(editform=editform, **opts)
675 675 if not message:
676 676 # we don't translate commit messages
677 677 message = "Backed out changeset %s" % short(node)
678 678 e = cmdutil.getcommiteditor(edit=True, editform=editform)
679 679 return repo.commit(message, opts.get('user'), opts.get('date'),
680 680 match, editor=e)
681 681 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
682 682 if not newnode:
683 683 ui.status(_("nothing changed\n"))
684 684 return 1
685 685 cmdutil.commitstatus(repo, newnode, branch, bheads)
686 686
687 687 def nice(node):
688 688 return '%d:%s' % (repo.changelog.rev(node), short(node))
689 689 ui.status(_('changeset %s backs out changeset %s\n') %
690 690 (nice(repo.changelog.tip()), nice(node)))
691 691 if opts.get('merge') and op1 != node:
692 692 hg.clean(repo, op1, show_stats=False)
693 693 ui.status(_('merging with changeset %s\n')
694 694 % nice(repo.changelog.tip()))
695 695 try:
696 696 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
697 697 'backout')
698 698 return hg.merge(repo, hex(repo.changelog.tip()))
699 699 finally:
700 700 ui.setconfig('ui', 'forcemerge', '', '')
701 701 return 0
702 702
703 703 @command('bisect',
704 704 [('r', 'reset', False, _('reset bisect state')),
705 705 ('g', 'good', False, _('mark changeset good')),
706 706 ('b', 'bad', False, _('mark changeset bad')),
707 707 ('s', 'skip', False, _('skip testing changeset')),
708 708 ('e', 'extend', False, _('extend the bisect range')),
709 709 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
710 710 ('U', 'noupdate', False, _('do not update to target'))],
711 711 _("[-gbsr] [-U] [-c CMD] [REV]"))
712 712 def bisect(ui, repo, rev=None, extra=None, command=None,
713 713 reset=None, good=None, bad=None, skip=None, extend=None,
714 714 noupdate=None):
715 715 """subdivision search of changesets
716 716
717 717 This command helps to find changesets which introduce problems. To
718 718 use, mark the earliest changeset you know exhibits the problem as
719 719 bad, then mark the latest changeset which is free from the problem
720 720 as good. Bisect will update your working directory to a revision
721 721 for testing (unless the -U/--noupdate option is specified). Once
722 722 you have performed tests, mark the working directory as good or
723 723 bad, and bisect will either update to another candidate changeset
724 724 or announce that it has found the bad revision.
725 725
726 726 As a shortcut, you can also use the revision argument to mark a
727 727 revision as good or bad without checking it out first.
728 728
729 729 If you supply a command, it will be used for automatic bisection.
730 730 The environment variable HG_NODE will contain the ID of the
731 731 changeset being tested. The exit status of the command will be
732 732 used to mark revisions as good or bad: status 0 means good, 125
733 733 means to skip the revision, 127 (command not found) will abort the
734 734 bisection, and any other non-zero exit status means the revision
735 735 is bad.
736 736
737 737 .. container:: verbose
738 738
739 739 Some examples:
740 740
741 741 - start a bisection with known bad revision 34, and good revision 12::
742 742
743 743 hg bisect --bad 34
744 744 hg bisect --good 12
745 745
746 746 - advance the current bisection by marking current revision as good or
747 747 bad::
748 748
749 749 hg bisect --good
750 750 hg bisect --bad
751 751
752 752 - mark the current revision, or a known revision, to be skipped (e.g. if
753 753 that revision is not usable because of another issue)::
754 754
755 755 hg bisect --skip
756 756 hg bisect --skip 23
757 757
758 758 - skip all revisions that do not touch directories ``foo`` or ``bar``::
759 759
760 760 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
761 761
762 762 - forget the current bisection::
763 763
764 764 hg bisect --reset
765 765
766 766 - use 'make && make tests' to automatically find the first broken
767 767 revision::
768 768
769 769 hg bisect --reset
770 770 hg bisect --bad 34
771 771 hg bisect --good 12
772 772 hg bisect --command "make && make tests"
773 773
774 774 - see all changesets whose states are already known in the current
775 775 bisection::
776 776
777 777 hg log -r "bisect(pruned)"
778 778
779 779 - see the changeset currently being bisected (especially useful
780 780 if running with -U/--noupdate)::
781 781
782 782 hg log -r "bisect(current)"
783 783
784 784 - see all changesets that took part in the current bisection::
785 785
786 786 hg log -r "bisect(range)"
787 787
788 788 - you can even get a nice graph::
789 789
790 790 hg log --graph -r "bisect(range)"
791 791
792 792 See :hg:`help revsets` for more about the `bisect()` keyword.
793 793
794 794 Returns 0 on success.
795 795 """
796 796 def extendbisectrange(nodes, good):
797 797 # bisect is incomplete when it ends on a merge node and
798 798 # one of the parent was not checked.
799 799 parents = repo[nodes[0]].parents()
800 800 if len(parents) > 1:
801 801 if good:
802 802 side = state['bad']
803 803 else:
804 804 side = state['good']
805 805 num = len(set(i.node() for i in parents) & set(side))
806 806 if num == 1:
807 807 return parents[0].ancestor(parents[1])
808 808 return None
809 809
810 810 def print_result(nodes, good):
811 811 displayer = cmdutil.show_changeset(ui, repo, {})
812 812 if len(nodes) == 1:
813 813 # narrowed it down to a single revision
814 814 if good:
815 815 ui.write(_("The first good revision is:\n"))
816 816 else:
817 817 ui.write(_("The first bad revision is:\n"))
818 818 displayer.show(repo[nodes[0]])
819 819 extendnode = extendbisectrange(nodes, good)
820 820 if extendnode is not None:
821 821 ui.write(_('Not all ancestors of this changeset have been'
822 822 ' checked.\nUse bisect --extend to continue the '
823 823 'bisection from\nthe common ancestor, %s.\n')
824 824 % extendnode)
825 825 else:
826 826 # multiple possible revisions
827 827 if good:
828 828 ui.write(_("Due to skipped revisions, the first "
829 829 "good revision could be any of:\n"))
830 830 else:
831 831 ui.write(_("Due to skipped revisions, the first "
832 832 "bad revision could be any of:\n"))
833 833 for n in nodes:
834 834 displayer.show(repo[n])
835 835 displayer.close()
836 836
837 837 def check_state(state, interactive=True):
838 838 if not state['good'] or not state['bad']:
839 839 if (good or bad or skip or reset) and interactive:
840 840 return
841 841 if not state['good']:
842 842 raise error.Abort(_('cannot bisect (no known good revisions)'))
843 843 else:
844 844 raise error.Abort(_('cannot bisect (no known bad revisions)'))
845 845 return True
846 846
847 847 # backward compatibility
848 848 if rev in "good bad reset init".split():
849 849 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
850 850 cmd, rev, extra = rev, extra, None
851 851 if cmd == "good":
852 852 good = True
853 853 elif cmd == "bad":
854 854 bad = True
855 855 else:
856 856 reset = True
857 857 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
858 858 raise error.Abort(_('incompatible arguments'))
859 859
860 860 cmdutil.checkunfinished(repo)
861 861
862 862 if reset:
863 863 p = repo.join("bisect.state")
864 864 if os.path.exists(p):
865 865 os.unlink(p)
866 866 return
867 867
868 868 state = hbisect.load_state(repo)
869 869
870 870 if command:
871 871 changesets = 1
872 872 if noupdate:
873 873 try:
874 874 node = state['current'][0]
875 875 except LookupError:
876 876 raise error.Abort(_('current bisect revision is unknown - '
877 877 'start a new bisect to fix'))
878 878 else:
879 879 node, p2 = repo.dirstate.parents()
880 880 if p2 != nullid:
881 881 raise error.Abort(_('current bisect revision is a merge'))
882 882 try:
883 883 while changesets:
884 884 # update state
885 885 state['current'] = [node]
886 886 hbisect.save_state(repo, state)
887 887 status = ui.system(command, environ={'HG_NODE': hex(node)})
888 888 if status == 125:
889 889 transition = "skip"
890 890 elif status == 0:
891 891 transition = "good"
892 892 # status < 0 means process was killed
893 893 elif status == 127:
894 894 raise error.Abort(_("failed to execute %s") % command)
895 895 elif status < 0:
896 896 raise error.Abort(_("%s killed") % command)
897 897 else:
898 898 transition = "bad"
899 899 ctx = scmutil.revsingle(repo, rev, node)
900 900 rev = None # clear for future iterations
901 901 state[transition].append(ctx.node())
902 902 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
903 903 check_state(state, interactive=False)
904 904 # bisect
905 905 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
906 906 # update to next check
907 907 node = nodes[0]
908 908 if not noupdate:
909 909 cmdutil.bailifchanged(repo)
910 910 hg.clean(repo, node, show_stats=False)
911 911 finally:
912 912 state['current'] = [node]
913 913 hbisect.save_state(repo, state)
914 914 print_result(nodes, bgood)
915 915 return
916 916
917 917 # update state
918 918
919 919 if rev:
920 920 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
921 921 else:
922 922 nodes = [repo.lookup('.')]
923 923
924 924 if good or bad or skip:
925 925 if good:
926 926 state['good'] += nodes
927 927 elif bad:
928 928 state['bad'] += nodes
929 929 elif skip:
930 930 state['skip'] += nodes
931 931 hbisect.save_state(repo, state)
932 932
933 933 if not check_state(state):
934 934 return
935 935
936 936 # actually bisect
937 937 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
938 938 if extend:
939 939 if not changesets:
940 940 extendnode = extendbisectrange(nodes, good)
941 941 if extendnode is not None:
942 942 ui.write(_("Extending search to changeset %d:%s\n")
943 943 % (extendnode.rev(), extendnode))
944 944 state['current'] = [extendnode.node()]
945 945 hbisect.save_state(repo, state)
946 946 if noupdate:
947 947 return
948 948 cmdutil.bailifchanged(repo)
949 949 return hg.clean(repo, extendnode.node())
950 950 raise error.Abort(_("nothing to extend"))
951 951
952 952 if changesets == 0:
953 953 print_result(nodes, good)
954 954 else:
955 955 assert len(nodes) == 1 # only a single node can be tested next
956 956 node = nodes[0]
957 957 # compute the approximate number of remaining tests
958 958 tests, size = 0, 2
959 959 while size <= changesets:
960 960 tests, size = tests + 1, size * 2
961 961 rev = repo.changelog.rev(node)
962 962 ui.write(_("Testing changeset %d:%s "
963 963 "(%d changesets remaining, ~%d tests)\n")
964 964 % (rev, short(node), changesets, tests))
965 965 state['current'] = [node]
966 966 hbisect.save_state(repo, state)
967 967 if not noupdate:
968 968 cmdutil.bailifchanged(repo)
969 969 return hg.clean(repo, node)
970 970
971 971 @command('bookmarks|bookmark',
972 972 [('f', 'force', False, _('force')),
973 973 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
974 974 ('d', 'delete', False, _('delete a given bookmark')),
975 975 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
976 976 ('i', 'inactive', False, _('mark a bookmark inactive')),
977 977 ] + formatteropts,
978 978 _('hg bookmarks [OPTIONS]... [NAME]...'))
979 979 def bookmark(ui, repo, *names, **opts):
980 980 '''create a new bookmark or list existing bookmarks
981 981
982 982 Bookmarks are labels on changesets to help track lines of development.
983 983 Bookmarks are unversioned and can be moved, renamed and deleted.
984 984 Deleting or moving a bookmark has no effect on the associated changesets.
985 985
986 986 Creating or updating to a bookmark causes it to be marked as 'active'.
987 987 The active bookmark is indicated with a '*'.
988 988 When a commit is made, the active bookmark will advance to the new commit.
989 989 A plain :hg:`update` will also advance an active bookmark, if possible.
990 990 Updating away from a bookmark will cause it to be deactivated.
991 991
992 992 Bookmarks can be pushed and pulled between repositories (see
993 993 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
994 994 diverged, a new 'divergent bookmark' of the form 'name@path' will
995 995 be created. Using :hg:`merge` will resolve the divergence.
996 996
997 997 A bookmark named '@' has the special property that :hg:`clone` will
998 998 check it out by default if it exists.
999 999
1000 1000 .. container:: verbose
1001 1001
1002 1002 Examples:
1003 1003
1004 1004 - create an active bookmark for a new line of development::
1005 1005
1006 1006 hg book new-feature
1007 1007
1008 1008 - create an inactive bookmark as a place marker::
1009 1009
1010 1010 hg book -i reviewed
1011 1011
1012 1012 - create an inactive bookmark on another changeset::
1013 1013
1014 1014 hg book -r .^ tested
1015 1015
1016 1016 - rename bookmark turkey to dinner::
1017 1017
1018 1018 hg book -m turkey dinner
1019 1019
1020 1020 - move the '@' bookmark from another branch::
1021 1021
1022 1022 hg book -f @
1023 1023 '''
1024 1024 force = opts.get('force')
1025 1025 rev = opts.get('rev')
1026 1026 delete = opts.get('delete')
1027 1027 rename = opts.get('rename')
1028 1028 inactive = opts.get('inactive')
1029 1029
1030 1030 def checkformat(mark):
1031 1031 mark = mark.strip()
1032 1032 if not mark:
1033 1033 raise error.Abort(_("bookmark names cannot consist entirely of "
1034 1034 "whitespace"))
1035 1035 scmutil.checknewlabel(repo, mark, 'bookmark')
1036 1036 return mark
1037 1037
1038 1038 def checkconflict(repo, mark, cur, force=False, target=None):
1039 1039 if mark in marks and not force:
1040 1040 if target:
1041 1041 if marks[mark] == target and target == cur:
1042 1042 # re-activating a bookmark
1043 1043 return
1044 1044 anc = repo.changelog.ancestors([repo[target].rev()])
1045 1045 bmctx = repo[marks[mark]]
1046 1046 divs = [repo[b].node() for b in marks
1047 1047 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1048 1048
1049 1049 # allow resolving a single divergent bookmark even if moving
1050 1050 # the bookmark across branches when a revision is specified
1051 1051 # that contains a divergent bookmark
1052 1052 if bmctx.rev() not in anc and target in divs:
1053 1053 bookmarks.deletedivergent(repo, [target], mark)
1054 1054 return
1055 1055
1056 1056 deletefrom = [b for b in divs
1057 1057 if repo[b].rev() in anc or b == target]
1058 1058 bookmarks.deletedivergent(repo, deletefrom, mark)
1059 1059 if bookmarks.validdest(repo, bmctx, repo[target]):
1060 1060 ui.status(_("moving bookmark '%s' forward from %s\n") %
1061 1061 (mark, short(bmctx.node())))
1062 1062 return
1063 1063 raise error.Abort(_("bookmark '%s' already exists "
1064 1064 "(use -f to force)") % mark)
1065 1065 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1066 1066 and not force):
1067 1067 raise error.Abort(
1068 1068 _("a bookmark cannot have the name of an existing branch"))
1069 1069
1070 1070 if delete and rename:
1071 1071 raise error.Abort(_("--delete and --rename are incompatible"))
1072 1072 if delete and rev:
1073 1073 raise error.Abort(_("--rev is incompatible with --delete"))
1074 1074 if rename and rev:
1075 1075 raise error.Abort(_("--rev is incompatible with --rename"))
1076 1076 if not names and (delete or rev):
1077 1077 raise error.Abort(_("bookmark name required"))
1078 1078
1079 1079 if delete or rename or names or inactive:
1080 1080 wlock = lock = tr = None
1081 1081 try:
1082 1082 wlock = repo.wlock()
1083 1083 lock = repo.lock()
1084 1084 cur = repo.changectx('.').node()
1085 1085 marks = repo._bookmarks
1086 1086 if delete:
1087 1087 tr = repo.transaction('bookmark')
1088 1088 for mark in names:
1089 1089 if mark not in marks:
1090 1090 raise error.Abort(_("bookmark '%s' does not exist") %
1091 1091 mark)
1092 1092 if mark == repo._activebookmark:
1093 1093 bookmarks.deactivate(repo)
1094 1094 del marks[mark]
1095 1095
1096 1096 elif rename:
1097 1097 tr = repo.transaction('bookmark')
1098 1098 if not names:
1099 1099 raise error.Abort(_("new bookmark name required"))
1100 1100 elif len(names) > 1:
1101 1101 raise error.Abort(_("only one new bookmark name allowed"))
1102 1102 mark = checkformat(names[0])
1103 1103 if rename not in marks:
1104 1104 raise error.Abort(_("bookmark '%s' does not exist")
1105 1105 % rename)
1106 1106 checkconflict(repo, mark, cur, force)
1107 1107 marks[mark] = marks[rename]
1108 1108 if repo._activebookmark == rename and not inactive:
1109 1109 bookmarks.activate(repo, mark)
1110 1110 del marks[rename]
1111 1111 elif names:
1112 1112 tr = repo.transaction('bookmark')
1113 1113 newact = None
1114 1114 for mark in names:
1115 1115 mark = checkformat(mark)
1116 1116 if newact is None:
1117 1117 newact = mark
1118 1118 if inactive and mark == repo._activebookmark:
1119 1119 bookmarks.deactivate(repo)
1120 1120 return
1121 1121 tgt = cur
1122 1122 if rev:
1123 1123 tgt = scmutil.revsingle(repo, rev).node()
1124 1124 checkconflict(repo, mark, cur, force, tgt)
1125 1125 marks[mark] = tgt
1126 1126 if not inactive and cur == marks[newact] and not rev:
1127 1127 bookmarks.activate(repo, newact)
1128 1128 elif cur != tgt and newact == repo._activebookmark:
1129 1129 bookmarks.deactivate(repo)
1130 1130 elif inactive:
1131 1131 if len(marks) == 0:
1132 1132 ui.status(_("no bookmarks set\n"))
1133 1133 elif not repo._activebookmark:
1134 1134 ui.status(_("no active bookmark\n"))
1135 1135 else:
1136 1136 bookmarks.deactivate(repo)
1137 1137 if tr is not None:
1138 1138 marks.recordchange(tr)
1139 1139 tr.close()
1140 1140 finally:
1141 1141 lockmod.release(tr, lock, wlock)
1142 1142 else: # show bookmarks
1143 1143 fm = ui.formatter('bookmarks', opts)
1144 1144 hexfn = fm.hexfunc
1145 1145 marks = repo._bookmarks
1146 1146 if len(marks) == 0 and not fm:
1147 1147 ui.status(_("no bookmarks set\n"))
1148 1148 for bmark, n in sorted(marks.iteritems()):
1149 1149 active = repo._activebookmark
1150 1150 if bmark == active:
1151 1151 prefix, label = '*', activebookmarklabel
1152 1152 else:
1153 1153 prefix, label = ' ', ''
1154 1154
1155 1155 fm.startitem()
1156 1156 if not ui.quiet:
1157 1157 fm.plain(' %s ' % prefix, label=label)
1158 1158 fm.write('bookmark', '%s', bmark, label=label)
1159 1159 pad = " " * (25 - encoding.colwidth(bmark))
1160 1160 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1161 1161 repo.changelog.rev(n), hexfn(n), label=label)
1162 1162 fm.data(active=(bmark == active))
1163 1163 fm.plain('\n')
1164 1164 fm.end()
1165 1165
1166 1166 @command('branch',
1167 1167 [('f', 'force', None,
1168 1168 _('set branch name even if it shadows an existing branch')),
1169 1169 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1170 1170 _('[-fC] [NAME]'))
1171 1171 def branch(ui, repo, label=None, **opts):
1172 1172 """set or show the current branch name
1173 1173
1174 1174 .. note::
1175 1175
1176 1176 Branch names are permanent and global. Use :hg:`bookmark` to create a
1177 1177 light-weight bookmark instead. See :hg:`help glossary` for more
1178 1178 information about named branches and bookmarks.
1179 1179
1180 1180 With no argument, show the current branch name. With one argument,
1181 1181 set the working directory branch name (the branch will not exist
1182 1182 in the repository until the next commit). Standard practice
1183 1183 recommends that primary development take place on the 'default'
1184 1184 branch.
1185 1185
1186 1186 Unless -f/--force is specified, branch will not let you set a
1187 1187 branch name that already exists.
1188 1188
1189 1189 Use -C/--clean to reset the working directory branch to that of
1190 1190 the parent of the working directory, negating a previous branch
1191 1191 change.
1192 1192
1193 1193 Use the command :hg:`update` to switch to an existing branch. Use
1194 1194 :hg:`commit --close-branch` to mark this branch head as closed.
1195 1195 When all heads of a branch are closed, the branch will be
1196 1196 considered closed.
1197 1197
1198 1198 Returns 0 on success.
1199 1199 """
1200 1200 if label:
1201 1201 label = label.strip()
1202 1202
1203 1203 if not opts.get('clean') and not label:
1204 1204 ui.write("%s\n" % repo.dirstate.branch())
1205 1205 return
1206 1206
1207 1207 with repo.wlock():
1208 1208 if opts.get('clean'):
1209 1209 label = repo[None].p1().branch()
1210 1210 repo.dirstate.setbranch(label)
1211 1211 ui.status(_('reset working directory to branch %s\n') % label)
1212 1212 elif label:
1213 1213 if not opts.get('force') and label in repo.branchmap():
1214 1214 if label not in [p.branch() for p in repo[None].parents()]:
1215 1215 raise error.Abort(_('a branch of the same name already'
1216 1216 ' exists'),
1217 1217 # i18n: "it" refers to an existing branch
1218 1218 hint=_("use 'hg update' to switch to it"))
1219 1219 scmutil.checknewlabel(repo, label, 'branch')
1220 1220 repo.dirstate.setbranch(label)
1221 1221 ui.status(_('marked working directory as branch %s\n') % label)
1222 1222
1223 1223 # find any open named branches aside from default
1224 1224 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1225 1225 if n != "default" and not c]
1226 1226 if not others:
1227 1227 ui.status(_('(branches are permanent and global, '
1228 1228 'did you want a bookmark?)\n'))
1229 1229
1230 1230 @command('branches',
1231 1231 [('a', 'active', False,
1232 1232 _('show only branches that have unmerged heads (DEPRECATED)')),
1233 1233 ('c', 'closed', False, _('show normal and closed branches')),
1234 1234 ] + formatteropts,
1235 1235 _('[-ac]'))
1236 1236 def branches(ui, repo, active=False, closed=False, **opts):
1237 1237 """list repository named branches
1238 1238
1239 1239 List the repository's named branches, indicating which ones are
1240 1240 inactive. If -c/--closed is specified, also list branches which have
1241 1241 been marked closed (see :hg:`commit --close-branch`).
1242 1242
1243 1243 Use the command :hg:`update` to switch to an existing branch.
1244 1244
1245 1245 Returns 0.
1246 1246 """
1247 1247
1248 1248 fm = ui.formatter('branches', opts)
1249 1249 hexfunc = fm.hexfunc
1250 1250
1251 1251 allheads = set(repo.heads())
1252 1252 branches = []
1253 1253 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1254 1254 isactive = not isclosed and bool(set(heads) & allheads)
1255 1255 branches.append((tag, repo[tip], isactive, not isclosed))
1256 1256 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1257 1257 reverse=True)
1258 1258
1259 1259 for tag, ctx, isactive, isopen in branches:
1260 1260 if active and not isactive:
1261 1261 continue
1262 1262 if isactive:
1263 1263 label = 'branches.active'
1264 1264 notice = ''
1265 1265 elif not isopen:
1266 1266 if not closed:
1267 1267 continue
1268 1268 label = 'branches.closed'
1269 1269 notice = _(' (closed)')
1270 1270 else:
1271 1271 label = 'branches.inactive'
1272 1272 notice = _(' (inactive)')
1273 1273 current = (tag == repo.dirstate.branch())
1274 1274 if current:
1275 1275 label = 'branches.current'
1276 1276
1277 1277 fm.startitem()
1278 1278 fm.write('branch', '%s', tag, label=label)
1279 1279 rev = ctx.rev()
1280 1280 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1281 1281 fmt = ' ' * padsize + ' %d:%s'
1282 1282 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1283 1283 label='log.changeset changeset.%s' % ctx.phasestr())
1284 1284 fm.data(active=isactive, closed=not isopen, current=current)
1285 1285 if not ui.quiet:
1286 1286 fm.plain(notice)
1287 1287 fm.plain('\n')
1288 1288 fm.end()
1289 1289
1290 1290 @command('bundle',
1291 1291 [('f', 'force', None, _('run even when the destination is unrelated')),
1292 1292 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1293 1293 _('REV')),
1294 1294 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1295 1295 _('BRANCH')),
1296 1296 ('', 'base', [],
1297 1297 _('a base changeset assumed to be available at the destination'),
1298 1298 _('REV')),
1299 1299 ('a', 'all', None, _('bundle all changesets in the repository')),
1300 1300 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1301 1301 ] + remoteopts,
1302 1302 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1303 1303 def bundle(ui, repo, fname, dest=None, **opts):
1304 1304 """create a changegroup file
1305 1305
1306 1306 Generate a changegroup file collecting changesets to be added
1307 1307 to a repository.
1308 1308
1309 1309 To create a bundle containing all changesets, use -a/--all
1310 1310 (or --base null). Otherwise, hg assumes the destination will have
1311 1311 all the nodes you specify with --base parameters. Otherwise, hg
1312 1312 will assume the repository has all the nodes in destination, or
1313 1313 default-push/default if no destination is specified.
1314 1314
1315 1315 You can change bundle format with the -t/--type option. You can
1316 1316 specify a compression, a bundle version or both using a dash
1317 1317 (comp-version). The available compression methods are: none, bzip2,
1318 1318 and gzip (by default, bundles are compressed using bzip2). The
1319 1319 available formats are: v1, v2 (default to most suitable).
1320 1320
1321 1321 The bundle file can then be transferred using conventional means
1322 1322 and applied to another repository with the unbundle or pull
1323 1323 command. This is useful when direct push and pull are not
1324 1324 available or when exporting an entire repository is undesirable.
1325 1325
1326 1326 Applying bundles preserves all changeset contents including
1327 1327 permissions, copy/rename information, and revision history.
1328 1328
1329 1329 Returns 0 on success, 1 if no changes found.
1330 1330 """
1331 1331 revs = None
1332 1332 if 'rev' in opts:
1333 1333 revstrings = opts['rev']
1334 1334 revs = scmutil.revrange(repo, revstrings)
1335 1335 if revstrings and not revs:
1336 1336 raise error.Abort(_('no commits to bundle'))
1337 1337
1338 1338 bundletype = opts.get('type', 'bzip2').lower()
1339 1339 try:
1340 1340 bcompression, cgversion, params = exchange.parsebundlespec(
1341 1341 repo, bundletype, strict=False)
1342 1342 except error.UnsupportedBundleSpecification as e:
1343 1343 raise error.Abort(str(e),
1344 1344 hint=_('see "hg help bundle" for supported '
1345 1345 'values for --type'))
1346 1346
1347 1347 # Packed bundles are a pseudo bundle format for now.
1348 1348 if cgversion == 's1':
1349 1349 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1350 1350 hint=_('use "hg debugcreatestreamclonebundle"'))
1351 1351
1352 1352 if opts.get('all'):
1353 1353 if dest:
1354 1354 raise error.Abort(_("--all is incompatible with specifying "
1355 1355 "a destination"))
1356 1356 if opts.get('base'):
1357 1357 ui.warn(_("ignoring --base because --all was specified\n"))
1358 1358 base = ['null']
1359 1359 else:
1360 1360 base = scmutil.revrange(repo, opts.get('base'))
1361 1361 # TODO: get desired bundlecaps from command line.
1362 1362 bundlecaps = None
1363 1363 if base:
1364 1364 if dest:
1365 1365 raise error.Abort(_("--base is incompatible with specifying "
1366 1366 "a destination"))
1367 1367 common = [repo.lookup(rev) for rev in base]
1368 1368 heads = revs and map(repo.lookup, revs) or revs
1369 1369 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1370 1370 common=common, bundlecaps=bundlecaps,
1371 1371 version=cgversion)
1372 1372 outgoing = None
1373 1373 else:
1374 1374 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1375 1375 dest, branches = hg.parseurl(dest, opts.get('branch'))
1376 1376 other = hg.peer(repo, opts, dest)
1377 1377 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1378 1378 heads = revs and map(repo.lookup, revs) or revs
1379 1379 outgoing = discovery.findcommonoutgoing(repo, other,
1380 1380 onlyheads=heads,
1381 1381 force=opts.get('force'),
1382 1382 portable=True)
1383 1383 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1384 1384 bundlecaps, version=cgversion)
1385 1385 if not cg:
1386 1386 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1387 1387 return 1
1388 1388
1389 1389 if cgversion == '01': #bundle1
1390 1390 if bcompression is None:
1391 1391 bcompression = 'UN'
1392 1392 bversion = 'HG10' + bcompression
1393 1393 bcompression = None
1394 1394 else:
1395 1395 assert cgversion == '02'
1396 1396 bversion = 'HG20'
1397 1397
1398 1398
1399 1399 changegroup.writebundle(ui, cg, fname, bversion, compression=bcompression)
1400 1400
1401 1401 @command('cat',
1402 1402 [('o', 'output', '',
1403 1403 _('print output to file with formatted name'), _('FORMAT')),
1404 1404 ('r', 'rev', '', _('print the given revision'), _('REV')),
1405 1405 ('', 'decode', None, _('apply any matching decode filter')),
1406 1406 ] + walkopts,
1407 1407 _('[OPTION]... FILE...'),
1408 1408 inferrepo=True)
1409 1409 def cat(ui, repo, file1, *pats, **opts):
1410 1410 """output the current or given revision of files
1411 1411
1412 1412 Print the specified files as they were at the given revision. If
1413 1413 no revision is given, the parent of the working directory is used.
1414 1414
1415 1415 Output may be to a file, in which case the name of the file is
1416 1416 given using a format string. The formatting rules as follows:
1417 1417
1418 1418 :``%%``: literal "%" character
1419 1419 :``%s``: basename of file being printed
1420 1420 :``%d``: dirname of file being printed, or '.' if in repository root
1421 1421 :``%p``: root-relative path name of file being printed
1422 1422 :``%H``: changeset hash (40 hexadecimal digits)
1423 1423 :``%R``: changeset revision number
1424 1424 :``%h``: short-form changeset hash (12 hexadecimal digits)
1425 1425 :``%r``: zero-padded changeset revision number
1426 1426 :``%b``: basename of the exporting repository
1427 1427
1428 1428 Returns 0 on success.
1429 1429 """
1430 1430 ctx = scmutil.revsingle(repo, opts.get('rev'))
1431 1431 m = scmutil.match(ctx, (file1,) + pats, opts)
1432 1432
1433 1433 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1434 1434
1435 1435 @command('^clone',
1436 1436 [('U', 'noupdate', None, _('the clone will include an empty working '
1437 1437 'directory (only a repository)')),
1438 1438 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1439 1439 _('REV')),
1440 1440 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1441 1441 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1442 1442 ('', 'pull', None, _('use pull protocol to copy metadata')),
1443 1443 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1444 1444 ] + remoteopts,
1445 1445 _('[OPTION]... SOURCE [DEST]'),
1446 1446 norepo=True)
1447 1447 def clone(ui, source, dest=None, **opts):
1448 1448 """make a copy of an existing repository
1449 1449
1450 1450 Create a copy of an existing repository in a new directory.
1451 1451
1452 1452 If no destination directory name is specified, it defaults to the
1453 1453 basename of the source.
1454 1454
1455 1455 The location of the source is added to the new repository's
1456 1456 ``.hg/hgrc`` file, as the default to be used for future pulls.
1457 1457
1458 1458 Only local paths and ``ssh://`` URLs are supported as
1459 1459 destinations. For ``ssh://`` destinations, no working directory or
1460 1460 ``.hg/hgrc`` will be created on the remote side.
1461 1461
1462 1462 If the source repository has a bookmark called '@' set, that
1463 1463 revision will be checked out in the new repository by default.
1464 1464
1465 1465 To check out a particular version, use -u/--update, or
1466 1466 -U/--noupdate to create a clone with no working directory.
1467 1467
1468 1468 To pull only a subset of changesets, specify one or more revisions
1469 1469 identifiers with -r/--rev or branches with -b/--branch. The
1470 1470 resulting clone will contain only the specified changesets and
1471 1471 their ancestors. These options (or 'clone src#rev dest') imply
1472 1472 --pull, even for local source repositories.
1473 1473
1474 1474 .. note::
1475 1475
1476 1476 Specifying a tag will include the tagged changeset but not the
1477 1477 changeset containing the tag.
1478 1478
1479 1479 .. container:: verbose
1480 1480
1481 1481 For efficiency, hardlinks are used for cloning whenever the
1482 1482 source and destination are on the same filesystem (note this
1483 1483 applies only to the repository data, not to the working
1484 1484 directory). Some filesystems, such as AFS, implement hardlinking
1485 1485 incorrectly, but do not report errors. In these cases, use the
1486 1486 --pull option to avoid hardlinking.
1487 1487
1488 1488 In some cases, you can clone repositories and the working
1489 1489 directory using full hardlinks with ::
1490 1490
1491 1491 $ cp -al REPO REPOCLONE
1492 1492
1493 1493 This is the fastest way to clone, but it is not always safe. The
1494 1494 operation is not atomic (making sure REPO is not modified during
1495 1495 the operation is up to you) and you have to make sure your
1496 1496 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1497 1497 so). Also, this is not compatible with certain extensions that
1498 1498 place their metadata under the .hg directory, such as mq.
1499 1499
1500 1500 Mercurial will update the working directory to the first applicable
1501 1501 revision from this list:
1502 1502
1503 1503 a) null if -U or the source repository has no changesets
1504 1504 b) if -u . and the source repository is local, the first parent of
1505 1505 the source repository's working directory
1506 1506 c) the changeset specified with -u (if a branch name, this means the
1507 1507 latest head of that branch)
1508 1508 d) the changeset specified with -r
1509 1509 e) the tipmost head specified with -b
1510 1510 f) the tipmost head specified with the url#branch source syntax
1511 1511 g) the revision marked with the '@' bookmark, if present
1512 1512 h) the tipmost head of the default branch
1513 1513 i) tip
1514 1514
1515 1515 When cloning from servers that support it, Mercurial may fetch
1516 1516 pre-generated data from a server-advertised URL. When this is done,
1517 1517 hooks operating on incoming changesets and changegroups may fire twice,
1518 1518 once for the bundle fetched from the URL and another for any additional
1519 1519 data not fetched from this URL. In addition, if an error occurs, the
1520 1520 repository may be rolled back to a partial clone. This behavior may
1521 1521 change in future releases. See :hg:`help -e clonebundles` for more.
1522 1522
1523 1523 Examples:
1524 1524
1525 1525 - clone a remote repository to a new directory named hg/::
1526 1526
1527 1527 hg clone http://selenic.com/hg
1528 1528
1529 1529 - create a lightweight local clone::
1530 1530
1531 1531 hg clone project/ project-feature/
1532 1532
1533 1533 - clone from an absolute path on an ssh server (note double-slash)::
1534 1534
1535 1535 hg clone ssh://user@server//home/projects/alpha/
1536 1536
1537 1537 - do a high-speed clone over a LAN while checking out a
1538 1538 specified version::
1539 1539
1540 1540 hg clone --uncompressed http://server/repo -u 1.5
1541 1541
1542 1542 - create a repository without changesets after a particular revision::
1543 1543
1544 1544 hg clone -r 04e544 experimental/ good/
1545 1545
1546 1546 - clone (and track) a particular named branch::
1547 1547
1548 1548 hg clone http://selenic.com/hg#stable
1549 1549
1550 1550 See :hg:`help urls` for details on specifying URLs.
1551 1551
1552 1552 Returns 0 on success.
1553 1553 """
1554 1554 if opts.get('noupdate') and opts.get('updaterev'):
1555 1555 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1556 1556
1557 1557 r = hg.clone(ui, opts, source, dest,
1558 1558 pull=opts.get('pull'),
1559 1559 stream=opts.get('uncompressed'),
1560 1560 rev=opts.get('rev'),
1561 1561 update=opts.get('updaterev') or not opts.get('noupdate'),
1562 1562 branch=opts.get('branch'),
1563 1563 shareopts=opts.get('shareopts'))
1564 1564
1565 1565 return r is None
1566 1566
1567 1567 @command('^commit|ci',
1568 1568 [('A', 'addremove', None,
1569 1569 _('mark new/missing files as added/removed before committing')),
1570 1570 ('', 'close-branch', None,
1571 1571 _('mark a branch head as closed')),
1572 1572 ('', 'amend', None, _('amend the parent of the working directory')),
1573 1573 ('s', 'secret', None, _('use the secret phase for committing')),
1574 1574 ('e', 'edit', None, _('invoke editor on commit messages')),
1575 1575 ('i', 'interactive', None, _('use interactive mode')),
1576 1576 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1577 1577 _('[OPTION]... [FILE]...'),
1578 1578 inferrepo=True)
1579 1579 def commit(ui, repo, *pats, **opts):
1580 1580 """commit the specified files or all outstanding changes
1581 1581
1582 1582 Commit changes to the given files into the repository. Unlike a
1583 1583 centralized SCM, this operation is a local operation. See
1584 1584 :hg:`push` for a way to actively distribute your changes.
1585 1585
1586 1586 If a list of files is omitted, all changes reported by :hg:`status`
1587 1587 will be committed.
1588 1588
1589 1589 If you are committing the result of a merge, do not provide any
1590 1590 filenames or -I/-X filters.
1591 1591
1592 1592 If no commit message is specified, Mercurial starts your
1593 1593 configured editor where you can enter a message. In case your
1594 1594 commit fails, you will find a backup of your message in
1595 1595 ``.hg/last-message.txt``.
1596 1596
1597 1597 The --close-branch flag can be used to mark the current branch
1598 1598 head closed. When all heads of a branch are closed, the branch
1599 1599 will be considered closed and no longer listed.
1600 1600
1601 1601 The --amend flag can be used to amend the parent of the
1602 1602 working directory with a new commit that contains the changes
1603 1603 in the parent in addition to those currently reported by :hg:`status`,
1604 1604 if there are any. The old commit is stored in a backup bundle in
1605 1605 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1606 1606 on how to restore it).
1607 1607
1608 1608 Message, user and date are taken from the amended commit unless
1609 1609 specified. When a message isn't specified on the command line,
1610 1610 the editor will open with the message of the amended commit.
1611 1611
1612 1612 It is not possible to amend public changesets (see :hg:`help phases`)
1613 1613 or changesets that have children.
1614 1614
1615 1615 See :hg:`help dates` for a list of formats valid for -d/--date.
1616 1616
1617 1617 Returns 0 on success, 1 if nothing changed.
1618 1618
1619 1619 .. container:: verbose
1620 1620
1621 1621 Examples:
1622 1622
1623 1623 - commit all files ending in .py::
1624 1624
1625 1625 hg commit --include "set:**.py"
1626 1626
1627 1627 - commit all non-binary files::
1628 1628
1629 1629 hg commit --exclude "set:binary()"
1630 1630
1631 1631 - amend the current commit and set the date to now::
1632 1632
1633 1633 hg commit --amend --date now
1634 1634 """
1635 1635 wlock = lock = None
1636 1636 try:
1637 1637 wlock = repo.wlock()
1638 1638 lock = repo.lock()
1639 1639 return _docommit(ui, repo, *pats, **opts)
1640 1640 finally:
1641 1641 release(lock, wlock)
1642 1642
1643 1643 def _docommit(ui, repo, *pats, **opts):
1644 1644 if opts.get('interactive'):
1645 1645 opts.pop('interactive')
1646 1646 cmdutil.dorecord(ui, repo, commit, None, False,
1647 1647 cmdutil.recordfilter, *pats, **opts)
1648 1648 return
1649 1649
1650 1650 if opts.get('subrepos'):
1651 1651 if opts.get('amend'):
1652 1652 raise error.Abort(_('cannot amend with --subrepos'))
1653 1653 # Let --subrepos on the command line override config setting.
1654 1654 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1655 1655
1656 1656 cmdutil.checkunfinished(repo, commit=True)
1657 1657
1658 1658 branch = repo[None].branch()
1659 1659 bheads = repo.branchheads(branch)
1660 1660
1661 1661 extra = {}
1662 1662 if opts.get('close_branch'):
1663 1663 extra['close'] = 1
1664 1664
1665 1665 if not bheads:
1666 1666 raise error.Abort(_('can only close branch heads'))
1667 1667 elif opts.get('amend'):
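# With --amend the working directory parent is being replaced, so look at
# that commit's own parents to check that we are still closing a head of
# the named branch.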
1668 1668 if repo[None].parents()[0].p1().branch() != branch and \
1669 1669 repo[None].parents()[0].p2().branch() != branch:
1670 1670 raise error.Abort(_('can only close branch heads'))
1671 1671
1672 1672 if opts.get('amend'):
1673 1673 if ui.configbool('ui', 'commitsubrepos'):
1674 1674 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1675 1675
1676 1676 old = repo['.']
1677 1677 if not old.mutable():
1678 1678 raise error.Abort(_('cannot amend public changesets'))
1679 1679 if len(repo[None].parents()) > 1:
1680 1680 raise error.Abort(_('cannot amend while merging'))
1681 1681 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1682 1682 if not allowunstable and old.children():
1683 1683 raise error.Abort(_('cannot amend changeset with children'))
1684 1684
1685 1685 # commitfunc is used only for temporary amend commit by cmdutil.amend
1686 1686 def commitfunc(ui, repo, message, match, opts):
1687 1687 return repo.commit(message,
1688 1688 opts.get('user') or old.user(),
1689 1689 opts.get('date') or old.date(),
1690 1690 match,
1691 1691 extra=extra)
1692 1692
1693 1693 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1694 1694 if node == old.node():
1695 1695 ui.status(_("nothing changed\n"))
1696 1696 return 1
1697 1697 else:
1698 1698 def commitfunc(ui, repo, message, match, opts):
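# With --secret, temporarily switch 'phases.new-commit' to secret (both on
# this ui and on the base ui so subrepos inherit it), then restore the
# previous configuration once the commit has been created.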
1699 1699 backup = ui.backupconfig('phases', 'new-commit')
1700 1700 baseui = repo.baseui
1701 1701 basebackup = baseui.backupconfig('phases', 'new-commit')
1702 1702 try:
1703 1703 if opts.get('secret'):
1704 1704 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1705 1705 # Propagate to subrepos
1706 1706 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1707 1707
1708 1708 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1709 1709 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1710 1710 return repo.commit(message, opts.get('user'), opts.get('date'),
1711 1711 match,
1712 1712 editor=editor,
1713 1713 extra=extra)
1714 1714 finally:
1715 1715 ui.restoreconfig(backup)
1716 1716 repo.baseui.restoreconfig(basebackup)
1717 1717
1718 1718
1719 1719 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1720 1720
1721 1721 if not node:
1722 1722 stat = cmdutil.postcommitstatus(repo, pats, opts)
1723 1723 if stat[3]:
1724 1724 ui.status(_("nothing changed (%d missing files, see "
1725 1725 "'hg status')\n") % len(stat[3]))
1726 1726 else:
1727 1727 ui.status(_("nothing changed\n"))
1728 1728 return 1
1729 1729
1730 1730 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1731 1731
1732 1732 @command('config|showconfig|debugconfig',
1733 1733 [('u', 'untrusted', None, _('show untrusted configuration options')),
1734 1734 ('e', 'edit', None, _('edit user config')),
1735 1735 ('l', 'local', None, _('edit repository config')),
1736 1736 ('g', 'global', None, _('edit global config'))],
1737 1737 _('[-u] [NAME]...'),
1738 1738 optionalrepo=True)
1739 1739 def config(ui, repo, *values, **opts):
1740 1740 """show combined config settings from all hgrc files
1741 1741
1742 1742 With no arguments, print names and values of all config items.
1743 1743
1744 1744 With one argument of the form section.name, print just the value
1745 1745 of that config item.
1746 1746
1747 1747 With multiple arguments, print names and values of all config
1748 1748 items with matching section names.
1749 1749
1750 1750 With --edit, start an editor on the user-level config file. With
1751 1751 --global, edit the system-wide config file. With --local, edit the
1752 1752 repository-level config file.
1753 1753
1754 1754 With --debug, the source (filename and line number) is printed
1755 1755 for each config item.
1756 1756
1757 1757 See :hg:`help config` for more information about config files.
1758 1758
1759 1759 Returns 0 on success, 1 if NAME does not exist.
1760 1760
1761 1761 """
1762 1762
1763 1763 if opts.get('edit') or opts.get('local') or opts.get('global'):
1764 1764 if opts.get('local') and opts.get('global'):
1765 1765 raise error.Abort(_("can't use --local and --global together"))
1766 1766
1767 1767 if opts.get('local'):
1768 1768 if not repo:
1769 1769 raise error.Abort(_("can't use --local outside a repository"))
1770 1770 paths = [repo.join('hgrc')]
1771 1771 elif opts.get('global'):
1772 1772 paths = scmutil.systemrcpath()
1773 1773 else:
1774 1774 paths = scmutil.userrcpath()
1775 1775
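# for/else: if none of the candidate config files exists yet, seed the
# first candidate with a sample configuration before opening the editor.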
1776 1776 for f in paths:
1777 1777 if os.path.exists(f):
1778 1778 break
1779 1779 else:
1780 1780 if opts.get('global'):
1781 1781 samplehgrc = uimod.samplehgrcs['global']
1782 1782 elif opts.get('local'):
1783 1783 samplehgrc = uimod.samplehgrcs['local']
1784 1784 else:
1785 1785 samplehgrc = uimod.samplehgrcs['user']
1786 1786
1787 1787 f = paths[0]
1788 1788 fp = open(f, "w")
1789 1789 fp.write(samplehgrc)
1790 1790 fp.close()
1791 1791
1792 1792 editor = ui.geteditor()
1793 1793 ui.system("%s \"%s\"" % (editor, f),
1794 1794 onerr=error.Abort, errprefix=_("edit failed"))
1795 1795 return
1796 1796
1797 1797 for f in scmutil.rcpath():
1798 1798 ui.debug('read config from: %s\n' % f)
1799 1799 untrusted = bool(opts.get('untrusted'))
1800 1800 if values:
1801 1801 sections = [v for v in values if '.' not in v]
1802 1802 items = [v for v in values if '.' in v]
1803 1803 if len(items) > 1 or items and sections:
1804 1804 raise error.Abort(_('only one config item permitted'))
1805 1805 matched = False
1806 1806 for section, name, value in ui.walkconfig(untrusted=untrusted):
1807 1807 value = str(value).replace('\n', '\\n')
1808 1808 sectname = section + '.' + name
1809 1809 if values:
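# A bare section name matches every item in that section; a full
# 'section.name' argument matches exactly one item.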
1810 1810 for v in values:
1811 1811 if v == section:
1812 1812 ui.debug('%s: ' %
1813 1813 ui.configsource(section, name, untrusted))
1814 1814 ui.write('%s=%s\n' % (sectname, value))
1815 1815 matched = True
1816 1816 elif v == sectname:
1817 1817 ui.debug('%s: ' %
1818 1818 ui.configsource(section, name, untrusted))
1819 1819 ui.write(value, '\n')
1820 1820 matched = True
1821 1821 else:
1822 1822 ui.debug('%s: ' %
1823 1823 ui.configsource(section, name, untrusted))
1824 1824 ui.write('%s=%s\n' % (sectname, value))
1825 1825 matched = True
1826 1826 if matched:
1827 1827 return 0
1828 1828 return 1
1829 1829
1830 1830 @command('copy|cp',
1831 1831 [('A', 'after', None, _('record a copy that has already occurred')),
1832 1832 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1833 1833 ] + walkopts + dryrunopts,
1834 1834 _('[OPTION]... [SOURCE]... DEST'))
1835 1835 def copy(ui, repo, *pats, **opts):
1836 1836 """mark files as copied for the next commit
1837 1837
1838 1838 Mark dest as having copies of source files. If dest is a
1839 1839 directory, copies are put in that directory. If dest is a file,
1840 1840 the source must be a single file.
1841 1841
1842 1842 By default, this command copies the contents of files as they
1843 1843 exist in the working directory. If invoked with -A/--after, the
1844 1844 operation is recorded, but no copying is performed.
1845 1845
1846 1846 This command takes effect with the next commit. To undo a copy
1847 1847 before that, see :hg:`revert`.
1848 1848
1849 1849 Returns 0 on success, 1 if errors are encountered.
1850 1850 """
1851 1851 with repo.wlock(False):
1852 1852 return cmdutil.copy(ui, repo, pats, opts)
1853 1853
1854 1854 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1855 1855 def debugancestor(ui, repo, *args):
1856 1856 """find the ancestor revision of two revisions in a given index"""
1857 1857 if len(args) == 3:
1858 1858 index, rev1, rev2 = args
1859 1859 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1860 1860 lookup = r.lookup
1861 1861 elif len(args) == 2:
1862 1862 if not repo:
1863 1863 raise error.Abort(_("there is no Mercurial repository here "
1864 1864 "(.hg not found)"))
1865 1865 rev1, rev2 = args
1866 1866 r = repo.changelog
1867 1867 lookup = repo.lookup
1868 1868 else:
1869 1869 raise error.Abort(_('either two or three arguments required'))
1870 1870 a = r.ancestor(lookup(rev1), lookup(rev2))
1871 1871 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1872 1872
1873 1873 @command('debugbuilddag',
1874 1874 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1875 1875 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1876 1876 ('n', 'new-file', None, _('add new file at each rev'))],
1877 1877 _('[OPTION]... [TEXT]'))
1878 1878 def debugbuilddag(ui, repo, text=None,
1879 1879 mergeable_file=False,
1880 1880 overwritten_file=False,
1881 1881 new_file=False):
1882 1882 """builds a repo with a given DAG from scratch in the current empty repo
1883 1883
1884 1884 The description of the DAG is read from stdin if not given on the
1885 1885 command line.
1886 1886
1887 1887 Elements:
1888 1888
1889 1889 - "+n" is a linear run of n nodes based on the current default parent
1890 1890 - "." is a single node based on the current default parent
1891 1891 - "$" resets the default parent to null (implied at the start);
1892 1892 otherwise the default parent is always the last node created
1893 1893 - "<p" sets the default parent to the backref p
1894 1894 - "*p" is a fork at parent p, which is a backref
1895 1895 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1896 1896 - "/p2" is a merge of the preceding node and p2
1897 1897 - ":tag" defines a local tag for the preceding node
1898 1898 - "@branch" sets the named branch for subsequent nodes
1899 1899 - "#...\\n" is a comment up to the end of the line
1900 1900
1901 1901 Whitespace between the above elements is ignored.
1902 1902
1903 1903 A backref is either
1904 1904
1905 1905 - a number n, which references the node curr-n, where curr is the current
1906 1906 node, or
1907 1907 - the name of a local tag you placed earlier using ":tag", or
1908 1908 - empty to denote the default parent.
1909 1909
1910 1910 All string-valued elements are either strictly alphanumeric, or must
1911 1911 be enclosed in double quotes ("..."), with "\\" as escape character.
1912 1912 """
1913 1913
1914 1914 if text is None:
1915 1915 ui.status(_("reading DAG from stdin\n"))
1916 1916 text = ui.fin.read()
1917 1917
1918 1918 cl = repo.changelog
1919 1919 if len(cl) > 0:
1920 1920 raise error.Abort(_('repository is not empty'))
1921 1921
1922 1922 # determine number of revs in DAG
1923 1923 total = 0
1924 1924 for type, data in dagparser.parsedag(text):
1925 1925 if type == 'n':
1926 1926 total += 1
1927 1927
1928 1928 if mergeable_file:
1929 1929 linesperrev = 2
1930 1930 # make a file with k lines per rev
1931 1931 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1932 1932 initialmergedlines.append("")
1933 1933
1934 1934 tags = []
1935 1935
1936 1936 lock = tr = None
1937 1937 try:
1938 1938 lock = repo.lock()
1939 1939 tr = repo.transaction("builddag")
1940 1940
1941 1941 at = -1
1942 1942 atbranch = 'default'
1943 1943 nodeids = []
1944 1944 id = 0
1945 1945 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1946 1946 for type, data in dagparser.parsedag(text):
1947 1947 if type == 'n':
1948 1948 ui.note(('node %s\n' % str(data)))
1949 1949 id, ps = data
1950 1950
1951 1951 files = []
1952 1952 fctxs = {}
1953 1953
1954 1954 p2 = None
1955 1955 if mergeable_file:
1956 1956 fn = "mf"
1957 1957 p1 = repo[ps[0]]
1958 1958 if len(ps) > 1:
1959 1959 p2 = repo[ps[1]]
1960 1960 pa = p1.ancestor(p2)
1961 1961 base, local, other = [x[fn].data() for x in (pa, p1,
1962 1962 p2)]
1963 1963 m3 = simplemerge.Merge3Text(base, local, other)
1964 1964 ml = [l.strip() for l in m3.merge_lines()]
1965 1965 ml.append("")
1966 1966 elif at > 0:
1967 1967 ml = p1[fn].data().split("\n")
1968 1968 else:
1969 1969 ml = initialmergedlines
1970 1970 ml[id * linesperrev] += " r%i" % id
1971 1971 mergedtext = "\n".join(ml)
1972 1972 files.append(fn)
1973 1973 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1974 1974
1975 1975 if overwritten_file:
1976 1976 fn = "of"
1977 1977 files.append(fn)
1978 1978 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1979 1979
1980 1980 if new_file:
1981 1981 fn = "nf%i" % id
1982 1982 files.append(fn)
1983 1983 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1984 1984 if len(ps) > 1:
1985 1985 if not p2:
1986 1986 p2 = repo[ps[1]]
1987 1987 for fn in p2:
1988 1988 if fn.startswith("nf"):
1989 1989 files.append(fn)
1990 1990 fctxs[fn] = p2[fn]
1991 1991
1992 1992 def fctxfn(repo, cx, path):
1993 1993 return fctxs.get(path)
1994 1994
1995 1995 if len(ps) == 0 or ps[0] < 0:
1996 1996 pars = [None, None]
1997 1997 elif len(ps) == 1:
1998 1998 pars = [nodeids[ps[0]], None]
1999 1999 else:
2000 2000 pars = [nodeids[p] for p in ps]
2001 2001 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
2002 2002 date=(id, 0),
2003 2003 user="debugbuilddag",
2004 2004 extra={'branch': atbranch})
2005 2005 nodeid = repo.commitctx(cx)
2006 2006 nodeids.append(nodeid)
2007 2007 at = id
2008 2008 elif type == 'l':
2009 2009 id, name = data
2010 2010 ui.note(('tag %s\n' % name))
2011 2011 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
2012 2012 elif type == 'a':
2013 2013 ui.note(('branch %s\n' % data))
2014 2014 atbranch = data
2015 2015 ui.progress(_('building'), id, unit=_('revisions'), total=total)
2016 2016 tr.close()
2017 2017
2018 2018 if tags:
2019 2019 repo.vfs.write("localtags", "".join(tags))
2020 2020 finally:
2021 2021 ui.progress(_('building'), None)
2022 2022 release(tr, lock)
2023 2023
2024 2024 @command('debugbundle',
2025 2025 [('a', 'all', None, _('show all details')),
2026 2026 ('', 'spec', None, _('print the bundlespec of the bundle'))],
2027 2027 _('FILE'),
2028 2028 norepo=True)
2029 2029 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
2030 2030 """lists the contents of a bundle"""
2031 2031 with hg.openpath(ui, bundlepath) as f:
2032 2032 if spec:
2033 2033 spec = exchange.getbundlespec(ui, f)
2034 2034 ui.write('%s\n' % spec)
2035 2035 return
2036 2036
2037 2037 gen = exchange.readbundle(ui, f, bundlepath)
2038 2038 if isinstance(gen, bundle2.unbundle20):
2039 2039 return _debugbundle2(ui, gen, all=all, **opts)
2040 2040 if all:
2041 2041 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
2042 2042
2043 2043 def showchunks(named):
2044 2044 ui.write("\n%s\n" % named)
2045 2045 chain = None
2046 2046 while True:
2047 2047 chunkdata = gen.deltachunk(chain)
2048 2048 if not chunkdata:
2049 2049 break
2050 2050 node = chunkdata['node']
2051 2051 p1 = chunkdata['p1']
2052 2052 p2 = chunkdata['p2']
2053 2053 cs = chunkdata['cs']
2054 2054 deltabase = chunkdata['deltabase']
2055 2055 delta = chunkdata['delta']
2056 2056 ui.write("%s %s %s %s %s %s\n" %
2057 2057 (hex(node), hex(p1), hex(p2),
2058 2058 hex(cs), hex(deltabase), len(delta)))
2059 2059 chain = node
2060 2060
2061 2061 chunkdata = gen.changelogheader()
2062 2062 showchunks("changelog")
2063 2063 chunkdata = gen.manifestheader()
2064 2064 showchunks("manifest")
2065 2065 while True:
2066 2066 chunkdata = gen.filelogheader()
2067 2067 if not chunkdata:
2068 2068 break
2069 2069 fname = chunkdata['filename']
2070 2070 showchunks(fname)
2071 2071 else:
2072 2072 if isinstance(gen, bundle2.unbundle20):
2073 2073 raise error.Abort(_('use debugbundle2 for this file'))
2074 2074 chunkdata = gen.changelogheader()
2075 2075 chain = None
2076 2076 while True:
2077 2077 chunkdata = gen.deltachunk(chain)
2078 2078 if not chunkdata:
2079 2079 break
2080 2080 node = chunkdata['node']
2081 2081 ui.write("%s\n" % hex(node))
2082 2082 chain = node
2083 2083
2084 2084 def _debugbundle2(ui, gen, **opts):
2085 2085 """lists the contents of a bundle2"""
2086 2086 if not isinstance(gen, bundle2.unbundle20):
2087 2087 raise error.Abort(_('not a bundle2 file'))
2088 2088 ui.write(('Stream params: %s\n' % repr(gen.params)))
2089 2089 for part in gen.iterparts():
2090 2090 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
2091 2091 if part.type == 'changegroup':
2092 2092 version = part.params.get('version', '01')
2093 2093 cg = changegroup.getunbundler(version, part, 'UN')
2094 2094 chunkdata = cg.changelogheader()
2095 2095 chain = None
2096 2096 while True:
2097 2097 chunkdata = cg.deltachunk(chain)
2098 2098 if not chunkdata:
2099 2099 break
2100 2100 node = chunkdata['node']
2101 2101 ui.write(" %s\n" % hex(node))
2102 2102 chain = node
2103 2103
2104 2104 @command('debugcreatestreamclonebundle', [], 'FILE')
2105 2105 def debugcreatestreamclonebundle(ui, repo, fname):
2106 2106 """create a stream clone bundle file
2107 2107
2108 2108 Stream bundles are special bundles that are essentially archives of
2109 2109 revlog files. They are commonly used for cloning very quickly.
2110 2110 """
2111 2111 requirements, gen = streamclone.generatebundlev1(repo)
2112 2112 changegroup.writechunks(ui, gen, fname)
2113 2113
2114 2114 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
2115 2115
2116 2116 @command('debugapplystreamclonebundle', [], 'FILE')
2117 2117 def debugapplystreamclonebundle(ui, repo, fname):
2118 2118 """apply a stream clone bundle file"""
2119 2119 f = hg.openpath(ui, fname)
2120 2120 gen = exchange.readbundle(ui, f, fname)
2121 2121 gen.apply(repo)
2122 2122
2123 2123 @command('debugcheckstate', [], '')
2124 2124 def debugcheckstate(ui, repo):
2125 2125 """validate the correctness of the current dirstate"""
2126 2126 parent1, parent2 = repo.dirstate.parents()
2127 2127 m1 = repo[parent1].manifest()
2128 2128 m2 = repo[parent2].manifest()
2129 2129 errors = 0
2130 2130 for f in repo.dirstate:
2131 2131 state = repo.dirstate[f]
2132 2132 if state in "nr" and f not in m1:
2133 2133 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
2134 2134 errors += 1
2135 2135 if state in "a" and f in m1:
2136 2136 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
2137 2137 errors += 1
2138 2138 if state in "m" and f not in m1 and f not in m2:
2139 2139 ui.warn(_("%s in state %s, but not in either manifest\n") %
2140 2140 (f, state))
2141 2141 errors += 1
2142 2142 for f in m1:
2143 2143 state = repo.dirstate[f]
2144 2144 if state not in "nrm":
2145 2145 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
2146 2146 errors += 1
2147 2147 if errors:
2148 2148 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
2149 2149 raise error.Abort(errstr)
2150 2150
2151 2151 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
2152 2152 def debugcommands(ui, cmd='', *args):
2153 2153 """list all available commands and options"""
2154 2154 for cmd, vals in sorted(table.iteritems()):
2155 2155 cmd = cmd.split('|')[0].strip('^')
2156 2156 opts = ', '.join([i[1] for i in vals[1]])
2157 2157 ui.write('%s: %s\n' % (cmd, opts))
2158 2158
2159 2159 @command('debugcomplete',
2160 2160 [('o', 'options', None, _('show the command options'))],
2161 2161 _('[-o] CMD'),
2162 2162 norepo=True)
2163 2163 def debugcomplete(ui, cmd='', **opts):
2164 2164 """returns the completion list associated with the given command"""
2165 2165
2166 2166 if opts.get('options'):
2167 2167 options = []
2168 2168 otables = [globalopts]
2169 2169 if cmd:
2170 2170 aliases, entry = cmdutil.findcmd(cmd, table, False)
2171 2171 otables.append(entry[1])
2172 2172 for t in otables:
2173 2173 for o in t:
2174 2174 if "(DEPRECATED)" in o[3]:
2175 2175 continue
2176 2176 if o[0]:
2177 2177 options.append('-%s' % o[0])
2178 2178 options.append('--%s' % o[1])
2179 2179 ui.write("%s\n" % "\n".join(options))
2180 2180 return
2181 2181
2182 2182 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2183 2183 if ui.verbose:
2184 2184 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
2185 2185 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
2186 2186
2187 2187 @command('debugdag',
2188 2188 [('t', 'tags', None, _('use tags as labels')),
2189 2189 ('b', 'branches', None, _('annotate with branch names')),
2190 2190 ('', 'dots', None, _('use dots for runs')),
2191 2191 ('s', 'spaces', None, _('separate elements by spaces'))],
2192 2192 _('[OPTION]... [FILE [REV]...]'),
2193 2193 optionalrepo=True)
2194 2194 def debugdag(ui, repo, file_=None, *revs, **opts):
2195 2195 """format the changelog or an index DAG as a concise textual description
2196 2196
2197 2197 If you pass a revlog index, the revlog's DAG is emitted. If you list
2198 2198 revision numbers, they get labeled in the output as rN.
2199 2199
2200 2200 Otherwise, the changelog DAG of the current repo is emitted.
2201 2201 """
2202 2202 spaces = opts.get('spaces')
2203 2203 dots = opts.get('dots')
2204 2204 if file_:
2205 2205 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2206 2206 revs = set((int(r) for r in revs))
2207 2207 def events():
2208 2208 for r in rlog:
2209 2209 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2210 2210 if p != -1))
2211 2211 if r in revs:
2212 2212 yield 'l', (r, "r%i" % r)
2213 2213 elif repo:
2214 2214 cl = repo.changelog
2215 2215 tags = opts.get('tags')
2216 2216 branches = opts.get('branches')
2217 2217 if tags:
2218 2218 labels = {}
2219 2219 for l, n in repo.tags().items():
2220 2220 labels.setdefault(cl.rev(n), []).append(l)
2221 2221 def events():
2222 2222 b = "default"
2223 2223 for r in cl:
2224 2224 if branches:
2225 2225 newb = cl.read(cl.node(r))[5]['branch']
2226 2226 if newb != b:
2227 2227 yield 'a', newb
2228 2228 b = newb
2229 2229 yield 'n', (r, list(p for p in cl.parentrevs(r)
2230 2230 if p != -1))
2231 2231 if tags:
2232 2232 ls = labels.get(r)
2233 2233 if ls:
2234 2234 for l in ls:
2235 2235 yield 'l', (r, l)
2236 2236 else:
2237 2237 raise error.Abort(_('need repo for changelog dag'))
2238 2238
2239 2239 for line in dagparser.dagtextlines(events(),
2240 2240 addspaces=spaces,
2241 2241 wraplabels=True,
2242 2242 wrapannotations=True,
2243 2243 wrapnonlinear=dots,
2244 2244 usedots=dots,
2245 2245 maxlinewidth=70):
2246 2246 ui.write(line)
2247 2247 ui.write("\n")
2248 2248
2249 2249 @command('debugdata', debugrevlogopts, _('-c|-m|FILE REV'))
2250 2250 def debugdata(ui, repo, file_, rev=None, **opts):
2251 2251 """dump the contents of a data file revision"""
2252 2252 if opts.get('changelog') or opts.get('manifest'):
2253 2253 file_, rev = None, file_
2254 2254 elif rev is None:
2255 2255 raise error.CommandError('debugdata', _('invalid arguments'))
2256 2256 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
2257 2257 try:
2258 2258 ui.write(r.revision(r.lookup(rev)))
2259 2259 except KeyError:
2260 2260 raise error.Abort(_('invalid revision identifier %s') % rev)
2261 2261
2262 2262 @command('debugdate',
2263 2263 [('e', 'extended', None, _('try extended date formats'))],
2264 2264 _('[-e] DATE [RANGE]'),
2265 2265 norepo=True, optionalrepo=True)
2266 2266 def debugdate(ui, date, range=None, **opts):
2267 2267 """parse and display a date"""
2268 2268 if opts["extended"]:
2269 2269 d = util.parsedate(date, util.extendeddateformats)
2270 2270 else:
2271 2271 d = util.parsedate(date)
2272 2272 ui.write(("internal: %s %s\n") % d)
2273 2273 ui.write(("standard: %s\n") % util.datestr(d))
2274 2274 if range:
2275 2275 m = util.matchdate(range)
2276 2276 ui.write(("match: %s\n") % m(d[0]))
2277 2277
2278 2278 @command('debugdiscovery',
2279 2279 [('', 'old', None, _('use old-style discovery')),
2280 2280 ('', 'nonheads', None,
2281 2281 _('use old-style discovery with non-heads included')),
2282 2282 ] + remoteopts,
2283 2283 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
2284 2284 def debugdiscovery(ui, repo, remoteurl="default", **opts):
2285 2285 """runs the changeset discovery protocol in isolation"""
2286 2286 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
2287 2287 opts.get('branch'))
2288 2288 remote = hg.peer(repo, opts, remoteurl)
2289 2289 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
2290 2290
2291 2291 # make sure tests are repeatable
2292 2292 random.seed(12323)
2293 2293
2294 2294 def doit(localheads, remoteheads, remote=remote):
2295 2295 if opts.get('old'):
2296 2296 if localheads:
2297 2297 raise error.Abort('cannot use localheads with old style '
2298 2298 'discovery')
2299 2299 if not util.safehasattr(remote, 'branches'):
2300 2300 # enable in-client legacy support
2301 2301 remote = localrepo.locallegacypeer(remote.local())
2302 2302 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
2303 2303 force=True)
2304 2304 common = set(common)
2305 2305 if not opts.get('nonheads'):
2306 2306 ui.write(("unpruned common: %s\n") %
2307 2307 " ".join(sorted(short(n) for n in common)))
2308 2308 dag = dagutil.revlogdag(repo.changelog)
2309 2309 all = dag.ancestorset(dag.internalizeall(common))
2310 2310 common = dag.externalizeall(dag.headsetofconnecteds(all))
2311 2311 else:
2312 2312 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
2313 2313 common = set(common)
2314 2314 rheads = set(hds)
2315 2315 lheads = set(repo.heads())
2316 2316 ui.write(("common heads: %s\n") %
2317 2317 " ".join(sorted(short(n) for n in common)))
2318 2318 if lheads <= common:
2319 2319 ui.write(("local is subset\n"))
2320 2320 elif rheads <= common:
2321 2321 ui.write(("remote is subset\n"))
2322 2322
2323 2323 serverlogs = opts.get('serverlog')
2324 2324 if serverlogs:
2325 2325 for filename in serverlogs:
2326 2326 with open(filename, 'r') as logfile:
2327 2327 line = logfile.readline()
2328 2328 while line:
2329 2329 parts = line.strip().split(';')
2330 2330 op = parts[1]
2331 2331 if op == 'cg':
2332 2332 pass
2333 2333 elif op == 'cgss':
2334 2334 doit(parts[2].split(' '), parts[3].split(' '))
2335 2335 elif op == 'unb':
2336 2336 doit(parts[3].split(' '), parts[2].split(' '))
2337 2337 line = logfile.readline()
2338 2338 else:
2339 2339 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2340 2340 opts.get('remote_head'))
2341 2341 localrevs = opts.get('local_head')
2342 2342 doit(localrevs, remoterevs)
2343 2343
2344 2344 @command('debugextensions', formatteropts, [], norepo=True)
2345 2345 def debugextensions(ui, **opts):
2346 2346 '''show information about active extensions'''
2347 2347 exts = extensions.extensions(ui)
2348 2348 fm = ui.formatter('debugextensions', opts)
2349 2349 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
2350 2350 extsource = extmod.__file__
2351 2351 exttestedwith = getattr(extmod, 'testedwith', None)
2352 2352 if exttestedwith is not None:
2353 2353 exttestedwith = exttestedwith.split()
2354 2354 extbuglink = getattr(extmod, 'buglink', None)
2355 2355
2356 2356 fm.startitem()
2357 2357
2358 2358 if ui.quiet or ui.verbose:
2359 2359 fm.write('name', '%s\n', extname)
2360 2360 else:
2361 2361 fm.write('name', '%s', extname)
2362 2362 if not exttestedwith:
2363 2363 fm.plain(_(' (untested!)\n'))
2364 2364 else:
2365 2365 if exttestedwith == ['internal'] or \
2366 2366 util.version() in exttestedwith:
2367 2367 fm.plain('\n')
2368 2368 else:
2369 2369 lasttestedversion = exttestedwith[-1]
2370 2370 fm.plain(' (%s!)\n' % lasttestedversion)
2371 2371
2372 2372 fm.condwrite(ui.verbose and extsource, 'source',
2373 2373 _(' location: %s\n'), extsource or "")
2374 2374
2375 2375 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
2376 2376 _(' tested with: %s\n'), ' '.join(exttestedwith or []))
2377 2377
2378 2378 fm.condwrite(ui.verbose and extbuglink, 'buglink',
2379 2379 _(' bug reporting: %s\n'), extbuglink or "")
2380 2380
2381 2381 fm.end()
2382 2382
2383 2383 @command('debugfileset',
2384 2384 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2385 2385 _('[-r REV] FILESPEC'))
2386 2386 def debugfileset(ui, repo, expr, **opts):
2387 2387 '''parse and apply a fileset specification'''
2388 2388 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2389 2389 if ui.verbose:
2390 2390 tree = fileset.parse(expr)
2391 2391 ui.note(fileset.prettyformat(tree), "\n")
2392 2392
2393 2393 for f in ctx.getfileset(expr):
2394 2394 ui.write("%s\n" % f)
2395 2395
2396 2396 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
2397 2397 def debugfsinfo(ui, path="."):
2398 2398 """show information detected about current filesystem"""
2399 2399 util.writefile('.debugfsinfo', '')
2400 2400 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2401 2401 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2402 2402 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2403 2403 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2404 2404 and 'yes' or 'no'))
2405 2405 os.unlink('.debugfsinfo')
2406 2406
2407 2407 @command('debuggetbundle',
2408 2408 [('H', 'head', [], _('id of head node'), _('ID')),
2409 2409 ('C', 'common', [], _('id of common node'), _('ID')),
2410 2410 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2411 2411 _('REPO FILE [-H|-C ID]...'),
2412 2412 norepo=True)
2413 2413 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2414 2414 """retrieves a bundle from a repo
2415 2415
2416 2416 Every ID must be a full-length hex node id string. Saves the bundle to the
2417 2417 given file.
2418 2418 """
2419 2419 repo = hg.peer(ui, opts, repopath)
2420 2420 if not repo.capable('getbundle'):
2421 2421 raise error.Abort("getbundle() not supported by target repository")
2422 2422 args = {}
2423 2423 if common:
2424 2424 args['common'] = [bin(s) for s in common]
2425 2425 if head:
2426 2426 args['heads'] = [bin(s) for s in head]
2427 2427 # TODO: get desired bundlecaps from command line.
2428 2428 args['bundlecaps'] = None
2429 2429 bundle = repo.getbundle('debug', **args)
2430 2430
2431 2431 bundletype = opts.get('type', 'bzip2').lower()
2432 2432 btypes = {'none': 'HG10UN',
2433 2433 'bzip2': 'HG10BZ',
2434 2434 'gzip': 'HG10GZ',
2435 2435 'bundle2': 'HG20'}
2436 2436 bundletype = btypes.get(bundletype)
2437 2437 if bundletype not in changegroup.bundletypes:
2438 2438 raise error.Abort(_('unknown bundle type specified with --type'))
2439 2439 changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2440 2440
2441 2441 @command('debugignore', [], '[FILE]')
2442 2442 def debugignore(ui, repo, *files, **opts):
2443 2443 """display the combined ignore pattern and information about ignored files
2444 2444
2445 2445 With no argument display the combined ignore pattern.
2446 2446
2447 2447 Given space separated file names, shows if the given file is ignored and,
2448 2448 if so, shows the ignore rule (file and line number) that matched it.
2449 2449 """
2450 2450 ignore = repo.dirstate._ignore
2451 2451 if not files:
2452 2452 # Show all the patterns
2453 2453 includepat = getattr(ignore, 'includepat', None)
2454 2454 if includepat is not None:
2455 2455 ui.write("%s\n" % includepat)
2456 2456 else:
2457 2457 raise error.Abort(_("no ignore patterns found"))
2458 2458 else:
2459 2459 for f in files:
2460 2460 ignored = None
2461 2461 ignoredata = None
2462 2462 if f != '.':
2463 2463 if ignore(f):
2464 2464 ignored = f
2465 2465 ignoredata = repo.dirstate._ignorefileandline(f)
2466 2466 else:
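# The file itself is not ignored directly; check whether any of its
# parent directories is ignored instead.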
2467 2467 for p in util.finddirs(f):
2468 2468 if ignore(p):
2469 2469 ignored = p
2470 2470 ignoredata = repo.dirstate._ignorefileandline(p)
2471 2471 break
2472 2472 if ignored:
2473 2473 if ignored == f:
2474 2474 ui.write("%s is ignored\n" % f)
2475 2475 else:
2476 2476 ui.write("%s is ignored because of containing folder %s\n"
2477 2477 % (f, ignored))
2478 2478 ignorefile, lineno, line = ignoredata
2479 2479 ui.write("(ignore rule in %s, line %d: '%s')\n"
2480 2480 % (ignorefile, lineno, line))
2481 2481 else:
2482 2482 ui.write("%s is not ignored\n" % f)
2483 2483
2484 2484 @command('debugindex', debugrevlogopts +
2485 2485 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2486 2486 _('[-f FORMAT] -c|-m|FILE'),
2487 2487 optionalrepo=True)
2488 2488 def debugindex(ui, repo, file_=None, **opts):
2489 2489 """dump the contents of an index file"""
2490 2490 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2491 2491 format = opts.get('format', 0)
2492 2492 if format not in (0, 1):
2493 2493 raise error.Abort(_("unknown format %d") % format)
2494 2494
2495 2495 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2496 2496 if generaldelta:
2497 2497 basehdr = ' delta'
2498 2498 else:
2499 2499 basehdr = ' base'
2500 2500
2501 2501 if ui.debugflag:
2502 2502 shortfn = hex
2503 2503 else:
2504 2504 shortfn = short
2505 2505
2506 2506 # There might not be anything in r, so have a sane default
2507 2507 idlen = 12
2508 2508 for i in r:
2509 2509 idlen = len(shortfn(r.node(i)))
2510 2510 break
2511 2511
2512 2512 if format == 0:
2513 2513 ui.write(" rev offset length " + basehdr + " linkrev"
2514 2514 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
2515 2515 elif format == 1:
2516 2516 ui.write(" rev flag offset length"
2517 2517 " size " + basehdr + " link p1 p2"
2518 2518 " %s\n" % "nodeid".rjust(idlen))
2519 2519
2520 2520 for i in r:
2521 2521 node = r.node(i)
2522 2522 if generaldelta:
2523 2523 base = r.deltaparent(i)
2524 2524 else:
2525 2525 base = r.chainbase(i)
2526 2526 if format == 0:
2527 2527 try:
2528 2528 pp = r.parents(node)
2529 2529 except Exception:
2530 2530 pp = [nullid, nullid]
2531 2531 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2532 2532 i, r.start(i), r.length(i), base, r.linkrev(i),
2533 2533 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2534 2534 elif format == 1:
2535 2535 pr = r.parentrevs(i)
2536 2536 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2537 2537 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2538 2538 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2539 2539
2540 2540 @command('debugindexdot', debugrevlogopts,
2541 2541 _('-c|-m|FILE'), optionalrepo=True)
2542 2542 def debugindexdot(ui, repo, file_=None, **opts):
2543 2543 """dump an index DAG as a graphviz dot file"""
2544 2544 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
2545 2545 ui.write(("digraph G {\n"))
2546 2546 for i in r:
2547 2547 node = r.node(i)
2548 2548 pp = r.parents(node)
2549 2549 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2550 2550 if pp[1] != nullid:
2551 2551 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2552 2552 ui.write("}\n")
2553 2553
2554 2554 @command('debugdeltachain',
2555 2555 debugrevlogopts + formatteropts,
2556 2556 _('-c|-m|FILE'),
2557 2557 optionalrepo=True)
2558 2558 def debugdeltachain(ui, repo, file_=None, **opts):
2559 2559 """dump information about delta chains in a revlog
2560 2560
2561 2561 Output can be templatized. Available template keywords are:
2562 2562
2563 2563 rev revision number
2564 2564 chainid delta chain identifier (numbered by unique base)
2565 2565 chainlen delta chain length to this revision
2566 2566 prevrev previous revision in delta chain
2567 2567 deltatype role of delta / how it was computed
2568 2568 compsize compressed size of revision
2569 2569 uncompsize uncompressed size of revision
2570 2570 chainsize total size of compressed revisions in chain
2571 2571 chainratio total chain size divided by uncompressed revision size
2572 2572 (new delta chains typically start at ratio 2.00)
2573 2573 lindist linear distance from base revision in delta chain to end
2574 2574 of this revision
2575 2575 extradist total size of revisions not part of this delta chain from
2576 2576 base of delta chain to end of this revision; a measurement
2577 2577 of how much extra data we need to read/seek across to read
2578 2578 the delta chain for this revision
2579 2579 extraratio extradist divided by chainsize; another representation of
2580 2580 how much unrelated data is needed to load this delta chain
2581 2581 """
2582 2582 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
2583 2583 index = r.index
2584 2584 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2585 2585
2586 2586 def revinfo(rev):
2587 2587 e = index[rev]
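# Revlog index entry fields used here: e[1] compressed length,
# e[2] uncompressed length, e[3] delta/chain base revision,
# e[5] and e[6] parent revisions.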
2588 2588 compsize = e[1]
2589 2589 uncompsize = e[2]
2590 2590 chainsize = 0
2591 2591
2592 2592 if generaldelta:
2593 2593 if e[3] == e[5]:
2594 2594 deltatype = 'p1'
2595 2595 elif e[3] == e[6]:
2596 2596 deltatype = 'p2'
2597 2597 elif e[3] == rev - 1:
2598 2598 deltatype = 'prev'
2599 2599 elif e[3] == rev:
2600 2600 deltatype = 'base'
2601 2601 else:
2602 2602 deltatype = 'other'
2603 2603 else:
2604 2604 if e[3] == rev:
2605 2605 deltatype = 'base'
2606 2606 else:
2607 2607 deltatype = 'prev'
2608 2608
2609 2609 chain = r._deltachain(rev)[0]
2610 2610 for iterrev in chain:
2611 2611 e = index[iterrev]
2612 2612 chainsize += e[1]
2613 2613
2614 2614 return compsize, uncompsize, deltatype, chain, chainsize
2615 2615
2616 2616 fm = ui.formatter('debugdeltachain', opts)
2617 2617
2618 2618 fm.plain(' rev chain# chainlen prev delta '
2619 2619 'size rawsize chainsize ratio lindist extradist '
2620 2620 'extraratio\n')
2621 2621
2622 2622 chainbases = {}
2623 2623 for rev in r:
2624 2624 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
2625 2625 chainbase = chain[0]
2626 2626 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
2627 2627 basestart = r.start(chainbase)
2628 2628 revstart = r.start(rev)
2629 2629 lineardist = revstart + comp - basestart
2630 2630 extradist = lineardist - chainsize
2631 2631 try:
2632 2632 prevrev = chain[-2]
2633 2633 except IndexError:
2634 2634 prevrev = -1
2635 2635
2636 2636 chainratio = float(chainsize) / float(uncomp)
2637 2637 extraratio = float(extradist) / float(chainsize)
2638 2638
2639 2639 fm.startitem()
2640 2640 fm.write('rev chainid chainlen prevrev deltatype compsize '
2641 2641 'uncompsize chainsize chainratio lindist extradist '
2642 2642 'extraratio',
2643 2643 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
2644 2644 rev, chainid, len(chain), prevrev, deltatype, comp,
2645 2645 uncomp, chainsize, chainratio, lineardist, extradist,
2646 2646 extraratio,
2647 2647 rev=rev, chainid=chainid, chainlen=len(chain),
2648 2648 prevrev=prevrev, deltatype=deltatype, compsize=comp,
2649 2649 uncompsize=uncomp, chainsize=chainsize,
2650 2650 chainratio=chainratio, lindist=lineardist,
2651 2651 extradist=extradist, extraratio=extraratio)
2652 2652
2653 2653 fm.end()
2654 2654
2655 2655 @command('debuginstall', [], '', norepo=True)
2656 2656 def debuginstall(ui):
2657 2657 '''test Mercurial installation
2658 2658
2659 2659 Returns 0 on success.
2660 2660 '''
2661 2661
2662 2662 def writetemp(contents):
2663 2663 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2664 2664 f = os.fdopen(fd, "wb")
2665 2665 f.write(contents)
2666 2666 f.close()
2667 2667 return name
2668 2668
2669 2669 problems = 0
2670 2670
2671 2671 # encoding
2672 2672 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2673 2673 try:
2674 2674 encoding.fromlocal("test")
2675 2675 except error.Abort as inst:
2676 2676 ui.write(" %s\n" % inst)
2677 2677 ui.write(_(" (check that your locale is properly set)\n"))
2678 2678 problems += 1
2679 2679
2680 2680 # Python
2681 2681 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2682 2682 ui.status(_("checking Python version (%s)\n")
2683 2683 % ("%s.%s.%s" % sys.version_info[:3]))
2684 2684 ui.status(_("checking Python lib (%s)...\n")
2685 2685 % os.path.dirname(os.__file__))
2686 2686
2687 2687 # compiled modules
2688 2688 ui.status(_("checking installed modules (%s)...\n")
2689 2689 % os.path.dirname(__file__))
2690 2690 try:
2691 2691 import bdiff, mpatch, base85, osutil
2692 2692 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2693 2693 except Exception as inst:
2694 2694 ui.write(" %s\n" % inst)
2695 2695 ui.write(_(" One or more extensions could not be found"))
2696 2696 ui.write(_(" (check that you compiled the extensions)\n"))
2697 2697 problems += 1
2698 2698
2699 2699 # templates
2700 2700 import templater
2701 2701 p = templater.templatepaths()
2702 2702 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2703 2703 if p:
2704 2704 m = templater.templatepath("map-cmdline.default")
2705 2705 if m:
2706 2706 # template found, check if it is working
2707 2707 try:
2708 2708 templater.templater(m)
2709 2709 except Exception as inst:
2710 2710 ui.write(" %s\n" % inst)
2711 2711 p = None
2712 2712 else:
2713 2713 ui.write(_(" template 'default' not found\n"))
2714 2714 p = None
2715 2715 else:
2716 2716 ui.write(_(" no template directories found\n"))
2717 2717 if not p:
2718 2718 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2719 2719 problems += 1
2720 2720
2721 2721 # editor
2722 2722 ui.status(_("checking commit editor...\n"))
2723 2723 editor = ui.geteditor()
2724 2724 editor = util.expandpath(editor)
2725 2725 cmdpath = util.findexe(shlex.split(editor)[0])
2726 2726 if not cmdpath:
2727 2727 if editor == 'vi':
2728 2728 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2729 2729 ui.write(_(" (specify a commit editor in your configuration"
2730 2730 " file)\n"))
2731 2731 else:
2732 2732 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2733 2733 ui.write(_(" (specify a commit editor in your configuration"
2734 2734 " file)\n"))
2735 2735 problems += 1
2736 2736
2737 2737 # check username
2738 2738 ui.status(_("checking username...\n"))
2739 2739 try:
2740 2740 ui.username()
2741 2741 except error.Abort as e:
2742 2742 ui.write(" %s\n" % e)
2743 2743 ui.write(_(" (specify a username in your configuration file)\n"))
2744 2744 problems += 1
2745 2745
2746 2746 if not problems:
2747 2747 ui.status(_("no problems detected\n"))
2748 2748 else:
2749 2749 ui.write(_("%s problems detected,"
2750 2750 " please check your install!\n") % problems)
2751 2751
2752 2752 return problems
2753 2753
2754 2754 @command('debugknown', [], _('REPO ID...'), norepo=True)
2755 2755 def debugknown(ui, repopath, *ids, **opts):
2756 2756 """test whether node ids are known to a repo
2757 2757
2758 2758 Every ID must be a full-length hex node id string. Returns a list of 0s
2759 2759 and 1s indicating unknown/known.
2760 2760 """
2761 2761 repo = hg.peer(ui, opts, repopath)
2762 2762 if not repo.capable('known'):
2763 2763 raise error.Abort("known() not supported by target repository")
2764 2764 flags = repo.known([bin(s) for s in ids])
2765 2765 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2766 2766
2767 2767 @command('debuglabelcomplete', [], _('LABEL...'))
2768 2768 def debuglabelcomplete(ui, repo, *args):
2769 2769 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2770 2770 debugnamecomplete(ui, repo, *args)
2771 2771
2772 2772 @command('debugmergestate', [], '')
2773 2773 def debugmergestate(ui, repo, *args):
2774 2774 """print merge state
2775 2775
2776 2776 Use --verbose to print out information about whether v1 or v2 merge state
2777 2777 was chosen."""
2778 2778 def _hashornull(h):
2779 2779 if h == nullhex:
2780 2780 return 'null'
2781 2781 else:
2782 2782 return h
2783 2783
2784 2784 def printrecords(version):
2785 2785 ui.write(('* version %s records\n') % version)
2786 2786 if version == 1:
2787 2787 records = v1records
2788 2788 else:
2789 2789 records = v2records
2790 2790
2791 2791 for rtype, record in records:
2792 2792 # pretty print some record types
2793 2793 if rtype == 'L':
2794 2794 ui.write(('local: %s\n') % record)
2795 2795 elif rtype == 'O':
2796 2796 ui.write(('other: %s\n') % record)
2797 2797 elif rtype == 'm':
2798 2798 driver, mdstate = record.split('\0', 1)
2799 2799 ui.write(('merge driver: %s (state "%s")\n')
2800 2800 % (driver, mdstate))
2801 2801 elif rtype in 'FDC':
2802 2802 r = record.split('\0')
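# Merge record layout (v1 stores no 'other' node): filename, merge state,
# hash of the local file (presumably the backup name under .hg/merge),
# local path, ancestor path, ancestor node, other path, then other node
# and flags.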
2803 2803 f, state, hash, lfile, afile, anode, ofile = r[0:7]
2804 2804 if version == 1:
2805 2805 onode = 'not stored in v1 format'
2806 2806 flags = r[7]
2807 2807 else:
2808 2808 onode, flags = r[7:9]
2809 2809 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
2810 2810 % (f, rtype, state, _hashornull(hash)))
2811 2811 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
2812 2812 ui.write((' ancestor path: %s (node %s)\n')
2813 2813 % (afile, _hashornull(anode)))
2814 2814 ui.write((' other path: %s (node %s)\n')
2815 2815 % (ofile, _hashornull(onode)))
2816 2816 elif rtype == 'f':
2817 2817 filename, rawextras = record.split('\0', 1)
2818 2818 extras = rawextras.split('\0')
2819 2819 i = 0
2820 2820 extrastrings = []
2821 2821 while i < len(extras):
2822 2822 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
2823 2823 i += 2
2824 2824
2825 2825 ui.write(('file extras: %s (%s)\n')
2826 2826 % (filename, ', '.join(extrastrings)))
2827 2827 else:
2828 2828 ui.write(('unrecognized entry: %s\t%s\n')
2829 2829 % (rtype, record.replace('\0', '\t')))
2830 2830
2831 2831 # Avoid mergestate.read() since it may raise an exception for unsupported
2832 2832 # merge state records. We shouldn't be doing this, but this is OK since this
2833 2833 # command is pretty low-level.
2834 2834 ms = mergemod.mergestate(repo)
2835 2835
2836 2836 # sort so that reasonable information is on top
2837 2837 v1records = ms._readrecordsv1()
2838 2838 v2records = ms._readrecordsv2()
2839 2839 order = 'LOm'
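# 'L' (local), 'O' (other) and 'm' (merge driver) records sort first, in
# that order; all remaining record types follow, sorted by their payload.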
2840 2840 def key(r):
2841 2841 idx = order.find(r[0])
2842 2842 if idx == -1:
2843 2843 return (1, r[1])
2844 2844 else:
2845 2845 return (0, idx)
2846 2846 v1records.sort(key=key)
2847 2847 v2records.sort(key=key)
2848 2848
2849 2849 if not v1records and not v2records:
2850 2850 ui.write(('no merge state found\n'))
2851 2851 elif not v2records:
2852 2852 ui.note(('no version 2 merge state\n'))
2853 2853 printrecords(1)
2854 2854 elif ms._v1v2match(v1records, v2records):
2855 2855 ui.note(('v1 and v2 states match: using v2\n'))
2856 2856 printrecords(2)
2857 2857 else:
2858 2858 ui.note(('v1 and v2 states mismatch: using v1\n'))
2859 2859 printrecords(1)
2860 2860 if ui.verbose:
2861 2861 printrecords(2)
2862 2862
2863 2863 @command('debugnamecomplete', [], _('NAME...'))
2864 2864 def debugnamecomplete(ui, repo, *args):
2865 2865 '''complete "names" - tags, open branch names, bookmark names'''
2866 2866
2867 2867 names = set()
2868 2868 # since we previously only listed open branches, we will handle that
2869 2869 # specially (after this for loop)
2870 2870 for name, ns in repo.names.iteritems():
2871 2871 if name != 'branches':
2872 2872 names.update(ns.listnames(repo))
2873 2873 names.update(tag for (tag, heads, tip, closed)
2874 2874 in repo.branchmap().iterbranches() if not closed)
2875 2875 completions = set()
2876 2876 if not args:
2877 2877 args = ['']
2878 2878 for a in args:
2879 2879 completions.update(n for n in names if n.startswith(a))
2880 2880 ui.write('\n'.join(sorted(completions)))
2881 2881 ui.write('\n')
2882 2882
2883 2883 @command('debuglocks',
2884 2884 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2885 2885 ('W', 'force-wlock', None,
2886 2886 _('free the working state lock (DANGEROUS)'))],
2887 2887 _('[OPTION]...'))
2888 2888 def debuglocks(ui, repo, **opts):
2889 2889 """show or modify state of locks
2890 2890
2891 2891 By default, this command will show which locks are held. This
2892 2892 includes the user and process holding the lock, the amount of time
2893 2893 the lock has been held, and the machine name where the process is
2894 2894 running if it's not local.
2895 2895
2896 2896 Locks protect the integrity of Mercurial's data, so should be
2897 2897 treated with care. System crashes or other interruptions may cause
2898 2898 locks to not be properly released, though Mercurial will usually
2899 2899 detect and remove such stale locks automatically.
2900 2900
2901 2901 However, detecting stale locks may not always be possible (for
2902 2902 instance, on a shared filesystem). Removing locks may also be
2903 2903 blocked by filesystem permissions.
2904 2904
2905 2905 Returns 0 if no locks are held.
2906 2906
2907 2907 """
2908 2908
2909 2909 if opts.get('force_lock'):
2910 2910 repo.svfs.unlink('lock')
2911 2911 if opts.get('force_wlock'):
2912 2912 repo.vfs.unlink('wlock')
2913 2913 if opts.get('force_lock') or opts.get('force_wlock'):
2914 2914 return 0
2915 2915
2916 2916 now = time.time()
2917 2917 held = 0
2918 2918
2919 2919 def report(vfs, name, method):
2920 2920 # this causes stale locks to get reaped for more accurate reporting
2921 2921 try:
2922 2922 l = method(False)
2923 2923 except error.LockHeld:
2924 2924 l = None
2925 2925
2926 2926 if l:
2927 2927 l.release()
2928 2928 else:
2929 2929 try:
2930 2930 stat = vfs.lstat(name)
2931 2931 age = now - stat.st_mtime
2932 2932 user = util.username(stat.st_uid)
2933 2933 locker = vfs.readlock(name)
2934 2934 if ":" in locker:
2935 2935 host, pid = locker.split(':')
2936 2936 if host == socket.gethostname():
2937 2937 locker = 'user %s, process %s' % (user, pid)
2938 2938 else:
2939 2939 locker = 'user %s, process %s, host %s' \
2940 2940 % (user, pid, host)
2941 2941 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2942 2942 return 1
2943 2943 except OSError as e:
2944 2944 if e.errno != errno.ENOENT:
2945 2945 raise
2946 2946
2947 2947 ui.write("%-6s free\n" % (name + ":"))
2948 2948 return 0
2949 2949
2950 2950 held += report(repo.svfs, "lock", repo.lock)
2951 2951 held += report(repo.vfs, "wlock", repo.wlock)
2952 2952
2953 2953 return held
2954 2954
2955 2955 @command('debugobsolete',
2956 2956 [('', 'flags', 0, _('markers flag')),
2957 2957 ('', 'record-parents', False,
2958 2958 _('record parent information for the precursor')),
2959 2959 ('r', 'rev', [], _('display markers relevant to REV')),
2960 2960 ] + commitopts2,
2961 2961 _('[OBSOLETED [REPLACEMENT ...]]'))
2962 2962 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2963 2963 """create arbitrary obsolete marker
2964 2964
2965 2965 With no arguments, displays the list of obsolescence markers."""
2966 2966
2967 2967 def parsenodeid(s):
2968 2968 try:
2969 2969 # We do not use revsingle/revrange functions here to accept
2970 2970 # arbitrary node identifiers, possibly not present in the
2971 2971 # local repository.
2972 2972 n = bin(s)
2973 2973 if len(n) != len(nullid):
2974 2974 raise TypeError()
2975 2975 return n
2976 2976 except TypeError:
2977 2977 raise error.Abort('changeset references must be full hexadecimal '
2978 2978 'node identifiers')
2979 2979
2980 2980 if precursor is not None:
2981 2981 if opts['rev']:
2982 2982 raise error.Abort('cannot select revision when creating marker')
2983 2983 metadata = {}
2984 2984 metadata['user'] = opts['user'] or ui.username()
2985 2985 succs = tuple(parsenodeid(succ) for succ in successors)
2986 2986 l = repo.lock()
2987 2987 try:
2988 2988 tr = repo.transaction('debugobsolete')
2989 2989 try:
2990 2990 date = opts.get('date')
2991 2991 if date:
2992 2992 date = util.parsedate(date)
2993 2993 else:
2994 2994 date = None
2995 2995 prec = parsenodeid(precursor)
2996 2996 parents = None
2997 2997 if opts['record_parents']:
2998 2998 if prec not in repo.unfiltered():
2999 2999 raise error.Abort('cannot use --record-parents on '
3000 3000 'unknown changesets')
3001 3001 parents = repo.unfiltered()[prec].parents()
3002 3002 parents = tuple(p.node() for p in parents)
3003 3003 repo.obsstore.create(tr, prec, succs, opts['flags'],
3004 3004 parents=parents, date=date,
3005 3005 metadata=metadata)
3006 3006 tr.close()
3007 3007 except ValueError as exc:
3008 3008 raise error.Abort(_('bad obsmarker input: %s') % exc)
3009 3009 finally:
3010 3010 tr.release()
3011 3011 finally:
3012 3012 l.release()
3013 3013 else:
3014 3014 if opts['rev']:
3015 3015 revs = scmutil.revrange(repo, opts['rev'])
3016 3016 nodes = [repo[r].node() for r in revs]
3017 3017 markers = list(obsolete.getmarkers(repo, nodes=nodes))
3018 3018 markers.sort(key=lambda x: x._data)
3019 3019 else:
3020 3020 markers = obsolete.getmarkers(repo)
3021 3021
3022 3022 for m in markers:
3023 3023 cmdutil.showmarker(ui, m)
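# Hedged usage sketch (node ids below are placeholders):
#   hg debugobsolete                           # list all markers
#   hg debugobsolete -r REV                    # markers relevant to REV
#   hg debugobsolete PRECURSOR [SUCCESSOR]...  # record PRECURSOR as rewritten
# Creating a marker requires full 40-character hexadecimal node identifiers,
# as enforced by parsenodeid() above.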
3024 3024
3025 3025 @command('debugpathcomplete',
3026 3026 [('f', 'full', None, _('complete an entire path')),
3027 3027 ('n', 'normal', None, _('show only normal files')),
3028 3028 ('a', 'added', None, _('show only added files')),
3029 3029 ('r', 'removed', None, _('show only removed files'))],
3030 3030 _('FILESPEC...'))
3031 3031 def debugpathcomplete(ui, repo, *specs, **opts):
3032 3032 '''complete part or all of a tracked path
3033 3033
3034 3034 This command supports shells that offer path name completion. It
3035 3035 currently completes only files already known to the dirstate.
3036 3036
3037 3037 Completion extends only to the next path segment unless
3038 3038 --full is specified, in which case entire paths are used.'''
3039 3039
3040 3040 def complete(path, acceptable):
3041 3041 dirstate = repo.dirstate
3042 3042 spec = os.path.normpath(os.path.join(os.getcwd(), path))
3043 3043 rootdir = repo.root + os.sep
3044 3044 if spec != repo.root and not spec.startswith(rootdir):
3045 3045 return [], []
3046 3046 if os.path.isdir(spec):
3047 3047 spec += '/'
3048 3048 spec = spec[len(rootdir):]
3049 3049 fixpaths = os.sep != '/'
3050 3050 if fixpaths:
3051 3051 spec = spec.replace(os.sep, '/')
3052 3052 speclen = len(spec)
3053 3053 fullpaths = opts['full']
3054 3054 files, dirs = set(), set()
3055 3055 adddir, addfile = dirs.add, files.add
3056 3056 for f, st in dirstate.iteritems():
3057 3057 if f.startswith(spec) and st[0] in acceptable:
3058 3058 if fixpaths:
3059 3059 f = f.replace('/', os.sep)
3060 3060 if fullpaths:
3061 3061 addfile(f)
3062 3062 continue
3063 3063 s = f.find(os.sep, speclen)
3064 3064 if s >= 0:
3065 3065 adddir(f[:s])
3066 3066 else:
3067 3067 addfile(f)
3068 3068 return files, dirs
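# Illustrative walk-through of complete(), with an assumed dirstate holding
# 'src/main.py' and 'src/util/helpers.py' and spec 'src/':
#   without --full: files == {'src/main.py'}, dirs == {'src/util'}
#                   (only the next path segment past the spec is offered)
#   with    --full: files == {'src/main.py', 'src/util/helpers.py'}
# The example paths are invented for illustration.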
3069 3069
3070 3070 acceptable = ''
3071 3071 if opts['normal']:
3072 3072 acceptable += 'nm'
3073 3073 if opts['added']:
3074 3074 acceptable += 'a'
3075 3075 if opts['removed']:
3076 3076 acceptable += 'r'
3077 3077 cwd = repo.getcwd()
3078 3078 if not specs:
3079 3079 specs = ['.']
3080 3080
3081 3081 files, dirs = set(), set()
3082 3082 for spec in specs:
3083 3083 f, d = complete(spec, acceptable or 'nmar')
3084 3084 files.update(f)
3085 3085 dirs.update(d)
3086 3086 files.update(dirs)
3087 3087 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
3088 3088 ui.write('\n')
3089 3089
3090 3090 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3091 3091 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3092 3092 '''access the pushkey key/value protocol
3093 3093
3094 3094 With two args, list the keys in the given namespace.
3095 3095
3096 3096 With five args, set a key to new if it currently is set to old.
3097 3097 Reports success or failure.
3098 3098 '''
3099 3099
3100 3100 target = hg.peer(ui, {}, repopath)
3101 3101 if keyinfo:
3102 3102 key, old, new = keyinfo
3103 3103 r = target.pushkey(namespace, key, old, new)
3104 3104 ui.status(str(r) + '\n')
3105 3105 return not r
3106 3106 else:
3107 3107 for k, v in sorted(target.listkeys(namespace).iteritems()):
3108 3108 ui.write("%s\t%s\n" % (k.encode('string-escape'),
3109 3109 v.encode('string-escape')))
3110 3110
3111 3111 @command('debugpvec', [], _('A B'))
3112 3112 def debugpvec(ui, repo, a, b=None):
3113 3113 ca = scmutil.revsingle(repo, a)
3114 3114 cb = scmutil.revsingle(repo, b)
3115 3115 pa = pvec.ctxpvec(ca)
3116 3116 pb = pvec.ctxpvec(cb)
3117 3117 if pa == pb:
3118 3118 rel = "="
3119 3119 elif pa > pb:
3120 3120 rel = ">"
3121 3121 elif pa < pb:
3122 3122 rel = "<"
3123 3123 elif pa | pb:
3124 3124 rel = "|"
3125 3125 ui.write(_("a: %s\n") % pa)
3126 3126 ui.write(_("b: %s\n") % pb)
3127 3127 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3128 3128 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
3129 3129 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
3130 3130 pa.distance(pb), rel))
3131 3131
3132 3132 @command('debugrebuilddirstate|debugrebuildstate',
3133 3133 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
3134 3134 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
3135 3135 'the working copy parent')),
3136 3136 ],
3137 3137 _('[-r REV]'))
3138 3138 def debugrebuilddirstate(ui, repo, rev, **opts):
3139 3139 """rebuild the dirstate as it would look like for the given revision
3140 3140
3141 3141 If no revision is specified, the first parent of the working directory
3141 3141 will be used.
3142 3142
3143 3143 The dirstate will be set to the files of the given revision.
3144 3144 The actual working directory content or existing dirstate
3145 3145 information such as adds or removes is not considered.
3146 3146
3147 3147 ``minimal`` will only rebuild the dirstate status for files that claim to be
3148 3148 tracked but are not in the parent manifest, or that exist in the parent
3149 3149 manifest but are not in the dirstate. It will not change adds, removes, or
3150 3150 modified files that are in the working copy parent.
3151 3151
3152 3152 One use of this command is to make the next :hg:`status` invocation
3153 3153 check the actual file content.
3154 3154 """
3155 3155 ctx = scmutil.revsingle(repo, rev)
3156 3156 with repo.wlock():
3157 3157 dirstate = repo.dirstate
3158 3158 changedfiles = None
3159 3159 # See command doc for what minimal does.
3160 3160 if opts.get('minimal'):
3161 3161 manifestfiles = set(ctx.manifest().keys())
3162 3162 dirstatefiles = set(dirstate)
3163 3163 manifestonly = manifestfiles - dirstatefiles
3164 3164 dsonly = dirstatefiles - manifestfiles
3165 3165 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
3166 3166 changedfiles = manifestonly | dsnotadded
3167 3167
3168 3168 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3169 3169
3170 3170 @command('debugrebuildfncache', [], '')
3171 3171 def debugrebuildfncache(ui, repo):
3172 3172 """rebuild the fncache file"""
3173 3173 repair.rebuildfncache(ui, repo)
3174 3174
3175 3175 @command('debugrename',
3176 3176 [('r', 'rev', '', _('revision to debug'), _('REV'))],
3177 3177 _('[-r REV] FILE'))
3178 3178 def debugrename(ui, repo, file1, *pats, **opts):
3179 3179 """dump rename information"""
3180 3180
3181 3181 ctx = scmutil.revsingle(repo, opts.get('rev'))
3182 3182 m = scmutil.match(ctx, (file1,) + pats, opts)
3183 3183 for abs in ctx.walk(m):
3184 3184 fctx = ctx[abs]
3185 3185 o = fctx.filelog().renamed(fctx.filenode())
3186 3186 rel = m.rel(abs)
3187 3187 if o:
3188 3188 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3189 3189 else:
3190 3190 ui.write(_("%s not renamed\n") % rel)
3191 3191
3192 3192 @command('debugrevlog', debugrevlogopts +
3193 3193 [('d', 'dump', False, _('dump index data'))],
3194 3194 _('-c|-m|FILE'),
3195 3195 optionalrepo=True)
3196 3196 def debugrevlog(ui, repo, file_=None, **opts):
3197 3197 """show data and statistics about a revlog"""
3198 3198 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
3199 3199
3200 3200 if opts.get("dump"):
3201 3201 numrevs = len(r)
3202 3202 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
3203 3203 " rawsize totalsize compression heads chainlen\n")
3204 3204 ts = 0
3205 3205 heads = set()
3206 3206
3207 3207 for rev in xrange(numrevs):
3208 3208 dbase = r.deltaparent(rev)
3209 3209 if dbase == -1:
3210 3210 dbase = rev
3211 3211 cbase = r.chainbase(rev)
3212 3212 clen = r.chainlen(rev)
3213 3213 p1, p2 = r.parentrevs(rev)
3214 3214 rs = r.rawsize(rev)
3215 3215 ts = ts + rs
3216 3216 heads -= set(r.parentrevs(rev))
3217 3217 heads.add(rev)
3218 3218 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3219 3219 "%11d %5d %8d\n" %
3220 3220 (rev, p1, p2, r.start(rev), r.end(rev),
3221 3221 r.start(dbase), r.start(cbase),
3222 3222 r.start(p1), r.start(p2),
3223 3223 rs, ts, ts / r.end(rev), len(heads), clen))
3224 3224 return 0
3225 3225
3226 3226 v = r.version
3227 3227 format = v & 0xFFFF
3228 3228 flags = []
3229 3229 gdelta = False
3230 3230 if v & revlog.REVLOGNGINLINEDATA:
3231 3231 flags.append('inline')
3232 3232 if v & revlog.REVLOGGENERALDELTA:
3233 3233 gdelta = True
3234 3234 flags.append('generaldelta')
3235 3235 if not flags:
3236 3236 flags = ['(none)']
3237 3237
3238 3238 nummerges = 0
3239 3239 numfull = 0
3240 3240 numprev = 0
3241 3241 nump1 = 0
3242 3242 nump2 = 0
3243 3243 numother = 0
3244 3244 nump1prev = 0
3245 3245 nump2prev = 0
3246 3246 chainlengths = []
3247 3247
3248 3248 datasize = [None, 0, 0L]
3249 3249 fullsize = [None, 0, 0L]
3250 3250 deltasize = [None, 0, 0L]
3251 3251
3252 3252 def addsize(size, l):
3253 3253 if l[0] is None or size < l[0]:
3254 3254 l[0] = size
3255 3255 if size > l[1]:
3256 3256 l[1] = size
3257 3257 l[2] += size
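# Illustrative example: addsize() keeps l as [min, max, total], so starting
# from [None, 0, 0L] the calls addsize(3, l); addsize(7, l); addsize(5, l)
# leave l == [3, 7, 15L].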
3258 3258
3259 3259 numrevs = len(r)
3260 3260 for rev in xrange(numrevs):
3261 3261 p1, p2 = r.parentrevs(rev)
3262 3262 delta = r.deltaparent(rev)
3263 3263 if format > 0:
3264 3264 addsize(r.rawsize(rev), datasize)
3265 3265 if p2 != nullrev:
3266 3266 nummerges += 1
3267 3267 size = r.length(rev)
3268 3268 if delta == nullrev:
3269 3269 chainlengths.append(0)
3270 3270 numfull += 1
3271 3271 addsize(size, fullsize)
3272 3272 else:
3273 3273 chainlengths.append(chainlengths[delta] + 1)
3274 3274 addsize(size, deltasize)
3275 3275 if delta == rev - 1:
3276 3276 numprev += 1
3277 3277 if delta == p1:
3278 3278 nump1prev += 1
3279 3279 elif delta == p2:
3280 3280 nump2prev += 1
3281 3281 elif delta == p1:
3282 3282 nump1 += 1
3283 3283 elif delta == p2:
3284 3284 nump2 += 1
3285 3285 elif delta != nullrev:
3286 3286 numother += 1
3287 3287
3288 3288 # Adjust size min value for empty cases
3289 3289 for size in (datasize, fullsize, deltasize):
3290 3290 if size[0] is None:
3291 3291 size[0] = 0
3292 3292
3293 3293 numdeltas = numrevs - numfull
3294 3294 numoprev = numprev - nump1prev - nump2prev
3295 3295 totalrawsize = datasize[2]
3296 3296 datasize[2] /= numrevs
3297 3297 fulltotal = fullsize[2]
3298 3298 fullsize[2] /= numfull
3299 3299 deltatotal = deltasize[2]
3300 3300 if numrevs - numfull > 0:
3301 3301 deltasize[2] /= numrevs - numfull
3302 3302 totalsize = fulltotal + deltatotal
3303 3303 avgchainlen = sum(chainlengths) / numrevs
3304 3304 maxchainlen = max(chainlengths)
3305 3305 compratio = 1
3306 3306 if totalsize:
3307 3307 compratio = totalrawsize / totalsize
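# Illustrative example (assumed sizes): with totalrawsize == 3000 and
# totalsize == 1000 the ratio is 3; Python 2 integer division truncates, so
# compratio is reported as a whole number.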
3308 3308
3309 3309 basedfmtstr = '%%%dd\n'
3310 3310 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
3311 3311
3312 3312 def dfmtstr(max):
3313 3313 return basedfmtstr % len(str(max))
3314 3314 def pcfmtstr(max, padding=0):
3315 3315 return basepcfmtstr % (len(str(max)), ' ' * padding)
3316 3316
3317 3317 def pcfmt(value, total):
3318 3318 if total:
3319 3319 return (value, 100 * float(value) / total)
3320 3320 else:
3321 3321 return value, 100.0
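# Illustrative examples for the formatting helpers above (assumed values):
#   pcfmt(25, 100) -> (25, 25.0), rendered via pcfmtstr() as e.g. '25 (25.00%)'
#   pcfmt(3, 0)    -> (3, 100.0), avoiding division by zero
#   dfmtstr(1234)  -> '%4d\n', the width tracking len(str(max))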
3322 3322
3323 3323 ui.write(('format : %d\n') % format)
3324 3324 ui.write(('flags : %s\n') % ', '.join(flags))
3325 3325
3326 3326 ui.write('\n')
3327 3327 fmt = pcfmtstr(totalsize)
3328 3328 fmt2 = dfmtstr(totalsize)
3329 3329 ui.write(('revisions : ') + fmt2 % numrevs)
3330 3330 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
3331 3331 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
3332 3332 ui.write(('revisions : ') + fmt2 % numrevs)
3333 3333 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
3334 3334 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
3335 3335 ui.write(('revision size : ') + fmt2 % totalsize)
3336 3336 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
3337 3337 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
3338 3338
3339 3339 ui.write('\n')
3340 3340 fmt = dfmtstr(max(avgchainlen, compratio))
3341 3341 ui.write(('avg chain length : ') + fmt % avgchainlen)
3342 3342 ui.write(('max chain length : ') + fmt % maxchainlen)
3343 3343 ui.write(('compression ratio : ') + fmt % compratio)
3344 3344
3345 3345 if format > 0:
3346 3346 ui.write('\n')
3347 3347 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
3348 3348 % tuple(datasize))
3349 3349 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
3350 3350 % tuple(fullsize))
3351 3351 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
3352 3352 % tuple(deltasize))
3353 3353
3354 3354 if numdeltas > 0:
3355 3355 ui.write('\n')
3356 3356 fmt = pcfmtstr(numdeltas)
3357 3357 fmt2 = pcfmtstr(numdeltas, 4)
3358 3358 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
3359 3359 if numprev > 0:
3360 3360 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
3361 3361 numprev))
3362 3362 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
3363 3363 numprev))
3364 3364 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
3365 3365 numprev))
3366 3366 if gdelta:
3367 3367 ui.write(('deltas against p1 : ')
3368 3368 + fmt % pcfmt(nump1, numdeltas))
3369 3369 ui.write(('deltas against p2 : ')
3370 3370 + fmt % pcfmt(nump2, numdeltas))
3371 3371 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
3372 3372 numdeltas))
3373 3373
3374 3374 @command('debugrevspec',
3375 3375 [('', 'optimize', None, _('print parsed tree after optimizing'))],
3376 3376 _('REVSPEC'))
3377 3377 def debugrevspec(ui, repo, expr, **opts):
3378 3378 """parse and apply a revision specification
3379 3379
3380 3380 Use --verbose to print the parsed tree before and after aliases
3381 3381 expansion.
3382 3382 """
3383 3383 if ui.verbose:
3384 3384 tree = revset.parse(expr, lookup=repo.__contains__)
3385 3385 ui.note(revset.prettyformat(tree), "\n")
3386 3386 newtree = revset.findaliases(ui, tree)
3387 3387 if newtree != tree:
3388 3388 ui.note(revset.prettyformat(newtree), "\n")
3389 3389 tree = newtree
3390 3390 newtree = revset.foldconcat(tree)
3391 3391 if newtree != tree:
3392 3392 ui.note(revset.prettyformat(newtree), "\n")
3393 3393 if opts["optimize"]:
3394 3394 weight, optimizedtree = revset.optimize(newtree, True)
3395 3395 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
3396 3396 func = revset.match(ui, expr, repo)
3397 3397 revs = func(repo)
3398 3398 if ui.verbose:
3399 3399 ui.note("* set:\n", revset.prettyformatset(revs), "\n")
3400 3400 for c in revs:
3401 3401 ui.write("%s\n" % c)
3402 3402
3403 3403 @command('debugsetparents', [], _('REV1 [REV2]'))
3404 3404 def debugsetparents(ui, repo, rev1, rev2=None):
3405 3405 """manually set the parents of the current working directory
3406 3406
3407 3407 This is useful for writing repository conversion tools, but should
3408 3408 be used with care. For example, neither the working directory nor the
3409 3409 dirstate is updated, so file status may be incorrect after running this
3410 3410 command.
3411 3411
3412 3412 Returns 0 on success.
3413 3413 """
3414 3414
3415 3415 r1 = scmutil.revsingle(repo, rev1).node()
3416 3416 r2 = scmutil.revsingle(repo, rev2, 'null').node()
3417 3417
3418 3418 with repo.wlock():
3419 3419 repo.dirstate.beginparentchange()
3420 3420 repo.setparents(r1, r2)
3421 3421 repo.dirstate.endparentchange()
3422 3422
3423 3423 @command('debugdirstate|debugstate',
3424 3424 [('', 'nodates', None, _('do not display the saved mtime')),
3425 3425 ('', 'datesort', None, _('sort by saved mtime'))],
3426 3426 _('[OPTION]...'))
3427 3427 def debugstate(ui, repo, **opts):
3428 3428 """show the contents of the current dirstate"""
3429 3429
3430 3430 nodates = opts.get('nodates')
3431 3431 # to look for, or we could have simply failed to find "foo.bar"
3432 3432
3433 3433 timestr = ""
3434 3434 if datesort:
3435 3435 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
3436 3436 else:
3437 3437 keyfunc = None # sort by filename
3438 3438 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
3439 3439 if ent[3] == -1:
3440 3440 timestr = 'unset '
3441 3441 elif nodates:
3442 3442 timestr = 'set '
3443 3443 else:
3444 3444 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
3445 3445 time.localtime(ent[3]))
3446 3446 if ent[1] & 0o20000:
3447 3447 mode = 'lnk'
3448 3448 else:
3449 3449 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
3450 3450 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
3451 3451 for f in repo.dirstate.copies():
3452 3452 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
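# Illustrative output line (assumed entry): a normal 'n' entry with mode
# 0o100644, size 2048 and a valid mtime is printed by the loop above roughly as
#   n 644       2048 2016-01-30 12:34:56 path/to/file
# with 'lnk' shown instead of the octal mode for symlinks, and 'unset'/'set'
# in the date column when the mtime is unknown or --nodates is given.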
3453 3453
3454 3454 @command('debugsub',
3455 3455 [('r', 'rev', '',
3456 3456 _('revision to check'), _('REV'))],
3457 3457 _('[-r REV] [REV]'))
3458 3458 def debugsub(ui, repo, rev=None):
3459 3459 ctx = scmutil.revsingle(repo, rev, None)
3460 3460 for k, v in sorted(ctx.substate.items()):
3461 3461 ui.write(('path %s\n') % k)
3462 3462 ui.write((' source %s\n') % v[0])
3463 3463 ui.write((' revision %s\n') % v[1])
3464 3464
3465 3465 @command('debugsuccessorssets',
3466 3466 [],
3467 3467 _('[REV]'))
3468 3468 def debugsuccessorssets(ui, repo, *revs):
3469 3469 """show set of successors for revision
3470 3470
3471 3471 A successors set of changeset A is a consistent group of revisions that
3472 3472 succeed A. It contains non-obsolete changesets only.
3473 3473
3474 3474 In most cases a changeset A has a single successors set containing a single
3475 3475 successor (changeset A replaced by A').
3476 3476
3477 3477 A changeset that is made obsolete with no successors is called "pruned".
3478 3478 Such changesets have no successors sets at all.
3479 3479
3480 3480 A changeset that has been "split" will have a successors set containing
3481 3481 more than one successor.
3482 3482
3483 3483 A changeset that has been rewritten in multiple different ways is called
3484 3484 "divergent". Such changesets have multiple successor sets (each of which
3485 3485 may also be split, i.e. have multiple successors).
3486 3486
3487 3487 Results are displayed as follows::
3488 3488
3489 3489 <rev1>
3490 3490 <successors-1A>
3491 3491 <rev2>
3492 3492 <successors-2A>
3493 3493 <successors-2B1> <successors-2B2> <successors-2B3>
3494 3494
3495 3495 Here rev2 has two possible (i.e. divergent) successors sets. The first
3496 3496 holds one element, whereas the second holds three (i.e. the changeset has
3497 3497 been split).
3498 3498 """
3499 3499 # passed to successorssets caching computation from one call to another
3500 3500 cache = {}
3501 3501 ctx2str = str
3502 3502 node2str = short
3503 3503 if ui.debug():
3504 3504 def ctx2str(ctx):
3505 3505 return ctx.hex()
3506 3506 node2str = hex
3507 3507 for rev in scmutil.revrange(repo, revs):
3508 3508 ctx = repo[rev]
3509 3509 ui.write('%s\n'% ctx2str(ctx))
3510 3510 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
3511 3511 if succsset:
3512 3512 ui.write(' ')
3513 3513 ui.write(node2str(succsset[0]))
3514 3514 for node in succsset[1:]:
3515 3515 ui.write(' ')
3516 3516 ui.write(node2str(node))
3517 3517 ui.write('\n')
3518 3518
3519 3519 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
3520 3520 def debugwalk(ui, repo, *pats, **opts):
3521 3521 """show how files match on given patterns"""
3522 3522 m = scmutil.match(repo[None], pats, opts)
3523 3523 items = list(repo.walk(m))
3524 3524 if not items:
3525 3525 return
3526 3526 f = lambda fn: fn
3527 3527 if ui.configbool('ui', 'slash') and os.sep != '/':
3528 3528 f = lambda fn: util.normpath(fn)
3529 3529 fmt = 'f %%-%ds %%-%ds %%s' % (
3530 3530 max([len(abs) for abs in items]),
3531 3531 max([len(m.rel(abs)) for abs in items]))
3532 3532 for abs in items:
3533 3533 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
3534 3534 ui.write("%s\n" % line.rstrip())
3535 3535
3536 3536 @command('debugwireargs',
3537 3537 [('', 'three', '', 'three'),
3538 3538 ('', 'four', '', 'four'),
3539 3539 ('', 'five', '', 'five'),
3540 3540 ] + remoteopts,
3541 3541 _('REPO [OPTIONS]... [ONE [TWO]]'),
3542 3542 norepo=True)
3543 3543 def debugwireargs(ui, repopath, *vals, **opts):
3544 3544 repo = hg.peer(ui, opts, repopath)
3545 3545 for opt in remoteopts:
3546 3546 del opts[opt[1]]
3547 3547 args = {}
3548 3548 for k, v in opts.iteritems():
3549 3549 if v:
3550 3550 args[k] = v
3551 3551 # run twice to check that we don't mess up the stream for the next command
3552 3552 res1 = repo.debugwireargs(*vals, **args)
3553 3553 res2 = repo.debugwireargs(*vals, **args)
3554 3554 ui.write("%s\n" % res1)
3555 3555 if res1 != res2:
3556 3556 ui.warn("%s\n" % res2)
3557 3557
3558 3558 @command('^diff',
3559 3559 [('r', 'rev', [], _('revision'), _('REV')),
3560 3560 ('c', 'change', '', _('change made by revision'), _('REV'))
3561 3561 ] + diffopts + diffopts2 + walkopts + subrepoopts,
3562 3562 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
3563 3563 inferrepo=True)
3564 3564 def diff(ui, repo, *pats, **opts):
3565 3565 """diff repository (or selected files)
3566 3566
3567 3567 Show differences between revisions for the specified files.
3568 3568
3569 3569 Differences between files are shown using the unified diff format.
3570 3570
3571 3571 .. note::
3572 3572
3573 3573 :hg:`diff` may generate unexpected results for merges, as it will
3574 3574 default to comparing against the working directory's first
3575 3575 parent changeset if no revisions are specified.
3576 3576
3577 3577 When two revision arguments are given, then changes are shown
3578 3578 between those revisions. If only one revision is specified then
3579 3579 that revision is compared to the working directory, and, when no
3580 3580 revisions are specified, the working directory files are compared
3581 3581 to its first parent.
3582 3582
3583 3583 Alternatively you can specify -c/--change with a revision to see
3584 3584 the changes in that changeset relative to its first parent.
3585 3585
3586 3586 Without the -a/--text option, diff will avoid generating diffs of
3587 3587 files it detects as binary. With -a, diff will generate a diff
3588 3588 anyway, probably with undesirable results.
3589 3589
3590 3590 Use the -g/--git option to generate diffs in the git extended diff
3591 3591 format. For more information, read :hg:`help diffs`.
3592 3592
3593 3593 .. container:: verbose
3594 3594
3595 3595 Examples:
3596 3596
3597 3597 - compare a file in the current working directory to its parent::
3598 3598
3599 3599 hg diff foo.c
3600 3600
3601 3601 - compare two historical versions of a directory, with rename info::
3602 3602
3603 3603 hg diff --git -r 1.0:1.2 lib/
3604 3604
3605 3605 - get change stats relative to the last change on some date::
3606 3606
3607 3607 hg diff --stat -r "date('may 2')"
3608 3608
3609 3609 - diff all newly-added files that contain a keyword::
3610 3610
3611 3611 hg diff "set:added() and grep(GNU)"
3612 3612
3613 3613 - compare a revision and its parents::
3614 3614
3615 3615 hg diff -c 9353 # compare against first parent
3616 3616 hg diff -r 9353^:9353 # same using revset syntax
3617 3617 hg diff -r 9353^2:9353 # compare against the second parent
3618 3618
3619 3619 Returns 0 on success.
3620 3620 """
3621 3621
3622 3622 revs = opts.get('rev')
3623 3623 change = opts.get('change')
3624 3624 stat = opts.get('stat')
3625 3625 reverse = opts.get('reverse')
3626 3626
3627 3627 if revs and change:
3628 3628 msg = _('cannot specify --rev and --change at the same time')
3629 3629 raise error.Abort(msg)
3630 3630 elif change:
3631 3631 node2 = scmutil.revsingle(repo, change, None).node()
3632 3632 node1 = repo[node2].p1().node()
3633 3633 else:
3634 3634 node1, node2 = scmutil.revpair(repo, revs)
3635 3635
3636 3636 if reverse:
3637 3637 node1, node2 = node2, node1
3638 3638
3639 3639 diffopts = patch.diffallopts(ui, opts)
3640 3640 m = scmutil.match(repo[node2], pats, opts)
3641 3641 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
3642 3642 listsubrepos=opts.get('subrepos'),
3643 3643 root=opts.get('root'))
3644 3644
3645 3645 @command('^export',
3646 3646 [('o', 'output', '',
3647 3647 _('print output to file with formatted name'), _('FORMAT')),
3648 3648 ('', 'switch-parent', None, _('diff against the second parent')),
3649 3649 ('r', 'rev', [], _('revisions to export'), _('REV')),
3650 3650 ] + diffopts,
3651 3651 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
3652 3652 def export(ui, repo, *changesets, **opts):
3653 3653 """dump the header and diffs for one or more changesets
3654 3654
3655 3655 Print the changeset header and diffs for one or more revisions.
3656 3656 If no revision is given, the parent of the working directory is used.
3657 3657
3658 3658 The information shown in the changeset header is: author, date,
3659 3659 branch name (if non-default), changeset hash, parent(s) and commit
3660 3660 comment.
3661 3661
3662 3662 .. note::
3663 3663
3664 3664 :hg:`export` may generate unexpected diff output for merge
3665 3665 changesets, as it will compare the merge changeset against its
3666 3666 first parent only.
3667 3667
3668 3668 Output may be to a file, in which case the name of the file is
3669 3669 given using a format string. The formatting rules are as follows:
3670 3670
3671 3671 :``%%``: literal "%" character
3672 3672 :``%H``: changeset hash (40 hexadecimal digits)
3673 3673 :``%N``: number of patches being generated
3674 3674 :``%R``: changeset revision number
3675 3675 :``%b``: basename of the exporting repository
3676 3676 :``%h``: short-form changeset hash (12 hexadecimal digits)
3677 3677 :``%m``: first line of the commit message (only alphanumeric characters)
3678 3678 :``%n``: zero-padded sequence number, starting at 1
3679 3679 :``%r``: zero-padded changeset revision number
3680 3680
3681 3681 Without the -a/--text option, export will avoid generating diffs
3682 3682 of files it detects as binary. With -a, export will generate a
3683 3683 diff anyway, probably with undesirable results.
3684 3684
3685 3685 Use the -g/--git option to generate diffs in the git extended diff
3686 3686 format. See :hg:`help diffs` for more information.
3687 3687
3688 3688 With the --switch-parent option, the diff will be against the
3689 3689 second parent. This can be useful for reviewing a merge.
3690 3690
3691 3691 .. container:: verbose
3692 3692
3693 3693 Examples:
3694 3694
3695 3695 - use export and import to transplant a bugfix to the current
3696 3696 branch::
3697 3697
3698 3698 hg export -r 9353 | hg import -
3699 3699
3700 3700 - export all the changesets between two revisions to a file with
3701 3701 rename information::
3702 3702
3703 3703 hg export --git -r 123:150 > changes.txt
3704 3704
3705 3705 - split outgoing changes into a series of patches with
3706 3706 descriptive names::
3707 3707
3708 3708 hg export -r "outgoing()" -o "%n-%m.patch"
3709 3709
3710 3710 Returns 0 on success.
3711 3711 """
3712 3712 changesets += tuple(opts.get('rev', []))
3713 3713 if not changesets:
3714 3714 changesets = ['.']
3715 3715 revs = scmutil.revrange(repo, changesets)
3716 3716 if not revs:
3717 3717 raise error.Abort(_("export requires at least one changeset"))
3718 3718 if len(revs) > 1:
3719 3719 ui.note(_('exporting patches:\n'))
3720 3720 else:
3721 3721 ui.note(_('exporting patch:\n'))
3722 3722 cmdutil.export(repo, revs, template=opts.get('output'),
3723 3723 switch_parent=opts.get('switch_parent'),
3724 3724 opts=patch.diffallopts(ui, opts))
3725 3725
3726 3726 @command('files',
3727 3727 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3728 3728 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3729 3729 ] + walkopts + formatteropts + subrepoopts,
3730 3730 _('[OPTION]... [PATTERN]...'))
3731 3731 def files(ui, repo, *pats, **opts):
3732 3732 """list tracked files
3733 3733
3734 3734 Print files under Mercurial control in the working directory or
3735 3735 specified revision whose names match the given patterns (excluding
3736 3736 removed files).
3737 3737
3738 3738 If no patterns are given to match, this command prints the names
3739 3739 of all files under Mercurial control in the working directory.
3740 3740
3741 3741 .. container:: verbose
3742 3742
3743 3743 Examples:
3744 3744
3745 3745 - list all files under the current directory::
3746 3746
3747 3747 hg files .
3748 3748
3749 3749 - show sizes and flags for the current revision::
3750 3750
3751 3751 hg files -vr .
3752 3752
3753 3753 - list all files named README::
3754 3754
3755 3755 hg files -I "**/README"
3756 3756
3757 3757 - list all binary files::
3758 3758
3759 3759 hg files "set:binary()"
3760 3760
3761 3761 - find files containing a regular expression::
3762 3762
3763 3763 hg files "set:grep('bob')"
3764 3764
3765 3765 - search tracked file contents with xargs and grep::
3766 3766
3767 3767 hg files -0 | xargs -0 grep foo
3768 3768
3769 3769 See :hg:`help patterns` and :hg:`help filesets` for more information
3770 3770 on specifying file patterns.
3771 3771
3772 3772 Returns 0 if a match is found, 1 otherwise.
3773 3773
3774 3774 """
3775 3775 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3776 3776
3777 3777 end = '\n'
3778 3778 if opts.get('print0'):
3779 3779 end = '\0'
3780 3780 fm = ui.formatter('files', opts)
3781 3781 fmt = '%s' + end
3782 3782
3783 3783 m = scmutil.match(ctx, pats, opts)
3784 3784 ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
3785 3785
3786 3786 fm.end()
3787 3787
3788 3788 return ret
3789 3789
3790 3790 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3791 3791 def forget(ui, repo, *pats, **opts):
3792 3792 """forget the specified files on the next commit
3793 3793
3794 3794 Mark the specified files so they will no longer be tracked
3795 3795 after the next commit.
3796 3796
3797 3797 This only removes files from the current branch, not from the
3798 3798 entire project history, and it does not delete them from the
3799 3799 working directory.
3800 3800
3801 3801 To delete the file from the working directory, see :hg:`remove`.
3802 3802
3803 3803 To undo a forget before the next commit, see :hg:`add`.
3804 3804
3805 3805 .. container:: verbose
3806 3806
3807 3807 Examples:
3808 3808
3809 3809 - forget newly-added binary files::
3810 3810
3811 3811 hg forget "set:added() and binary()"
3812 3812
3813 3813 - forget files that would be excluded by .hgignore::
3814 3814
3815 3815 hg forget "set:hgignore()"
3816 3816
3817 3817 Returns 0 on success.
3818 3818 """
3819 3819
3820 3820 if not pats:
3821 3821 raise error.Abort(_('no files specified'))
3822 3822
3823 3823 m = scmutil.match(repo[None], pats, opts)
3824 3824 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3825 3825 return rejected and 1 or 0
3826 3826
3827 3827 @command(
3828 3828 'graft',
3829 3829 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3830 3830 ('c', 'continue', False, _('resume interrupted graft')),
3831 3831 ('e', 'edit', False, _('invoke editor on commit messages')),
3832 3832 ('', 'log', None, _('append graft info to log message')),
3833 3833 ('f', 'force', False, _('force graft')),
3834 3834 ('D', 'currentdate', False,
3835 3835 _('record the current date as commit date')),
3836 3836 ('U', 'currentuser', False,
3837 3837 _('record the current user as committer'))]
3838 3838 + commitopts2 + mergetoolopts + dryrunopts,
3839 3839 _('[OPTION]... [-r REV]... REV...'))
3840 3840 def graft(ui, repo, *revs, **opts):
3841 3841 '''copy changes from other branches onto the current branch
3842 3842
3843 3843 This command uses Mercurial's merge logic to copy individual
3844 3844 changes from other branches without merging branches in the
3845 3845 history graph. This is sometimes known as 'backporting' or
3846 3846 'cherry-picking'. By default, graft will copy user, date, and
3847 3847 description from the source changesets.
3848 3848
3849 3849 Changesets that are ancestors of the current revision, that have
3850 3850 already been grafted, or that are merges will be skipped.
3851 3851
3852 3852 If --log is specified, log messages will have a comment appended
3853 3853 of the form::
3854 3854
3855 3855 (grafted from CHANGESETHASH)
3856 3856
3857 3857 If --force is specified, revisions will be grafted even if they
3858 3858 are already ancestors of or have been grafted to the destination.
3859 3859 This is useful when the revisions have since been backed out.
3860 3860
3861 3861 If a graft merge results in conflicts, the graft process is
3862 3862 interrupted so that the current merge can be manually resolved.
3863 3863 Once all conflicts are addressed, the graft process can be
3864 3864 continued with the -c/--continue option.
3865 3865
3866 3866 .. note::
3867 3867
3868 3868 The -c/--continue option does not reapply earlier options, except
3869 3869 for --force.
3870 3870
3871 3871 .. container:: verbose
3872 3872
3873 3873 Examples:
3874 3874
3875 3875 - copy a single change to the stable branch and edit its description::
3876 3876
3877 3877 hg update stable
3878 3878 hg graft --edit 9393
3879 3879
3880 3880 - graft a range of changesets with one exception, updating dates::
3881 3881
3882 3882 hg graft -D "2085::2093 and not 2091"
3883 3883
3884 3884 - continue a graft after resolving conflicts::
3885 3885
3886 3886 hg graft -c
3887 3887
3888 3888 - show the source of a grafted changeset::
3889 3889
3890 3890 hg log --debug -r .
3891 3891
3892 3892 - show revisions sorted by date::
3893 3893
3894 3894 hg log -r 'sort(all(), date)'
3895 3895
3896 3896 See :hg:`help revisions` and :hg:`help revsets` for more about
3897 3897 specifying revisions.
3898 3898
3899 3899 Returns 0 on successful completion.
3900 3900 '''
3901 3901 with repo.wlock():
3902 3902 return _dograft(ui, repo, *revs, **opts)
3903 3903
3904 3904 def _dograft(ui, repo, *revs, **opts):
3905 3905 if revs and opts['rev']:
3906 3906 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
3907 3907 'revision ordering!\n'))
3908 3908
3909 3909 revs = list(revs)
3910 3910 revs.extend(opts['rev'])
3911 3911
3912 3912 if not opts.get('user') and opts.get('currentuser'):
3913 3913 opts['user'] = ui.username()
3914 3914 if not opts.get('date') and opts.get('currentdate'):
3915 3915 opts['date'] = "%d %d" % util.makedate()
3916 3916
3917 3917 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3918 3918
3919 3919 cont = False
3920 3920 if opts['continue']:
3921 3921 cont = True
3922 3922 if revs:
3923 3923 raise error.Abort(_("can't specify --continue and revisions"))
3924 3924 # read in unfinished revisions
3925 3925 try:
3926 3926 nodes = repo.vfs.read('graftstate').splitlines()
3927 3927 revs = [repo[node].rev() for node in nodes]
3928 3928 except IOError as inst:
3929 3929 if inst.errno != errno.ENOENT:
3930 3930 raise
3931 3931 raise error.Abort(_("no graft state found, can't continue"))
3932 3932 else:
3933 3933 cmdutil.checkunfinished(repo)
3934 3934 cmdutil.bailifchanged(repo)
3935 3935 if not revs:
3936 3936 raise error.Abort(_('no revisions specified'))
3937 3937 revs = scmutil.revrange(repo, revs)
3938 3938
3939 3939 skipped = set()
3940 3940 # check for merges
3941 3941 for rev in repo.revs('%ld and merge()', revs):
3942 3942 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3943 3943 skipped.add(rev)
3944 3944 revs = [r for r in revs if r not in skipped]
3945 3945 if not revs:
3946 3946 return -1
3947 3947
3948 3948 # Don't check in the --continue case, in effect retaining --force across
3949 3949 # --continues. That's because without --force, any revisions we decided to
3950 3950 # skip would have been filtered out here, so they wouldn't have made their
3951 3951 # way to the graftstate. With --force, any revisions we would have otherwise
3952 3952 # skipped would not have been filtered out, and if they hadn't been applied
3953 3953 # already, they'd have been in the graftstate.
3954 3954 if not (cont or opts.get('force')):
3955 3955 # check for ancestors of dest branch
3956 3956 crev = repo['.'].rev()
3957 3957 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3958 3958 # Cannot use x.remove(y) on smart set, this has to be a list.
3959 3959 # XXX make this lazy in the future
3960 3960 revs = list(revs)
3961 3961 # don't mutate while iterating, create a copy
3962 3962 for rev in list(revs):
3963 3963 if rev in ancestors:
3964 3964 ui.warn(_('skipping ancestor revision %d:%s\n') %
3965 3965 (rev, repo[rev]))
3966 3966 # XXX remove on list is slow
3967 3967 revs.remove(rev)
3968 3968 if not revs:
3969 3969 return -1
3970 3970
3971 3971 # analyze revs for earlier grafts
3972 3972 ids = {}
3973 3973 for ctx in repo.set("%ld", revs):
3974 3974 ids[ctx.hex()] = ctx.rev()
3975 3975 n = ctx.extra().get('source')
3976 3976 if n:
3977 3977 ids[n] = ctx.rev()
3978 3978
3979 3979 # check ancestors for earlier grafts
3980 3980 ui.debug('scanning for duplicate grafts\n')
3981 3981
3982 3982 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3983 3983 ctx = repo[rev]
3984 3984 n = ctx.extra().get('source')
3985 3985 if n in ids:
3986 3986 try:
3987 3987 r = repo[n].rev()
3988 3988 except error.RepoLookupError:
3989 3989 r = None
3990 3990 if r in revs:
3991 3991 ui.warn(_('skipping revision %d:%s '
3992 3992 '(already grafted to %d:%s)\n')
3993 3993 % (r, repo[r], rev, ctx))
3994 3994 revs.remove(r)
3995 3995 elif ids[n] in revs:
3996 3996 if r is None:
3997 3997 ui.warn(_('skipping already grafted revision %d:%s '
3998 3998 '(%d:%s also has unknown origin %s)\n')
3999 3999 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
4000 4000 else:
4001 4001 ui.warn(_('skipping already grafted revision %d:%s '
4002 4002 '(%d:%s also has origin %d:%s)\n')
4003 4003 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
4004 4004 revs.remove(ids[n])
4005 4005 elif ctx.hex() in ids:
4006 4006 r = ids[ctx.hex()]
4007 4007 ui.warn(_('skipping already grafted revision %d:%s '
4008 4008 '(was grafted from %d:%s)\n') %
4009 4009 (r, repo[r], rev, ctx))
4010 4010 revs.remove(r)
4011 4011 if not revs:
4012 4012 return -1
4013 4013
4014 4014 for pos, ctx in enumerate(repo.set("%ld", revs)):
4015 4015 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
4016 4016 ctx.description().split('\n', 1)[0])
4017 4017 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
4018 4018 if names:
4019 4019 desc += ' (%s)' % ' '.join(names)
4020 4020 ui.status(_('grafting %s\n') % desc)
4021 4021 if opts.get('dry_run'):
4022 4022 continue
4023 4023
4024 4024 source = ctx.extra().get('source')
4025 4025 extra = {}
4026 4026 if source:
4027 4027 extra['source'] = source
4028 4028 extra['intermediate-source'] = ctx.hex()
4029 4029 else:
4030 4030 extra['source'] = ctx.hex()
4031 4031 user = ctx.user()
4032 4032 if opts.get('user'):
4033 4033 user = opts['user']
4034 4034 date = ctx.date()
4035 4035 if opts.get('date'):
4036 4036 date = opts['date']
4037 4037 message = ctx.description()
4038 4038 if opts.get('log'):
4039 4039 message += '\n(grafted from %s)' % ctx.hex()
4040 4040
4041 4041 # we don't merge the first commit when continuing
4042 4042 if not cont:
4043 4043 # perform the graft merge with p1(rev) as 'ancestor'
4044 4044 try:
4045 4045 # ui.forcemerge is an internal variable, do not document
4046 4046 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4047 4047 'graft')
4048 4048 stats = mergemod.graft(repo, ctx, ctx.p1(),
4049 4049 ['local', 'graft'])
4050 4050 finally:
4051 4051 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
4052 4052 # report any conflicts
4053 4053 if stats and stats[3] > 0:
4054 4054 # write out state for --continue
4055 4055 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
4056 4056 repo.vfs.write('graftstate', ''.join(nodelines))
4057 4057 extra = ''
4058 4058 if opts.get('user'):
4059 4059 extra += ' --user %s' % opts['user']
4060 4060 if opts.get('date'):
4061 4061 extra += ' --date %s' % opts['date']
4062 4062 if opts.get('log'):
4063 4063 extra += ' --log'
4064 4064 hint = _('use hg resolve and hg graft --continue%s') % extra
4065 4065 raise error.Abort(
4066 4066 _("unresolved conflicts, can't continue"),
4067 4067 hint=hint)
4068 4068 else:
4069 4069 cont = False
4070 4070
4071 4071 # commit
4072 4072 node = repo.commit(text=message, user=user,
4073 4073 date=date, extra=extra, editor=editor)
4074 4074 if node is None:
4075 4075 ui.warn(
4076 4076 _('note: graft of %d:%s created no changes to commit\n') %
4077 4077 (ctx.rev(), ctx))
4078 4078
4079 4079 # remove state when we complete successfully
4080 4080 if not opts.get('dry_run'):
4081 4081 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
4082 4082
4083 4083 return 0
4084 4084
4085 4085 @command('grep',
4086 4086 [('0', 'print0', None, _('end fields with NUL')),
4087 4087 ('', 'all', None, _('print all revisions that match')),
4088 4088 ('a', 'text', None, _('treat all files as text')),
4089 4089 ('f', 'follow', None,
4090 4090 _('follow changeset history,'
4091 4091 ' or file history across copies and renames')),
4092 4092 ('i', 'ignore-case', None, _('ignore case when matching')),
4093 4093 ('l', 'files-with-matches', None,
4094 4094 _('print only filenames and revisions that match')),
4095 4095 ('n', 'line-number', None, _('print matching line numbers')),
4096 4096 ('r', 'rev', [],
4097 4097 _('only search files changed within revision range'), _('REV')),
4098 4098 ('u', 'user', None, _('list the author (long with -v)')),
4099 4099 ('d', 'date', None, _('list the date (short with -q)')),
4100 4100 ] + walkopts,
4101 4101 _('[OPTION]... PATTERN [FILE]...'),
4102 4102 inferrepo=True)
4103 4103 def grep(ui, repo, pattern, *pats, **opts):
4104 4104 """search for a pattern in specified files and revisions
4105 4105
4106 4106 Search revisions of files for a regular expression.
4107 4107
4108 4108 This command behaves differently than Unix grep. It only accepts
4109 4109 Python/Perl regexps. It searches repository history, not the
4110 4110 working directory. It always prints the revision number in which a
4111 4111 match appears.
4112 4112
4113 4113 By default, grep only prints output for the first revision of a
4114 4114 file in which it finds a match. To get it to print every revision
4115 4115 that contains a change in match status ("-" for a match that
4116 4116 becomes a non-match, or "+" for a non-match that becomes a match),
4117 4117 use the --all flag.
4118 4118
4119 4119 Returns 0 if a match is found, 1 otherwise.
4120 4120 """
4121 4121 reflags = re.M
4122 4122 if opts.get('ignore_case'):
4123 4123 reflags |= re.I
4124 4124 try:
4125 4125 regexp = util.re.compile(pattern, reflags)
4126 4126 except re.error as inst:
4127 4127 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
4128 4128 return 1
4129 4129 sep, eol = ':', '\n'
4130 4130 if opts.get('print0'):
4131 4131 sep = eol = '\0'
4132 4132
4133 4133 getfile = util.lrucachefunc(repo.file)
4134 4134
4135 4135 def matchlines(body):
4136 4136 begin = 0
4137 4137 linenum = 0
4138 4138 while begin < len(body):
4139 4139 match = regexp.search(body, begin)
4140 4140 if not match:
4141 4141 break
4142 4142 mstart, mend = match.span()
4143 4143 linenum += body.count('\n', begin, mstart) + 1
4144 4144 lstart = body.rfind('\n', begin, mstart) + 1 or begin
4145 4145 begin = body.find('\n', mend) + 1 or len(body) + 1
4146 4146 lend = begin - 1
4147 4147 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
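# Illustrative trace (assumed input): for body = "foo\nbar foo\n" and the
# pattern "foo", matchlines() yields
#   (1, 0, 3, 'foo')
#   (2, 4, 7, 'bar foo')
# i.e. (line number, match start column, match end column, full line text).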
4148 4148
4149 4149 class linestate(object):
4150 4150 def __init__(self, line, linenum, colstart, colend):
4151 4151 self.line = line
4152 4152 self.linenum = linenum
4153 4153 self.colstart = colstart
4154 4154 self.colend = colend
4155 4155
4156 4156 def __hash__(self):
4157 4157 return hash((self.linenum, self.line))
4158 4158
4159 4159 def __eq__(self, other):
4160 4160 return self.line == other.line
4161 4161
4162 4162 def __iter__(self):
4163 4163 yield (self.line[:self.colstart], '')
4164 4164 yield (self.line[self.colstart:self.colend], 'grep.match')
4165 4165 rest = self.line[self.colend:]
4166 4166 while rest != '':
4167 4167 match = regexp.search(rest)
4168 4168 if not match:
4169 4169 yield (rest, '')
4170 4170 break
4171 4171 mstart, mend = match.span()
4172 4172 yield (rest[:mstart], '')
4173 4173 yield (rest[mstart:mend], 'grep.match')
4174 4174 rest = rest[mend:]
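# Illustrative rendering (assumed line): iterating a linestate for the line
# "say foo twice: foo" with pattern "foo" yields
#   ('say ', ''), ('foo', 'grep.match'), (' twice: ', ''), ('foo', 'grep.match')
# which display() below writes with the matching segments labelled for
# colorization.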
4175 4175
4176 4176 matches = {}
4177 4177 copies = {}
4178 4178 def grepbody(fn, rev, body):
4179 4179 matches[rev].setdefault(fn, [])
4180 4180 m = matches[rev][fn]
4181 4181 for lnum, cstart, cend, line in matchlines(body):
4182 4182 s = linestate(line, lnum, cstart, cend)
4183 4183 m.append(s)
4184 4184
4185 4185 def difflinestates(a, b):
4186 4186 sm = difflib.SequenceMatcher(None, a, b)
4187 4187 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
4188 4188 if tag == 'insert':
4189 4189 for i in xrange(blo, bhi):
4190 4190 yield ('+', b[i])
4191 4191 elif tag == 'delete':
4192 4192 for i in xrange(alo, ahi):
4193 4193 yield ('-', a[i])
4194 4194 elif tag == 'replace':
4195 4195 for i in xrange(alo, ahi):
4196 4196 yield ('-', a[i])
4197 4197 for i in xrange(blo, bhi):
4198 4198 yield ('+', b[i])
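# Illustrative example (assumed states): with a == [l1, l2] and b == [l1, l3],
# where linestate equality compares line text, difflinestates() yields
# ('-', l2) then ('+', l3): the match-status changes that --all reports.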
4199 4199
4200 4200 def display(fn, ctx, pstates, states):
4201 4201 rev = ctx.rev()
4202 4202 if ui.quiet:
4203 4203 datefunc = util.shortdate
4204 4204 else:
4205 4205 datefunc = util.datestr
4206 4206 found = False
4207 4207 @util.cachefunc
4208 4208 def binary():
4209 4209 flog = getfile(fn)
4210 4210 return util.binary(flog.read(ctx.filenode(fn)))
4211 4211
4212 4212 if opts.get('all'):
4213 4213 iter = difflinestates(pstates, states)
4214 4214 else:
4215 4215 iter = [('', l) for l in states]
4216 4216 for change, l in iter:
4217 4217 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
4218 4218
4219 4219 if opts.get('line_number'):
4220 4220 cols.append((str(l.linenum), 'grep.linenumber'))
4221 4221 if opts.get('all'):
4222 4222 cols.append((change, 'grep.change'))
4223 4223 if opts.get('user'):
4224 4224 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
4225 4225 if opts.get('date'):
4226 4226 cols.append((datefunc(ctx.date()), 'grep.date'))
4227 4227 for col, label in cols[:-1]:
4228 4228 ui.write(col, label=label)
4229 4229 ui.write(sep, label='grep.sep')
4230 4230 ui.write(cols[-1][0], label=cols[-1][1])
4231 4231 if not opts.get('files_with_matches'):
4232 4232 ui.write(sep, label='grep.sep')
4233 4233 if not opts.get('text') and binary():
4234 4234 ui.write(" Binary file matches")
4235 4235 else:
4236 4236 for s, label in l:
4237 4237 ui.write(s, label=label)
4238 4238 ui.write(eol)
4239 4239 found = True
4240 4240 if opts.get('files_with_matches'):
4241 4241 break
4242 4242 return found
4243 4243
4244 4244 skip = {}
4245 4245 revfiles = {}
4246 4246 matchfn = scmutil.match(repo[None], pats, opts)
4247 4247 found = False
4248 4248 follow = opts.get('follow')
4249 4249
4250 4250 def prep(ctx, fns):
4251 4251 rev = ctx.rev()
4252 4252 pctx = ctx.p1()
4253 4253 parent = pctx.rev()
4254 4254 matches.setdefault(rev, {})
4255 4255 matches.setdefault(parent, {})
4256 4256 files = revfiles.setdefault(rev, [])
4257 4257 for fn in fns:
4258 4258 flog = getfile(fn)
4259 4259 try:
4260 4260 fnode = ctx.filenode(fn)
4261 4261 except error.LookupError:
4262 4262 continue
4263 4263
4264 4264 copied = flog.renamed(fnode)
4265 4265 copy = follow and copied and copied[0]
4266 4266 if copy:
4267 4267 copies.setdefault(rev, {})[fn] = copy
4268 4268 if fn in skip:
4269 4269 if copy:
4270 4270 skip[copy] = True
4271 4271 continue
4272 4272 files.append(fn)
4273 4273
4274 4274 if fn not in matches[rev]:
4275 4275 grepbody(fn, rev, flog.read(fnode))
4276 4276
4277 4277 pfn = copy or fn
4278 4278 if pfn not in matches[parent]:
4279 4279 try:
4280 4280 fnode = pctx.filenode(pfn)
4281 4281 grepbody(pfn, parent, flog.read(fnode))
4282 4282 except error.LookupError:
4283 4283 pass
4284 4284
4285 4285 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4286 4286 rev = ctx.rev()
4287 4287 parent = ctx.p1().rev()
4288 4288 for fn in sorted(revfiles.get(rev, [])):
4289 4289 states = matches[rev][fn]
4290 4290 copy = copies.get(rev, {}).get(fn)
4291 4291 if fn in skip:
4292 4292 if copy:
4293 4293 skip[copy] = True
4294 4294 continue
4295 4295 pstates = matches.get(parent, {}).get(copy or fn, [])
4296 4296 if pstates or states:
4297 4297 r = display(fn, ctx, pstates, states)
4298 4298 found = found or r
4299 4299 if r and not opts.get('all'):
4300 4300 skip[fn] = True
4301 4301 if copy:
4302 4302 skip[copy] = True
4303 4303 del matches[rev]
4304 4304 del revfiles[rev]
4305 4305
4306 4306 return not found
4307 4307
4308 4308 @command('heads',
4309 4309 [('r', 'rev', '',
4310 4310 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
4311 4311 ('t', 'topo', False, _('show topological heads only')),
4312 4312 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
4313 4313 ('c', 'closed', False, _('show normal and closed branch heads')),
4314 4314 ] + templateopts,
4315 4315 _('[-ct] [-r STARTREV] [REV]...'))
4316 4316 def heads(ui, repo, *branchrevs, **opts):
4317 4317 """show branch heads
4318 4318
4319 4319 With no arguments, show all open branch heads in the repository.
4320 4320 Branch heads are changesets that have no descendants on the
4321 4321 same branch. They are where development generally takes place and
4322 4322 are the usual targets for update and merge operations.
4323 4323
4324 4324 If one or more REVs are given, only open branch heads on the
4325 4325 branches associated with the specified changesets are shown. This
4326 4326 means that you can use :hg:`heads .` to see the heads on the
4327 4327 currently checked-out branch.
4328 4328
4329 4329 If -c/--closed is specified, also show branch heads marked closed
4330 4330 (see :hg:`commit --close-branch`).
4331 4331
4332 4332 If STARTREV is specified, only those heads that are descendants of
4333 4333 STARTREV will be displayed.
4334 4334
4335 4335 If -t/--topo is specified, named branch mechanics will be ignored and only
4336 4336 topological heads (changesets with no children) will be shown.
4337 4337
4338 4338 Returns 0 if matching heads are found, 1 if not.
4339 4339 """
4340 4340
4341 4341 start = None
4342 4342 if 'rev' in opts:
4343 4343 start = scmutil.revsingle(repo, opts['rev'], None).node()
4344 4344
4345 4345 if opts.get('topo'):
4346 4346 heads = [repo[h] for h in repo.heads(start)]
4347 4347 else:
4348 4348 heads = []
4349 4349 for branch in repo.branchmap():
4350 4350 heads += repo.branchheads(branch, start, opts.get('closed'))
4351 4351 heads = [repo[h] for h in heads]
4352 4352
4353 4353 if branchrevs:
4354 4354 branches = set(repo[br].branch() for br in branchrevs)
4355 4355 heads = [h for h in heads if h.branch() in branches]
4356 4356
4357 4357 if opts.get('active') and branchrevs:
4358 4358 dagheads = repo.heads(start)
4359 4359 heads = [h for h in heads if h.node() in dagheads]
4360 4360
4361 4361 if branchrevs:
4362 4362 haveheads = set(h.branch() for h in heads)
4363 4363 if branches - haveheads:
4364 4364 headless = ', '.join(b for b in branches - haveheads)
4365 4365 msg = _('no open branch heads found on branches %s')
4366 4366 if opts.get('rev'):
4367 4367 msg += _(' (started at %s)') % opts['rev']
4368 4368 ui.warn((msg + '\n') % headless)
4369 4369
4370 4370 if not heads:
4371 4371 return 1
4372 4372
4373 4373 heads = sorted(heads, key=lambda x: -x.rev())
4374 4374 displayer = cmdutil.show_changeset(ui, repo, opts)
4375 4375 for ctx in heads:
4376 4376 displayer.show(ctx)
4377 4377 displayer.close()
4378 4378
4379 4379 @command('help',
4380 4380 [('e', 'extension', None, _('show only help for extensions')),
4381 4381 ('c', 'command', None, _('show only help for commands')),
4382 4382 ('k', 'keyword', None, _('show topics matching keyword')),
4383 4383 ('s', 'system', [], _('show help for specific platform(s)')),
4384 4384 ],
4385 4385 _('[-ecks] [TOPIC]'),
4386 4386 norepo=True)
4387 4387 def help_(ui, name=None, **opts):
4388 4388 """show help for a given topic or a help overview
4389 4389
4390 4390 With no arguments, print a list of commands with short help messages.
4391 4391
4392 4392 Given a topic, extension, or command name, print help for that
4393 4393 topic.
4394 4394
4395 4395 Returns 0 if successful.
4396 4396 """
4397 4397
4398 4398 textwidth = min(ui.termwidth(), 80) - 2
4399 4399
4400 4400 keep = opts.get('system') or []
4401 4401 if len(keep) == 0:
4402 4402 if sys.platform.startswith('win'):
4403 4403 keep.append('windows')
4404 4404 elif sys.platform == 'OpenVMS':
4405 4405 keep.append('vms')
4406 4406 elif sys.platform == 'plan9':
4407 4407 keep.append('plan9')
4408 4408 else:
4409 4409 keep.append('unix')
4410 4410 keep.append(sys.platform.lower())
4411 4411 if ui.verbose:
4412 4412 keep.append('verbose')
4413 4413
4414 4414 section = None
4415 4415 subtopic = None
4416 4416 if name and '.' in name:
4417 4417 name, section = name.split('.', 1)
4418 4418 section = section.lower()
4419 4419 if '.' in section:
4420 4420 subtopic, section = section.split('.', 1)
4421 4421 else:
4422 4422 subtopic = section
4423 4423
4424 4424 text = help.help_(ui, name, subtopic=subtopic, **opts)
4425 4425
4426 4426 formatted, pruned = minirst.format(text, textwidth, keep=keep,
4427 4427 section=section)
4428 4428
4429 4429 # We could have been given a weird ".foo" section without a name
4430 4430 # to look for, or we could have simply failed to found "foo.bar"
4431 4431 # because bar isn't a section of foo
4432 4432 if section and not (formatted and name):
4433 4433 raise error.Abort(_("help section not found"))
4434 4434
4435 4435 if 'verbose' in pruned:
4436 4436 keep.append('omitted')
4437 4437 else:
4438 4438 keep.append('notomitted')
4439 4439 formatted, pruned = minirst.format(text, textwidth, keep=keep,
4440 4440 section=section)
4441 4441 ui.write(formatted)
4442 4442
4443 4443
4444 4444 @command('identify|id',
4445 4445 [('r', 'rev', '',
4446 4446 _('identify the specified revision'), _('REV')),
4447 4447 ('n', 'num', None, _('show local revision number')),
4448 4448 ('i', 'id', None, _('show global revision id')),
4449 4449 ('b', 'branch', None, _('show branch')),
4450 4450 ('t', 'tags', None, _('show tags')),
4451 4451 ('B', 'bookmarks', None, _('show bookmarks')),
4452 4452 ] + remoteopts,
4453 4453 _('[-nibtB] [-r REV] [SOURCE]'),
4454 4454 optionalrepo=True)
4455 4455 def identify(ui, repo, source=None, rev=None,
4456 4456 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
4457 4457 """identify the working directory or specified revision
4458 4458
4459 4459 Print a summary identifying the repository state at REV using one or
4460 4460 two parent hash identifiers, followed by a "+" if the working
4461 4461 directory has uncommitted changes, the branch name (if not default),
4462 4462 a list of tags, and a list of bookmarks.
4463 4463
4464 4464 When REV is not given, print a summary of the current state of the
4465 4465 repository.
4466 4466
4467 4467 Specifying a path to a repository root or Mercurial bundle will
4468 4468 cause lookup to operate on that repository/bundle.
4469 4469
4470 4470 .. container:: verbose
4471 4471
4472 4472 Examples:
4473 4473
4474 4474 - generate a build identifier for the working directory::
4475 4475
4476 4476 hg id --id > build-id.dat
4477 4477
4478 4478 - find the revision corresponding to a tag::
4479 4479
4480 4480 hg id -n -r 1.3
4481 4481
4482 4482 - check the most recent revision of a remote repository::
4483 4483
4484 4484 hg id -r tip http://selenic.com/hg/
4485 4485
4486 4486 See :hg:`log` for generating more information about specific revisions,
4487 4487 including full hash identifiers.
4488 4488
4489 4489 Returns 0 if successful.
4490 4490 """
4491 4491
4492 4492 if not repo and not source:
4493 4493 raise error.Abort(_("there is no Mercurial repository here "
4494 4494 "(.hg not found)"))
4495 4495
4496 4496 if ui.debugflag:
4497 4497 hexfunc = hex
4498 4498 else:
4499 4499 hexfunc = short
4500 4500 default = not (num or id or branch or tags or bookmarks)
4501 4501 output = []
4502 4502 revs = []
4503 4503
4504 4504 if source:
4505 4505 source, branches = hg.parseurl(ui.expandpath(source))
4506 4506 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4507 4507 repo = peer.local()
4508 4508 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4509 4509
4510 4510 if not repo:
4511 4511 if num or branch or tags:
4512 4512 raise error.Abort(
4513 4513 _("can't query remote revision number, branch, or tags"))
4514 4514 if not rev and revs:
4515 4515 rev = revs[0]
4516 4516 if not rev:
4517 4517 rev = "tip"
4518 4518
4519 4519 remoterev = peer.lookup(rev)
4520 4520 if default or id:
4521 4521 output = [hexfunc(remoterev)]
4522 4522
4523 4523 def getbms():
4524 4524 bms = []
4525 4525
4526 4526 if 'bookmarks' in peer.listkeys('namespaces'):
4527 4527 hexremoterev = hex(remoterev)
4528 4528 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4529 4529 if bmr == hexremoterev]
4530 4530
4531 4531 return sorted(bms)
4532 4532
4533 4533 if bookmarks:
4534 4534 output.extend(getbms())
4535 4535 elif default and not ui.quiet:
4536 4536 # multiple bookmarks for a single parent separated by '/'
4537 4537 bm = '/'.join(getbms())
4538 4538 if bm:
4539 4539 output.append(bm)
4540 4540 else:
4541 4541 ctx = scmutil.revsingle(repo, rev, None)
4542 4542
4543 4543 if ctx.rev() is None:
4544 4544 ctx = repo[None]
4545 4545 parents = ctx.parents()
4546 4546 taglist = []
4547 4547 for p in parents:
4548 4548 taglist.extend(p.tags())
4549 4549
4550 4550 changed = ""
4551 4551 if default or id or num:
4552 4552 if (any(repo.status())
4553 4553 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4554 4554 changed = '+'
4555 4555 if default or id:
4556 4556 output = ["%s%s" %
4557 4557 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4558 4558 if num:
4559 4559 output.append("%s%s" %
4560 4560 ('+'.join([str(p.rev()) for p in parents]), changed))
4561 4561 else:
4562 4562 if default or id:
4563 4563 output = [hexfunc(ctx.node())]
4564 4564 if num:
4565 4565 output.append(str(ctx.rev()))
4566 4566 taglist = ctx.tags()
4567 4567
4568 4568 if default and not ui.quiet:
4569 4569 b = ctx.branch()
4570 4570 if b != 'default':
4571 4571 output.append("(%s)" % b)
4572 4572
4573 4573 # multiple tags for a single parent separated by '/'
4574 4574 t = '/'.join(taglist)
4575 4575 if t:
4576 4576 output.append(t)
4577 4577
4578 4578 # multiple bookmarks for a single parent separated by '/'
4579 4579 bm = '/'.join(ctx.bookmarks())
4580 4580 if bm:
4581 4581 output.append(bm)
4582 4582 else:
4583 4583 if branch:
4584 4584 output.append(ctx.branch())
4585 4585
4586 4586 if tags:
4587 4587 output.extend(taglist)
4588 4588
4589 4589 if bookmarks:
4590 4590 output.extend(ctx.bookmarks())
4591 4591
4592 4592 ui.write("%s\n" % ' '.join(output))
4593 4593
4594 4594 @command('import|patch',
4595 4595 [('p', 'strip', 1,
4596 4596 _('directory strip option for patch. This has the same '
4597 4597 'meaning as the corresponding patch option'), _('NUM')),
4598 4598 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4599 4599 ('e', 'edit', False, _('invoke editor on commit messages')),
4600 4600 ('f', 'force', None,
4601 4601 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4602 4602 ('', 'no-commit', None,
4603 4603 _("don't commit, just update the working directory")),
4604 4604 ('', 'bypass', None,
4605 4605 _("apply patch without touching the working directory")),
4606 4606 ('', 'partial', None,
4607 4607 _('commit even if some hunks fail')),
4608 4608 ('', 'exact', None,
4609 4609 _('apply patch to the nodes from which it was generated')),
4610 4610 ('', 'prefix', '',
4611 4611 _('apply patch to subdirectory'), _('DIR')),
4612 4612 ('', 'import-branch', None,
4613 4613 _('use any branch information in patch (implied by --exact)'))] +
4614 4614 commitopts + commitopts2 + similarityopts,
4615 4615 _('[OPTION]... PATCH...'))
4616 4616 def import_(ui, repo, patch1=None, *patches, **opts):
4617 4617 """import an ordered set of patches
4618 4618
4619 4619 Import a list of patches and commit them individually (unless
4620 4620 --no-commit is specified).
4621 4621
4622 4622 To read a patch from standard input, use "-" as the patch name. If
4623 4623 a URL is specified, the patch will be downloaded from there.
4624 4624
4625 4625 Import first applies changes to the working directory (unless
4626 4626     --bypass is specified); import will abort if there are outstanding
4627 4627 changes.
4628 4628
4629 4629 Use --bypass to apply and commit patches directly to the
4630 4630 repository, without affecting the working directory. Without
4631 4631 --exact, patches will be applied on top of the working directory
4632 4632 parent revision.
4633 4633
4634 4634 You can import a patch straight from a mail message. Even patches
4635 4635 as attachments work (to use the body part, it must have type
4636 4636     text/plain or text/x-patch). From and Subject headers of the email
4637 4637     message are used as the default committer and commit message. All
4638 4638     text/plain body parts before the first diff are added to the commit
4639 4639 message.
4640 4640
4641 4641 If the imported patch was generated by :hg:`export`, user and
4642 4642 description from patch override values from message headers and
4643 4643     body. Values given on the command line with -m/--message and -u/--user
4644 4644 override these.
4645 4645
4646 4646 If --exact is specified, import will set the working directory to
4647 4647 the parent of each patch before applying it, and will abort if the
4648 4648 resulting changeset has a different ID than the one recorded in
4649 4649 the patch. This may happen due to character set problems or other
4650 4650 deficiencies in the text patch format.
4651 4651
4652 4652 Use --partial to ensure a changeset will be created from the patch
4653 4653 even if some hunks fail to apply. Hunks that fail to apply will be
4654 4654 written to a <target-file>.rej file. Conflicts can then be resolved
4655 4655 by hand before :hg:`commit --amend` is run to update the created
4656 4656 changeset. This flag exists to let people import patches that
4657 4657 partially apply without losing the associated metadata (author,
4658 4658 date, description, ...).
4659 4659
4660 4660 .. note::
4661 4661
4662 4662 When no hunks apply cleanly, :hg:`import --partial` will create
4663 4663 an empty changeset, importing only the patch metadata.
4664 4664
4665 4665 With -s/--similarity, hg will attempt to discover renames and
4666 4666 copies in the patch in the same way as :hg:`addremove`.
4667 4667
4668 4668 It is possible to use external patch programs to perform the patch
4669 4669 by setting the ``ui.patch`` configuration option. For the default
4670 4670 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4671 4671 See :hg:`help config` for more information about configuration
4672 4672 files and how to use these options.
4673 4673
4674 4674 See :hg:`help dates` for a list of formats valid for -d/--date.
4675 4675
4676 4676 .. container:: verbose
4677 4677
4678 4678 Examples:
4679 4679
4680 4680 - import a traditional patch from a website and detect renames::
4681 4681
4682 4682 hg import -s 80 http://example.com/bugfix.patch
4683 4683
4684 4684 - import a changeset from an hgweb server::
4685 4685
4686 4686 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4687 4687
4688 4688     - import all the patches in a Unix-style mbox::
4689 4689
4690 4690 hg import incoming-patches.mbox
4691 4691
4692 4692 - attempt to exactly restore an exported changeset (not always
4693 4693 possible)::
4694 4694
4695 4695 hg import --exact proposed-fix.patch
4696 4696
4697 4697 - use an external tool to apply a patch which is too fuzzy for
4698 4698       the default internal tool::
4699 4699
4700 4700 hg import --config ui.patch="patch --merge" fuzzy.patch
4701 4701
4702 4702     - change the default fuzzing from 2 to a less strict 7::
4703 4703
4704 4704       hg import --config patch.fuzz=7 fuzz.patch
4705 4705
4706 4706 Returns 0 on success, 1 on partial success (see --partial).
4707 4707 """
4708 4708
4709 4709 if not patch1:
4710 4710 raise error.Abort(_('need at least one patch to import'))
4711 4711
4712 4712 patches = (patch1,) + patches
4713 4713
4714 4714 date = opts.get('date')
4715 4715 if date:
4716 4716 opts['date'] = util.parsedate(date)
4717 4717
4718 4718 exact = opts.get('exact')
4719 4719 update = not opts.get('bypass')
4720 4720 if not update and opts.get('no_commit'):
4721 4721 raise error.Abort(_('cannot use --no-commit with --bypass'))
4722 4722 try:
4723 4723 sim = float(opts.get('similarity') or 0)
4724 4724 except ValueError:
4725 4725 raise error.Abort(_('similarity must be a number'))
4726 4726 if sim < 0 or sim > 100:
4727 4727 raise error.Abort(_('similarity must be between 0 and 100'))
4728 4728 if sim and not update:
4729 4729 raise error.Abort(_('cannot use --similarity with --bypass'))
4730 4730 if exact:
4731 4731 if opts.get('edit'):
4732 4732 raise error.Abort(_('cannot use --exact with --edit'))
4733 4733 if opts.get('prefix'):
4734 4734 raise error.Abort(_('cannot use --exact with --prefix'))
4735 4735
4736 4736 base = opts["base"]
4737 4737 wlock = dsguard = lock = tr = None
4738 4738 msgs = []
4739 4739 ret = 0
4740 4740
4741 4741
4742 4742 try:
4743 4743 wlock = repo.wlock()
4744 4744
4745 4745 if update:
4746 4746 cmdutil.checkunfinished(repo)
4747 4747 if (exact or not opts.get('force')):
4748 4748 cmdutil.bailifchanged(repo)
4749 4749
4750 4750 if not opts.get('no_commit'):
4751 4751 lock = repo.lock()
4752 4752 tr = repo.transaction('import')
4753 4753 else:
4754 4754 dsguard = cmdutil.dirstateguard(repo, 'import')
4755 4755 parents = repo[None].parents()
4756 4756 for patchurl in patches:
4757 4757 if patchurl == '-':
4758 4758 ui.status(_('applying patch from stdin\n'))
4759 4759 patchfile = ui.fin
4760 4760 patchurl = 'stdin' # for error message
4761 4761 else:
4762 4762 patchurl = os.path.join(base, patchurl)
4763 4763 ui.status(_('applying %s\n') % patchurl)
4764 4764 patchfile = hg.openpath(ui, patchurl)
4765 4765
4766 4766 haspatch = False
4767 4767 for hunk in patch.split(patchfile):
4768 4768 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4769 4769 parents, opts,
4770 4770 msgs, hg.clean)
4771 4771 if msg:
4772 4772 haspatch = True
4773 4773 ui.note(msg + '\n')
4774 4774 if update or exact:
4775 4775 parents = repo[None].parents()
4776 4776 else:
4777 4777 parents = [repo[node]]
4778 4778 if rej:
4779 4779 ui.write_err(_("patch applied partially\n"))
4780 4780 ui.write_err(_("(fix the .rej files and run "
4781 4781 "`hg commit --amend`)\n"))
4782 4782 ret = 1
4783 4783 break
4784 4784
4785 4785 if not haspatch:
4786 4786 raise error.Abort(_('%s: no diffs found') % patchurl)
4787 4787
4788 4788 if tr:
4789 4789 tr.close()
4790 4790 if msgs:
4791 4791 repo.savecommitmessage('\n* * *\n'.join(msgs))
4792 4792 if dsguard:
4793 4793 dsguard.close()
4794 4794 return ret
4795 4795 finally:
4796 4796 if tr:
4797 4797 tr.release()
4798 4798 release(lock, dsguard, wlock)
4799 4799
4800 4800 @command('incoming|in',
4801 4801 [('f', 'force', None,
4802 4802 _('run even if remote repository is unrelated')),
4803 4803 ('n', 'newest-first', None, _('show newest record first')),
4804 4804 ('', 'bundle', '',
4805 4805 _('file to store the bundles into'), _('FILE')),
4806 4806 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4807 4807 ('B', 'bookmarks', False, _("compare bookmarks")),
4808 4808 ('b', 'branch', [],
4809 4809 _('a specific branch you would like to pull'), _('BRANCH')),
4810 4810 ] + logopts + remoteopts + subrepoopts,
4811 4811 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4812 4812 def incoming(ui, repo, source="default", **opts):
4813 4813 """show new changesets found in source
4814 4814
4815 4815 Show new changesets found in the specified path/URL or the default
4816 4816 pull location. These are the changesets that would have been pulled
4817 4817     if a pull was requested at the time you issued this command.
4818 4818
4819 4819 See pull for valid source format details.
4820 4820
4821 4821 .. container:: verbose
4822 4822
4823 4823 With -B/--bookmarks, the result of bookmark comparison between
4824 4824 local and remote repositories is displayed. With -v/--verbose,
4825 4825 status is also displayed for each bookmark like below::
4826 4826
4827 4827 BM1 01234567890a added
4828 4828 BM2 1234567890ab advanced
4829 4829 BM3 234567890abc diverged
4830 4830 BM4 34567890abcd changed
4831 4831
4832 4832 The action taken locally when pulling depends on the
4833 4833 status of each bookmark:
4834 4834
4835 4835 :``added``: pull will create it
4836 4836 :``advanced``: pull will update it
4837 4837 :``diverged``: pull will create a divergent bookmark
4838 4838 :``changed``: result depends on remote changesets
4839 4839
4840 4840     From the point of view of pulling behavior, bookmarks
4841 4841     existing only in the remote repository are treated as ``added``,
4842 4842     even if they are in fact locally deleted.
4843 4843
4844 4844 .. container:: verbose
4845 4845
4846 4846     For a remote repository, using --bundle avoids downloading the
4847 4847 changesets twice if the incoming is followed by a pull.
4848 4848
4849 4849 Examples:
4850 4850
4851 4851 - show incoming changes with patches and full description::
4852 4852
4853 4853 hg incoming -vp
4854 4854
4855 4855 - show incoming changes excluding merges, store a bundle::
4856 4856
4857 4857 hg in -vpM --bundle incoming.hg
4858 4858 hg pull incoming.hg
4859 4859
4860 4860 - briefly list changes inside a bundle::
4861 4861
4862 4862 hg in changes.hg -T "{desc|firstline}\\n"
4863 4863
4864 4864 Returns 0 if there are incoming changes, 1 otherwise.
4865 4865 """
4866 4866 if opts.get('graph'):
4867 4867 cmdutil.checkunsupportedgraphflags([], opts)
4868 4868 def display(other, chlist, displayer):
4869 4869 revdag = cmdutil.graphrevs(other, chlist, opts)
4870 4870 cmdutil.displaygraph(ui, repo, revdag, displayer,
4871 4871 graphmod.asciiedges)
4872 4872
4873 4873 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4874 4874 return 0
4875 4875
4876 4876 if opts.get('bundle') and opts.get('subrepos'):
4877 4877 raise error.Abort(_('cannot combine --bundle and --subrepos'))
4878 4878
4879 4879 if opts.get('bookmarks'):
4880 4880 source, branches = hg.parseurl(ui.expandpath(source),
4881 4881 opts.get('branch'))
4882 4882 other = hg.peer(repo, opts, source)
4883 4883 if 'bookmarks' not in other.listkeys('namespaces'):
4884 4884 ui.warn(_("remote doesn't support bookmarks\n"))
4885 4885 return 0
4886 4886 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4887 4887 return bookmarks.incoming(ui, repo, other)
4888 4888
4889 4889 repo._subtoppath = ui.expandpath(source)
4890 4890 try:
4891 4891 return hg.incoming(ui, repo, source, opts)
4892 4892 finally:
4893 4893 del repo._subtoppath
4894 4894
4895 4895
4896 4896 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4897 4897 norepo=True)
4898 4898 def init(ui, dest=".", **opts):
4899 4899 """create a new repository in the given directory
4900 4900
4901 4901 Initialize a new repository in the given directory. If the given
4902 4902 directory does not exist, it will be created.
4903 4903
4904 4904 If no directory is given, the current directory is used.
4905 4905
4906 4906 It is possible to specify an ``ssh://`` URL as the destination.
4907 4907 See :hg:`help urls` for more information.
4908 4908
4909 4909 Returns 0 on success.
4910 4910 """
4911 4911 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4912 4912
4913 4913 @command('locate',
4914 4914 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4915 4915 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4916 4916 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4917 4917 ] + walkopts,
4918 4918 _('[OPTION]... [PATTERN]...'))
4919 4919 def locate(ui, repo, *pats, **opts):
4920 4920 """locate files matching specific patterns (DEPRECATED)
4921 4921
4922 4922 Print files under Mercurial control in the working directory whose
4923 4923 names match the given patterns.
4924 4924
4925 4925 By default, this command searches all directories in the working
4926 4926 directory. To search just the current directory and its
4927 4927 subdirectories, use "--include .".
4928 4928
4929 4929 If no patterns are given to match, this command prints the names
4930 4930 of all files under Mercurial control in the working directory.
4931 4931
4932 4932 If you want to feed the output of this command into the "xargs"
4933 4933 command, use the -0 option to both this command and "xargs". This
4934 4934 will avoid the problem of "xargs" treating single filenames that
4935 4935 contain whitespace as multiple filenames.
4936 4936
4937 4937 See :hg:`help files` for a more versatile command.
4938 4938
4939 4939 Returns 0 if a match is found, 1 otherwise.
4940 4940 """
4941 4941 if opts.get('print0'):
4942 4942 end = '\0'
4943 4943 else:
4944 4944 end = '\n'
4945 4945 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4946 4946
4947 4947 ret = 1
4948 4948 ctx = repo[rev]
4949 4949 m = scmutil.match(ctx, pats, opts, default='relglob',
4950 4950 badfn=lambda x, y: False)
4951 4951
4952 4952 for abs in ctx.matches(m):
4953 4953 if opts.get('fullpath'):
4954 4954 ui.write(repo.wjoin(abs), end)
4955 4955 else:
4956 4956 ui.write(((pats and m.rel(abs)) or abs), end)
4957 4957 ret = 0
4958 4958
4959 4959 return ret
4960 4960
4961 4961 @command('^log|history',
4962 4962 [('f', 'follow', None,
4963 4963 _('follow changeset history, or file history across copies and renames')),
4964 4964 ('', 'follow-first', None,
4965 4965 _('only follow the first parent of merge changesets (DEPRECATED)')),
4966 4966 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4967 4967 ('C', 'copies', None, _('show copied files')),
4968 4968 ('k', 'keyword', [],
4969 4969 _('do case-insensitive search for a given text'), _('TEXT')),
4970 4970 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
4971 4971 ('', 'removed', None, _('include revisions where files were removed')),
4972 4972 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4973 4973 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4974 4974 ('', 'only-branch', [],
4975 4975 _('show only changesets within the given named branch (DEPRECATED)'),
4976 4976 _('BRANCH')),
4977 4977 ('b', 'branch', [],
4978 4978 _('show changesets within the given named branch'), _('BRANCH')),
4979 4979 ('P', 'prune', [],
4980 4980 _('do not display revision or any of its ancestors'), _('REV')),
4981 4981 ] + logopts + walkopts,
4982 4982 _('[OPTION]... [FILE]'),
4983 4983 inferrepo=True)
4984 4984 def log(ui, repo, *pats, **opts):
4985 4985 """show revision history of entire repository or files
4986 4986
4987 4987 Print the revision history of the specified files or the entire
4988 4988 project.
4989 4989
4990 4990 If no revision range is specified, the default is ``tip:0`` unless
4991 4991 --follow is set, in which case the working directory parent is
4992 4992 used as the starting revision.
4993 4993
4994 4994 File history is shown without following rename or copy history of
4995 4995 files. Use -f/--follow with a filename to follow history across
4996 4996 renames and copies. --follow without a filename will only show
4997 4997 ancestors or descendants of the starting revision.
4998 4998
4999 4999 By default this command prints revision number and changeset id,
5000 5000 tags, non-trivial parents, user, date and time, and a summary for
5001 5001 each commit. When the -v/--verbose switch is used, the list of
5002 5002 changed files and full commit message are shown.
5003 5003
5004 5004 With --graph the revisions are shown as an ASCII art DAG with the most
5005 5005 recent changeset at the top.
5006 5006 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
5007 5007 and '+' represents a fork where the changeset from the lines below is a
5008 5008 parent of the 'o' merge on the same line.
5009 5009
5010 5010 .. note::
5011 5011
5012 5012 :hg:`log --patch` may generate unexpected diff output for merge
5013 5013 changesets, as it will only compare the merge changeset against
5014 5014 its first parent. Also, only files different from BOTH parents
5015 5015 will appear in files:.
5016 5016
5017 5017 .. note::
5018 5018
5019 5019 For performance reasons, :hg:`log FILE` may omit duplicate changes
5020 5020 made on branches and will not show removals or mode changes. To
5021 5021 see all such changes, use the --removed switch.
5022 5022
5023 5023 .. container:: verbose
5024 5024
5025 5025 Some examples:
5026 5026
5027 5027 - changesets with full descriptions and file lists::
5028 5028
5029 5029 hg log -v
5030 5030
5031 5031 - changesets ancestral to the working directory::
5032 5032
5033 5033 hg log -f
5034 5034
5035 5035 - last 10 commits on the current branch::
5036 5036
5037 5037 hg log -l 10 -b .
5038 5038
5039 5039 - changesets showing all modifications of a file, including removals::
5040 5040
5041 5041 hg log --removed file.c
5042 5042
5043 5043 - all changesets that touch a directory, with diffs, excluding merges::
5044 5044
5045 5045 hg log -Mp lib/
5046 5046
5047 5047 - all revision numbers that match a keyword::
5048 5048
5049 5049 hg log -k bug --template "{rev}\\n"
5050 5050
5051 5051 - the full hash identifier of the working directory parent::
5052 5052
5053 5053 hg log -r . --template "{node}\\n"
5054 5054
5055 5055 - list available log templates::
5056 5056
5057 5057 hg log -T list
5058 5058
5059 5059 - check if a given changeset is included in a tagged release::
5060 5060
5061 5061 hg log -r "a21ccf and ancestor(1.9)"
5062 5062
5063 5063 - find all changesets by some user in a date range::
5064 5064
5065 5065 hg log -k alice -d "may 2008 to jul 2008"
5066 5066
5067 5067 - summary of all changesets after the last tag::
5068 5068
5069 5069 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
5070 5070
5071 5071 See :hg:`help dates` for a list of formats valid for -d/--date.
5072 5072
5073 5073 See :hg:`help revisions` and :hg:`help revsets` for more about
5074 5074 specifying and ordering revisions.
5075 5075
5076 5076 See :hg:`help templates` for more about pre-packaged styles and
5077 5077 specifying custom templates.
5078 5078
5079 5079 Returns 0 on success.
5080 5080
5081 5081 """
5082 5082 if opts.get('follow') and opts.get('rev'):
5083 5083 opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
5084 5084 del opts['follow']
5085 5085
5086 5086 if opts.get('graph'):
5087 5087 return cmdutil.graphlog(ui, repo, *pats, **opts)
5088 5088
5089 5089 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
5090 5090 limit = cmdutil.loglimit(opts)
5091 5091 count = 0
5092 5092
5093 5093 getrenamed = None
5094 5094 if opts.get('copies'):
5095 5095 endrev = None
5096 5096 if opts.get('rev'):
5097 5097 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
5098 5098 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
5099 5099
5100 5100 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
5101 5101 for rev in revs:
5102 5102 if count == limit:
5103 5103 break
5104 5104 ctx = repo[rev]
5105 5105 copies = None
5106 5106 if getrenamed is not None and rev:
5107 5107 copies = []
5108 5108 for fn in ctx.files():
5109 5109 rename = getrenamed(fn, rev)
5110 5110 if rename:
5111 5111 copies.append((fn, rename[0]))
5112 5112 if filematcher:
5113 5113 revmatchfn = filematcher(ctx.rev())
5114 5114 else:
5115 5115 revmatchfn = None
5116 5116 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
5117 5117 if displayer.flush(ctx):
5118 5118 count += 1
5119 5119
5120 5120 displayer.close()
5121 5121
5122 5122 @command('manifest',
5123 5123 [('r', 'rev', '', _('revision to display'), _('REV')),
5124 5124 ('', 'all', False, _("list files from all revisions"))]
5125 5125 + formatteropts,
5126 5126 _('[-r REV]'))
5127 5127 def manifest(ui, repo, node=None, rev=None, **opts):
5128 5128 """output the current or given revision of the project manifest
5129 5129
5130 5130 Print a list of version controlled files for the given revision.
5131 5131 If no revision is given, the first parent of the working directory
5132 5132 is used, or the null revision if no revision is checked out.
5133 5133
5134 5134 With -v, print file permissions, symlink and executable bits.
5135 5135 With --debug, print file revision hashes.
5136 5136
5137 5137 If option --all is specified, the list of all files from all revisions
5138 5138 is printed. This includes deleted and renamed files.
5139 5139
5140 5140 Returns 0 on success.
5141 5141 """
5142 5142
5143 5143 fm = ui.formatter('manifest', opts)
5144 5144
5145 5145 if opts.get('all'):
5146 5146 if rev or node:
5147 5147 raise error.Abort(_("can't specify a revision with --all"))
5148 5148
5149 5149 res = []
5150 5150 prefix = "data/"
5151 5151 suffix = ".i"
5152 5152 plen = len(prefix)
5153 5153 slen = len(suffix)
5154 5154 with repo.lock():
5155 5155 for fn, b, size in repo.store.datafiles():
5156 5156 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
5157 5157 res.append(fn[plen:-slen])
5158 5158 for f in res:
5159 5159 fm.startitem()
5160 5160 fm.write("path", '%s\n', f)
5161 5161 fm.end()
5162 5162 return
5163 5163
5164 5164 if rev and node:
5165 5165 raise error.Abort(_("please specify just one revision"))
5166 5166
5167 5167 if not node:
5168 5168 node = rev
5169 5169
5170 5170 char = {'l': '@', 'x': '*', '': ''}
5171 5171 mode = {'l': '644', 'x': '755', '': '644'}
5172 5172 ctx = scmutil.revsingle(repo, node)
5173 5173 mf = ctx.manifest()
5174 5174 for f in ctx:
5175 5175 fm.startitem()
5176 5176 fl = ctx[f].flags()
5177 5177 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
5178 5178 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
5179 5179 fm.write('path', '%s\n', f)
5180 5180 fm.end()
5181 5181
5182 5182 @command('^merge',
5183 5183 [('f', 'force', None,
5184 5184 _('force a merge including outstanding changes (DEPRECATED)')),
5185 5185 ('r', 'rev', '', _('revision to merge'), _('REV')),
5186 5186 ('P', 'preview', None,
5187 5187 _('review revisions to merge (no merge is performed)'))
5188 5188 ] + mergetoolopts,
5189 5189 _('[-P] [-f] [[-r] REV]'))
5190 5190 def merge(ui, repo, node=None, **opts):
5191 5191 """merge another revision into working directory
5192 5192
5193 5193 The current working directory is updated with all changes made in
5194 5194 the requested revision since the last common predecessor revision.
5195 5195
5196 5196 Files that changed between either parent are marked as changed for
5197 5197 the next commit and a commit must be performed before any further
5198 5198 updates to the repository are allowed. The next commit will have
5199 5199 two parents.
5200 5200
5201 5201 ``--tool`` can be used to specify the merge tool used for file
5202 5202 merges. It overrides the HGMERGE environment variable and your
5203 5203 configuration files. See :hg:`help merge-tools` for options.
5204 5204
5205 5205 If no revision is specified, the working directory's parent is a
5206 5206 head revision, and the current branch contains exactly one other
5207 5207     head, the other head is merged by default. Otherwise, an
5208 5208     explicit revision with which to merge must be provided.
5209 5209
5210 5210 See :hg:`help resolve` for information on handling file conflicts.
5211 5211
5212 5212 To undo an uncommitted merge, use :hg:`update --clean .` which
5213 5213 will check out a clean copy of the original merge parent, losing
5214 5214 all changes.
5215 5215
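    .. container:: verbose

      Examples (illustrative; the revision name is a placeholder):

      - merge the only other head of the current branch::

          hg merge

      - merge a specific revision::

          hg merge -r 1.4
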
5216 5216 Returns 0 on success, 1 if there are unresolved files.
5217 5217 """
5218 5218
5219 5219 if opts.get('rev') and node:
5220 5220 raise error.Abort(_("please specify just one revision"))
5221 5221 if not node:
5222 5222 node = opts.get('rev')
5223 5223
5224 5224 if node:
5225 5225 node = scmutil.revsingle(repo, node).node()
5226 5226
5227 5227 if not node:
5228 5228 node = repo[destutil.destmerge(repo)].node()
5229 5229
5230 5230 if opts.get('preview'):
5231 5231 # find nodes that are ancestors of p2 but not of p1
5232 5232 p1 = repo.lookup('.')
5233 5233 p2 = repo.lookup(node)
5234 5234 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
5235 5235
5236 5236 displayer = cmdutil.show_changeset(ui, repo, opts)
5237 5237 for node in nodes:
5238 5238 displayer.show(repo[node])
5239 5239 displayer.close()
5240 5240 return 0
5241 5241
5242 5242 try:
5243 5243 # ui.forcemerge is an internal variable, do not document
5244 5244 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
5245 return hg.merge(repo, node, force=opts.get('force'))
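        # --force is passed both as force= and as mergeforce= in the call
        # below, presumably so the lower-level merge code can tell that the
        # user explicitly asked for a forced merge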
5245 force = opts.get('force')
5246 return hg.merge(repo, node, force=force, mergeforce=force)
5246 5247 finally:
5247 5248 ui.setconfig('ui', 'forcemerge', '', 'merge')
5248 5249
5249 5250 @command('outgoing|out',
5250 5251 [('f', 'force', None, _('run even when the destination is unrelated')),
5251 5252 ('r', 'rev', [],
5252 5253 _('a changeset intended to be included in the destination'), _('REV')),
5253 5254 ('n', 'newest-first', None, _('show newest record first')),
5254 5255 ('B', 'bookmarks', False, _('compare bookmarks')),
5255 5256 ('b', 'branch', [], _('a specific branch you would like to push'),
5256 5257 _('BRANCH')),
5257 5258 ] + logopts + remoteopts + subrepoopts,
5258 5259 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
5259 5260 def outgoing(ui, repo, dest=None, **opts):
5260 5261 """show changesets not found in the destination
5261 5262
5262 5263 Show changesets not found in the specified destination repository
5263 5264 or the default push location. These are the changesets that would
5264 5265 be pushed if a push was requested.
5265 5266
5266 5267 See pull for details of valid destination formats.
5267 5268
5268 5269 .. container:: verbose
5269 5270
5270 5271 With -B/--bookmarks, the result of bookmark comparison between
5271 5272 local and remote repositories is displayed. With -v/--verbose,
5272 5273 status is also displayed for each bookmark like below::
5273 5274
5274 5275 BM1 01234567890a added
5275 5276 BM2 deleted
5276 5277 BM3 234567890abc advanced
5277 5278 BM4 34567890abcd diverged
5278 5279 BM5 4567890abcde changed
5279 5280
5280 5281 The action taken when pushing depends on the
5281 5282 status of each bookmark:
5282 5283
5283 5284 :``added``: push with ``-B`` will create it
5284 5285 :``deleted``: push with ``-B`` will delete it
5285 5286 :``advanced``: push will update it
5286 5287 :``diverged``: push with ``-B`` will update it
5287 5288 :``changed``: push with ``-B`` will update it
5288 5289
5289 5290 From the point of view of pushing behavior, bookmarks
5290 5291 existing only in the remote repository are treated as
5291 5292     ``deleted``, even if they are in fact added remotely.
5292 5293
5293 5294 Returns 0 if there are outgoing changes, 1 otherwise.
5294 5295 """
5295 5296 if opts.get('graph'):
5296 5297 cmdutil.checkunsupportedgraphflags([], opts)
5297 5298 o, other = hg._outgoing(ui, repo, dest, opts)
5298 5299 if not o:
5299 5300 cmdutil.outgoinghooks(ui, repo, other, opts, o)
5300 5301 return
5301 5302
5302 5303 revdag = cmdutil.graphrevs(repo, o, opts)
5303 5304 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
5304 5305 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
5305 5306 cmdutil.outgoinghooks(ui, repo, other, opts, o)
5306 5307 return 0
5307 5308
5308 5309 if opts.get('bookmarks'):
5309 5310 dest = ui.expandpath(dest or 'default-push', dest or 'default')
5310 5311 dest, branches = hg.parseurl(dest, opts.get('branch'))
5311 5312 other = hg.peer(repo, opts, dest)
5312 5313 if 'bookmarks' not in other.listkeys('namespaces'):
5313 5314 ui.warn(_("remote doesn't support bookmarks\n"))
5314 5315 return 0
5315 5316 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
5316 5317 return bookmarks.outgoing(ui, repo, other)
5317 5318
5318 5319 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
5319 5320 try:
5320 5321 return hg.outgoing(ui, repo, dest, opts)
5321 5322 finally:
5322 5323 del repo._subtoppath
5323 5324
5324 5325 @command('parents',
5325 5326 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
5326 5327 ] + templateopts,
5327 5328 _('[-r REV] [FILE]'),
5328 5329 inferrepo=True)
5329 5330 def parents(ui, repo, file_=None, **opts):
5330 5331 """show the parents of the working directory or revision (DEPRECATED)
5331 5332
5332 5333 Print the working directory's parent revisions. If a revision is
5333 5334 given via -r/--rev, the parent of that revision will be printed.
5334 5335 If a file argument is given, the revision in which the file was
5335 5336 last changed (before the working directory revision or the
5336 5337 argument to --rev if given) is printed.
5337 5338
5338 5339 This command is equivalent to::
5339 5340
5340 5341 hg log -r "p1()+p2()" or
5341 5342 hg log -r "p1(REV)+p2(REV)" or
5342 5343 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
5343 5344 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
5344 5345
5345 5346 See :hg:`summary` and :hg:`help revsets` for related information.
5346 5347
5347 5348 Returns 0 on success.
5348 5349 """
5349 5350
5350 5351 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
5351 5352
5352 5353 if file_:
5353 5354 m = scmutil.match(ctx, (file_,), opts)
5354 5355 if m.anypats() or len(m.files()) != 1:
5355 5356 raise error.Abort(_('can only specify an explicit filename'))
5356 5357 file_ = m.files()[0]
5357 5358 filenodes = []
5358 5359 for cp in ctx.parents():
5359 5360 if not cp:
5360 5361 continue
5361 5362 try:
5362 5363 filenodes.append(cp.filenode(file_))
5363 5364 except error.LookupError:
5364 5365 pass
5365 5366 if not filenodes:
5366 5367 raise error.Abort(_("'%s' not found in manifest!") % file_)
5367 5368 p = []
5368 5369 for fn in filenodes:
5369 5370 fctx = repo.filectx(file_, fileid=fn)
5370 5371 p.append(fctx.node())
5371 5372 else:
5372 5373 p = [cp.node() for cp in ctx.parents()]
5373 5374
5374 5375 displayer = cmdutil.show_changeset(ui, repo, opts)
5375 5376 for n in p:
5376 5377 if n != nullid:
5377 5378 displayer.show(repo[n])
5378 5379 displayer.close()
5379 5380
5380 5381 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
5381 5382 def paths(ui, repo, search=None, **opts):
5382 5383 """show aliases for remote repositories
5383 5384
5384 5385 Show definition of symbolic path name NAME. If no name is given,
5385 5386 show definition of all available names.
5386 5387
5387 5388 Option -q/--quiet suppresses all output when searching for NAME
5388 5389 and shows only the path names when listing all definitions.
5389 5390
5390 5391 Path names are defined in the [paths] section of your
5391 5392 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
5392 5393 repository, ``.hg/hgrc`` is used, too.
5393 5394
5394 5395 The path names ``default`` and ``default-push`` have a special
5395 5396 meaning. When performing a push or pull operation, they are used
5396 5397 as fallbacks if no location is specified on the command-line.
5397 5398 When ``default-push`` is set, it will be used for push and
5398 5399 ``default`` will be used for pull; otherwise ``default`` is used
5399 5400 as the fallback for both. When cloning a repository, the clone
5400 5401 source is written as ``default`` in ``.hg/hgrc``.
5401 5402
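    An illustrative ``[paths]`` section (the URLs are placeholders)::

      [paths]
      default = https://hg.example.com/project
      default-push = ssh://hg@hg.example.com/project
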
5402 5403 .. note::
5403 5404
5404 5405 ``default`` and ``default-push`` apply to all inbound (e.g.
5405 5406 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
5406 5407 and :hg:`bundle`) operations.
5407 5408
5408 5409 See :hg:`help urls` for more information.
5409 5410
5410 5411 Returns 0 on success.
5411 5412 """
5412 5413 if search:
5413 5414 pathitems = [(name, path) for name, path in ui.paths.iteritems()
5414 5415 if name == search]
5415 5416 else:
5416 5417 pathitems = sorted(ui.paths.iteritems())
5417 5418
5418 5419 fm = ui.formatter('paths', opts)
5419 5420 if fm:
5420 5421 hidepassword = str
5421 5422 else:
5422 5423 hidepassword = util.hidepassword
5423 5424 if ui.quiet:
5424 5425 namefmt = '%s\n'
5425 5426 else:
5426 5427 namefmt = '%s = '
5427 5428 showsubopts = not search and not ui.quiet
5428 5429
5429 5430 for name, path in pathitems:
5430 5431 fm.startitem()
5431 5432 fm.condwrite(not search, 'name', namefmt, name)
5432 5433 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
5433 5434 for subopt, value in sorted(path.suboptions.items()):
5434 5435 assert subopt not in ('name', 'url')
5435 5436 if showsubopts:
5436 5437 fm.plain('%s:%s = ' % (name, subopt))
5437 5438 fm.condwrite(showsubopts, subopt, '%s\n', value)
5438 5439
5439 5440 fm.end()
5440 5441
5441 5442 if search and not pathitems:
5442 5443 if not ui.quiet:
5443 5444 ui.warn(_("not found!\n"))
5444 5445 return 1
5445 5446 else:
5446 5447 return 0
5447 5448
5448 5449 @command('phase',
5449 5450 [('p', 'public', False, _('set changeset phase to public')),
5450 5451 ('d', 'draft', False, _('set changeset phase to draft')),
5451 5452 ('s', 'secret', False, _('set changeset phase to secret')),
5452 5453 ('f', 'force', False, _('allow to move boundary backward')),
5453 5454 ('r', 'rev', [], _('target revision'), _('REV')),
5454 5455 ],
5455 5456 _('[-p|-d|-s] [-f] [-r] [REV...]'))
5456 5457 def phase(ui, repo, *revs, **opts):
5457 5458 """set or show the current phase name
5458 5459
5459 5460 With no argument, show the phase name of the current revision(s).
5460 5461
5461 5462 With one of -p/--public, -d/--draft or -s/--secret, change the
5462 5463 phase value of the specified revisions.
5463 5464
5464 5465     Unless -f/--force is specified, :hg:`phase` won't move changesets from a
5465 5466     lower phase to a higher phase. Phases are ordered as follows::
5466 5467
5467 5468 public < draft < secret
5468 5469
5469 5470 Returns 0 on success, 1 if some phases could not be changed.
5470 5471
5471 5472 (For more information about the phases concept, see :hg:`help phases`.)
5472 5473 """
5473 5474 # search for a unique phase argument
5474 5475 targetphase = None
5475 5476 for idx, name in enumerate(phases.phasenames):
5476 5477 if opts[name]:
5477 5478 if targetphase is not None:
5478 5479 raise error.Abort(_('only one phase can be specified'))
5479 5480 targetphase = idx
5480 5481
5481 5482 # look for specified revision
5482 5483 revs = list(revs)
5483 5484 revs.extend(opts['rev'])
5484 5485 if not revs:
5485 5486 # display both parents as the second parent phase can influence
5486 5487 # the phase of a merge commit
5487 5488 revs = [c.rev() for c in repo[None].parents()]
5488 5489
5489 5490 revs = scmutil.revrange(repo, revs)
5490 5491
5491 5492 lock = None
5492 5493 ret = 0
5493 5494 if targetphase is None:
5494 5495 # display
5495 5496 for r in revs:
5496 5497 ctx = repo[r]
5497 5498 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5498 5499 else:
5499 5500 tr = None
5500 5501 lock = repo.lock()
5501 5502 try:
5502 5503 tr = repo.transaction("phase")
5503 5504 # set phase
5504 5505 if not revs:
5505 5506 raise error.Abort(_('empty revision set'))
5506 5507 nodes = [repo[r].node() for r in revs]
5507 5508 # moving revision from public to draft may hide them
5508 5509 # We have to check result on an unfiltered repository
5509 5510 unfi = repo.unfiltered()
5510 5511 getphase = unfi._phasecache.phase
5511 5512 olddata = [getphase(unfi, r) for r in unfi]
5512 5513 phases.advanceboundary(repo, tr, targetphase, nodes)
5513 5514 if opts['force']:
5514 5515 phases.retractboundary(repo, tr, targetphase, nodes)
5515 5516 tr.close()
5516 5517 finally:
5517 5518 if tr is not None:
5518 5519 tr.release()
5519 5520 lock.release()
5520 5521 getphase = unfi._phasecache.phase
5521 5522 newdata = [getphase(unfi, r) for r in unfi]
5522 5523 changes = sum(newdata[r] != olddata[r] for r in unfi)
5523 5524 cl = unfi.changelog
5524 5525 rejected = [n for n in nodes
5525 5526 if newdata[cl.rev(n)] < targetphase]
5526 5527 if rejected:
5527 5528 ui.warn(_('cannot move %i changesets to a higher '
5528 5529 'phase, use --force\n') % len(rejected))
5529 5530 ret = 1
5530 5531 if changes:
5531 5532 msg = _('phase changed for %i changesets\n') % changes
5532 5533 if ret:
5533 5534 ui.status(msg)
5534 5535 else:
5535 5536 ui.note(msg)
5536 5537 else:
5537 5538 ui.warn(_('no phases changed\n'))
5538 5539 return ret
5539 5540
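# used after changesets have been added (e.g. by :hg:`pull`): optionally update
# the working copy and print the usual "run 'hg heads' / 'hg merge' /
# 'hg update'" hints; modheads is the changegroup result (0 when nothing was
# added; values above 1 indicate the operation brought in additional heads)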
5540 5541 def postincoming(ui, repo, modheads, optupdate, checkout):
5541 5542 if modheads == 0:
5542 5543 return
5543 5544 if optupdate:
5544 5545 try:
5545 5546 brev = checkout
5546 5547 movemarkfrom = None
5547 5548 if not checkout:
5548 5549 updata = destutil.destupdate(repo)
5549 5550 checkout, movemarkfrom, brev = updata
5550 5551 ret = hg.update(repo, checkout)
5551 5552 except error.UpdateAbort as inst:
5552 5553 msg = _("not updating: %s") % str(inst)
5553 5554 hint = inst.hint
5554 5555 raise error.UpdateAbort(msg, hint=hint)
5555 5556 if not ret and movemarkfrom:
5556 5557 if movemarkfrom == repo['.'].node():
5557 5558 pass # no-op update
5558 5559 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5559 5560 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
5560 5561 return ret
5561 5562 if modheads > 1:
5562 5563 currentbranchheads = len(repo.branchheads())
5563 5564 if currentbranchheads == modheads:
5564 5565 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5565 5566 elif currentbranchheads > 1:
5566 5567 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5567 5568 "merge)\n"))
5568 5569 else:
5569 5570 ui.status(_("(run 'hg heads' to see heads)\n"))
5570 5571 else:
5571 5572 ui.status(_("(run 'hg update' to get a working copy)\n"))
5572 5573
5573 5574 @command('^pull',
5574 5575 [('u', 'update', None,
5575 5576 _('update to new branch head if changesets were pulled')),
5576 5577 ('f', 'force', None, _('run even when remote repository is unrelated')),
5577 5578 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5578 5579 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5579 5580 ('b', 'branch', [], _('a specific branch you would like to pull'),
5580 5581 _('BRANCH')),
5581 5582 ] + remoteopts,
5582 5583 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5583 5584 def pull(ui, repo, source="default", **opts):
5584 5585 """pull changes from the specified source
5585 5586
5586 5587 Pull changes from a remote repository to a local one.
5587 5588
5588 5589 This finds all changes from the repository at the specified path
5589 5590 or URL and adds them to a local repository (the current one unless
5590 5591 -R is specified). By default, this does not update the copy of the
5591 5592 project in the working directory.
5592 5593
5593 5594 Use :hg:`incoming` if you want to see what would have been added
5594 5595 by a pull at the time you issued this command. If you then decide
5595 5596 to add those changes to the repository, you should use :hg:`pull
5596 5597 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5597 5598
5598 5599 If SOURCE is omitted, the 'default' path will be used.
5599 5600 See :hg:`help urls` for more information.
5600 5601
5601 5602 Returns 0 on success, 1 if an update had unresolved files.
5602 5603 """
5603 5604 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5604 5605 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5605 5606 other = hg.peer(repo, opts, source)
5606 5607 try:
5607 5608 revs, checkout = hg.addbranchrevs(repo, other, branches,
5608 5609 opts.get('rev'))
5609 5610
5610 5611
5611 5612 pullopargs = {}
5612 5613 if opts.get('bookmark'):
5613 5614 if not revs:
5614 5615 revs = []
5615 5616             # The list of bookmarks used here is not the one used to actually
5616 5617 # update the bookmark name. This can result in the revision pulled
5617 5618 # not ending up with the name of the bookmark because of a race
5618 5619 # condition on the server. (See issue 4689 for details)
5619 5620 remotebookmarks = other.listkeys('bookmarks')
5620 5621 pullopargs['remotebookmarks'] = remotebookmarks
5621 5622 for b in opts['bookmark']:
5622 5623 if b not in remotebookmarks:
5623 5624 raise error.Abort(_('remote bookmark %s not found!') % b)
5624 5625 revs.append(remotebookmarks[b])
5625 5626
5626 5627 if revs:
5627 5628 try:
5628 5629 # When 'rev' is a bookmark name, we cannot guarantee that it
5629 5630 # will be updated with that name because of a race condition
5630 5631 # server side. (See issue 4689 for details)
5631 5632 oldrevs = revs
5632 5633 revs = [] # actually, nodes
5633 5634 for r in oldrevs:
5634 5635 node = other.lookup(r)
5635 5636 revs.append(node)
5636 5637 if r == checkout:
5637 5638 checkout = node
5638 5639 except error.CapabilityError:
5639 5640 err = _("other repository doesn't support revision lookup, "
5640 5641 "so a rev cannot be specified.")
5641 5642 raise error.Abort(err)
5642 5643
5643 5644 pullopargs.update(opts.get('opargs', {}))
5644 5645 modheads = exchange.pull(repo, other, heads=revs,
5645 5646 force=opts.get('force'),
5646 5647 bookmarks=opts.get('bookmark', ()),
5647 5648 opargs=pullopargs).cgresult
5648 5649 if checkout:
5649 5650 checkout = str(repo.changelog.rev(checkout))
5650 5651 repo._subtoppath = source
5651 5652 try:
5652 5653 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
5653 5654
5654 5655 finally:
5655 5656 del repo._subtoppath
5656 5657
5657 5658 finally:
5658 5659 other.close()
5659 5660 return ret
5660 5661
5661 5662 @command('^push',
5662 5663 [('f', 'force', None, _('force push')),
5663 5664 ('r', 'rev', [],
5664 5665 _('a changeset intended to be included in the destination'),
5665 5666 _('REV')),
5666 5667 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5667 5668 ('b', 'branch', [],
5668 5669 _('a specific branch you would like to push'), _('BRANCH')),
5669 5670 ('', 'new-branch', False, _('allow pushing a new branch')),
5670 5671 ] + remoteopts,
5671 5672 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5672 5673 def push(ui, repo, dest=None, **opts):
5673 5674 """push changes to the specified destination
5674 5675
5675 5676 Push changesets from the local repository to the specified
5676 5677 destination.
5677 5678
5678 5679 This operation is symmetrical to pull: it is identical to a pull
5679 5680 in the destination repository from the current one.
5680 5681
5681 5682 By default, push will not allow creation of new heads at the
5682 5683 destination, since multiple heads would make it unclear which head
5683 5684 to use. In this situation, it is recommended to pull and merge
5684 5685 before pushing.
5685 5686
5686 5687 Use --new-branch if you want to allow push to create a new named
5687 5688 branch that is not present at the destination. This allows you to
5688 5689 only create a new branch without forcing other changes.
5689 5690
5690 5691 .. note::
5691 5692
5692 5693 Extra care should be taken with the -f/--force option,
5693 5694 which will push all new heads on all branches, an action which will
5694 5695 almost always cause confusion for collaborators.
5695 5696
5696 5697 If -r/--rev is used, the specified revision and all its ancestors
5697 5698 will be pushed to the remote repository.
5698 5699
5699 5700 If -B/--bookmark is used, the specified bookmarked revision, its
5700 5701 ancestors, and the bookmark will be pushed to the remote
5701 5702 repository.
5702 5703
5703 5704 Please see :hg:`help urls` for important details about ``ssh://``
5704 5705 URLs. If DESTINATION is omitted, a default path will be used.
5705 5706
5706 5707 Returns 0 if push was successful, 1 if nothing to push.
5707 5708 """
5708 5709
5709 5710 if opts.get('bookmark'):
5710 5711 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5711 5712 for b in opts['bookmark']:
5712 5713 # translate -B options to -r so changesets get pushed
5713 5714 if b in repo._bookmarks:
5714 5715 opts.setdefault('rev', []).append(b)
5715 5716 else:
5716 5717 # if we try to push a deleted bookmark, translate it to null
5717 5718 # this lets simultaneous -r, -b options continue working
5718 5719 opts.setdefault('rev', []).append("null")
5719 5720
5720 5721 path = ui.paths.getpath(dest, default=('default-push', 'default'))
5721 5722 if not path:
5722 5723 raise error.Abort(_('default repository not configured!'),
5723 5724 hint=_('see the "path" section in "hg help config"'))
5724 5725 dest = path.pushloc or path.loc
5725 5726 branches = (path.branch, opts.get('branch') or [])
5726 5727 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5727 5728 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5728 5729 other = hg.peer(repo, opts, dest)
5729 5730
5730 5731 if revs:
5731 5732 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5732 5733 if not revs:
5733 5734 raise error.Abort(_("specified revisions evaluate to an empty set"),
5734 5735 hint=_("use different revision arguments"))
5735 5736
5736 5737 repo._subtoppath = dest
5737 5738 try:
5738 5739 # push subrepos depth-first for coherent ordering
5739 5740 c = repo['']
5740 5741 subs = c.substate # only repos that are committed
5741 5742 for s in sorted(subs):
5742 5743 result = c.sub(s).push(opts)
5743 5744 if result == 0:
5744 5745 return not result
5745 5746 finally:
5746 5747 del repo._subtoppath
5747 5748 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5748 5749 newbranch=opts.get('new_branch'),
5749 5750 bookmarks=opts.get('bookmark', ()),
5750 5751 opargs=opts.get('opargs'))
5751 5752
5752 5753 result = not pushop.cgresult
5753 5754
5754 5755 if pushop.bkresult is not None:
5755 5756 if pushop.bkresult == 2:
5756 5757 result = 2
5757 5758 elif not result and pushop.bkresult:
5758 5759 result = 2
5759 5760
5760 5761 return result
5761 5762
5762 5763 @command('recover', [])
5763 5764 def recover(ui, repo):
5764 5765 """roll back an interrupted transaction
5765 5766
5766 5767 Recover from an interrupted commit or pull.
5767 5768
5768 5769 This command tries to fix the repository status after an
5769 5770 interrupted operation. It should only be necessary when Mercurial
5770 5771 suggests it.
5771 5772
5772 5773 Returns 0 if successful, 1 if nothing to recover or verify fails.
5773 5774 """
5774 5775 if repo.recover():
5775 5776 return hg.verify(repo)
5776 5777 return 1
5777 5778
5778 5779 @command('^remove|rm',
5779 5780 [('A', 'after', None, _('record delete for missing files')),
5780 5781 ('f', 'force', None,
5781 5782 _('remove (and delete) file even if added or modified')),
5782 5783 ] + subrepoopts + walkopts,
5783 5784 _('[OPTION]... FILE...'),
5784 5785 inferrepo=True)
5785 5786 def remove(ui, repo, *pats, **opts):
5786 5787 """remove the specified files on the next commit
5787 5788
5788 5789 Schedule the indicated files for removal from the current branch.
5789 5790
5790 5791 This command schedules the files to be removed at the next commit.
5791 5792 To undo a remove before that, see :hg:`revert`. To undo added
5792 5793 files, see :hg:`forget`.
5793 5794
5794 5795 .. container:: verbose
5795 5796
5796 5797 -A/--after can be used to remove only files that have already
5797 5798 been deleted, -f/--force can be used to force deletion, and -Af
5798 5799 can be used to remove files from the next revision without
5799 5800 deleting them from the working directory.
5800 5801
5801 5802 The following table details the behavior of remove for different
5802 5803 file states (columns) and option combinations (rows). The file
5803 5804 states are Added [A], Clean [C], Modified [M] and Missing [!]
5804 5805 (as reported by :hg:`status`). The actions are Warn, Remove
5805 5806 (from branch) and Delete (from disk):
5806 5807
5807 5808 ========= == == == ==
5808 5809 opt/state A C M !
5809 5810 ========= == == == ==
5810 5811 none W RD W R
5811 5812 -f R RD RD R
5812 5813 -A W W W R
5813 5814 -Af R R R R
5814 5815 ========= == == == ==
5815 5816
5816 5817 .. note::
5817 5818
5818 5819 :hg:`remove` never deletes files in Added [A] state from the
5819 5820 working directory, not even if ``--force`` is specified.
5820 5821
5821 5822 Returns 0 on success, 1 if any warnings encountered.
5822 5823 """
5823 5824
5824 5825 after, force = opts.get('after'), opts.get('force')
5825 5826 if not pats and not after:
5826 5827 raise error.Abort(_('no files specified'))
5827 5828
5828 5829 m = scmutil.match(repo[None], pats, opts)
5829 5830 subrepos = opts.get('subrepos')
5830 5831 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5831 5832
5832 5833 @command('rename|move|mv',
5833 5834 [('A', 'after', None, _('record a rename that has already occurred')),
5834 5835 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5835 5836 ] + walkopts + dryrunopts,
5836 5837 _('[OPTION]... SOURCE... DEST'))
5837 5838 def rename(ui, repo, *pats, **opts):
5838 5839 """rename files; equivalent of copy + remove
5839 5840
5840 5841 Mark dest as copies of sources; mark sources for deletion. If dest
5841 5842 is a directory, copies are put in that directory. If dest is a
5842 5843 file, there can only be one source.
5843 5844
5844 5845 By default, this command copies the contents of files as they
5845 5846 exist in the working directory. If invoked with -A/--after, the
5846 5847 operation is recorded, but no copying is performed.
5847 5848
5848 5849 This command takes effect at the next commit. To undo a rename
5849 5850 before that, see :hg:`revert`.
5850 5851
5851 5852 Returns 0 on success, 1 if errors are encountered.
5852 5853 """
5853 5854 with repo.wlock(False):
5854 5855 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5855 5856
5856 5857 @command('resolve',
5857 5858 [('a', 'all', None, _('select all unresolved files')),
5858 5859 ('l', 'list', None, _('list state of files needing merge')),
5859 5860 ('m', 'mark', None, _('mark files as resolved')),
5860 5861 ('u', 'unmark', None, _('mark files as unresolved')),
5861 5862 ('n', 'no-status', None, _('hide status prefix'))]
5862 5863 + mergetoolopts + walkopts + formatteropts,
5863 5864 _('[OPTION]... [FILE]...'),
5864 5865 inferrepo=True)
5865 5866 def resolve(ui, repo, *pats, **opts):
5866 5867 """redo merges or set/view the merge status of files
5867 5868
5868 5869 Merges with unresolved conflicts are often the result of
5869 5870 non-interactive merging using the ``internal:merge`` configuration
5870 5871 setting, or a command-line merge tool like ``diff3``. The resolve
5871 5872 command is used to manage the files involved in a merge, after
5872 5873 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5873 5874 working directory must have two parents). See :hg:`help
5874 5875 merge-tools` for information on configuring merge tools.
5875 5876
5876 5877 The resolve command can be used in the following ways:
5877 5878
5878 5879 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5879 5880 files, discarding any previous merge attempts. Re-merging is not
5880 5881 performed for files already marked as resolved. Use ``--all/-a``
5881 5882 to select all unresolved files. ``--tool`` can be used to specify
5882 5883 the merge tool used for the given files. It overrides the HGMERGE
5883 5884 environment variable and your configuration files. Previous file
5884 5885 contents are saved with a ``.orig`` suffix.
5885 5886
5886 5887 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5887 5888 (e.g. after having manually fixed-up the files). The default is
5888 5889 to mark all unresolved files.
5889 5890
5890 5891 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5891 5892 default is to mark all resolved files.
5892 5893
5893 5894 - :hg:`resolve -l`: list files which had or still have conflicts.
5894 5895 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5895 5896
5896 5897 .. note::
5897 5898
5898 5899 Mercurial will not let you commit files with unresolved merge
5899 5900 conflicts. You must use :hg:`resolve -m ...` before you can
5900 5901 commit after a conflicting merge.
5901 5902
5902 5903 Returns 0 on success, 1 if any files fail a resolve attempt.
5903 5904 """
5904 5905
5905 5906 all, mark, unmark, show, nostatus = \
5906 5907 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
5907 5908
5908 5909 if (show and (mark or unmark)) or (mark and unmark):
5909 5910 raise error.Abort(_("too many options specified"))
5910 5911 if pats and all:
5911 5912 raise error.Abort(_("can't specify --all and patterns"))
5912 5913 if not (all or pats or show or mark or unmark):
5913 5914 raise error.Abort(_('no files or directories specified'),
5914 5915 hint=('use --all to re-merge all unresolved files'))
5915 5916
5916 5917 if show:
5917 5918 fm = ui.formatter('resolve', opts)
5918 5919 ms = mergemod.mergestate.read(repo)
5919 5920 m = scmutil.match(repo[None], pats, opts)
5920 5921 for f in ms:
5921 5922 if not m(f):
5922 5923 continue
5923 5924 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
5924 5925 'd': 'driverresolved'}[ms[f]]
5925 5926 fm.startitem()
5926 5927 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
5927 5928 fm.write('path', '%s\n', f, label=l)
5928 5929 fm.end()
5929 5930 return 0
5930 5931
5931 5932 with repo.wlock():
5932 5933 ms = mergemod.mergestate.read(repo)
5933 5934
5934 5935 if not (ms.active() or repo.dirstate.p2() != nullid):
5935 5936 raise error.Abort(
5936 5937 _('resolve command not applicable when not merging'))
5937 5938
5938 5939 wctx = repo[None]
5939 5940
5940 5941 if ms.mergedriver and ms.mdstate() == 'u':
5941 5942 proceed = mergemod.driverpreprocess(repo, ms, wctx)
5942 5943 ms.commit()
5943 5944 # allow mark and unmark to go through
5944 5945 if not mark and not unmark and not proceed:
5945 5946 return 1
5946 5947
5947 5948 m = scmutil.match(wctx, pats, opts)
5948 5949 ret = 0
5949 5950 didwork = False
5950 5951 runconclude = False
5951 5952
5952 5953 tocomplete = []
5953 5954 for f in ms:
5954 5955 if not m(f):
5955 5956 continue
5956 5957
5957 5958 didwork = True
5958 5959
5959 5960 # don't let driver-resolved files be marked, and run the conclude
5960 5961 # step if asked to resolve
5961 5962 if ms[f] == "d":
5962 5963 exact = m.exact(f)
5963 5964 if mark:
5964 5965 if exact:
5965 5966 ui.warn(_('not marking %s as it is driver-resolved\n')
5966 5967 % f)
5967 5968 elif unmark:
5968 5969 if exact:
5969 5970 ui.warn(_('not unmarking %s as it is driver-resolved\n')
5970 5971 % f)
5971 5972 else:
5972 5973 runconclude = True
5973 5974 continue
5974 5975
5975 5976 if mark:
5976 5977 ms.mark(f, "r")
5977 5978 elif unmark:
5978 5979 ms.mark(f, "u")
5979 5980 else:
5980 5981 # backup pre-resolve (merge uses .orig for its own purposes)
5981 5982 a = repo.wjoin(f)
5982 5983 try:
5983 5984 util.copyfile(a, a + ".resolve")
5984 5985 except (IOError, OSError) as inst:
5985 5986 if inst.errno != errno.ENOENT:
5986 5987 raise
5987 5988
5988 5989 try:
5989 5990 # preresolve file
5990 5991 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5991 5992 'resolve')
5992 5993 complete, r = ms.preresolve(f, wctx)
5993 5994 if not complete:
5994 5995 tocomplete.append(f)
5995 5996 elif r:
5996 5997 ret = 1
5997 5998 finally:
5998 5999 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5999 6000 ms.commit()
6000 6001
6001 6002 # replace filemerge's .orig file with our resolve file, but only
6002 6003 # for merges that are complete
6003 6004 if complete:
6004 6005 try:
6005 6006 util.rename(a + ".resolve",
6006 6007 scmutil.origpath(ui, repo, a))
6007 6008 except OSError as inst:
6008 6009 if inst.errno != errno.ENOENT:
6009 6010 raise
6010 6011
6011 6012 for f in tocomplete:
6012 6013 try:
6013 6014 # resolve file
6014 6015 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
6015 6016 'resolve')
6016 6017 r = ms.resolve(f, wctx)
6017 6018 if r:
6018 6019 ret = 1
6019 6020 finally:
6020 6021 ui.setconfig('ui', 'forcemerge', '', 'resolve')
6021 6022 ms.commit()
6022 6023
6023 6024 # replace filemerge's .orig file with our resolve file
6024 6025 a = repo.wjoin(f)
6025 6026 try:
6026 6027 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
6027 6028 except OSError as inst:
6028 6029 if inst.errno != errno.ENOENT:
6029 6030 raise
6030 6031
6031 6032 ms.commit()
6032 6033 ms.recordactions()
6033 6034
6034 6035 if not didwork and pats:
6035 6036 ui.warn(_("arguments do not match paths that need resolving\n"))
6036 6037 elif ms.mergedriver and ms.mdstate() != 's':
6037 6038 # run conclude step when either a driver-resolved file is requested
6038 6039 # or there are no driver-resolved files
6039 6040 # we can't use 'ret' to determine whether any files are unresolved
6040 6041 # because we might not have tried to resolve some
6041 6042 if ((runconclude or not list(ms.driverresolved()))
6042 6043 and not list(ms.unresolved())):
6043 6044 proceed = mergemod.driverconclude(repo, ms, wctx)
6044 6045 ms.commit()
6045 6046 if not proceed:
6046 6047 return 1
6047 6048
6048 6049 # Nudge users into finishing an unfinished operation
6049 6050 unresolvedf = list(ms.unresolved())
6050 6051 driverresolvedf = list(ms.driverresolved())
6051 6052 if not unresolvedf and not driverresolvedf:
6052 6053 ui.status(_('(no more unresolved files)\n'))
6053 6054 cmdutil.checkafterresolved(repo)
6054 6055 elif not unresolvedf:
6055 6056 ui.status(_('(no more unresolved files -- '
6056 6057 'run "hg resolve --all" to conclude)\n'))
6057 6058
6058 6059 return ret
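
# Illustrative example (not from the original source): a typical
# conflict-resolution session driven by the resolve command above.
# The file name is made up.
#   $ hg merge                                 # leaves conflicts behind
#   $ hg resolve --list                        # U foo.c
#   $ hg resolve --tool internal:merge foo.c   # re-merge, keeps foo.c.orig
#   $ hg resolve --mark foo.c                  # R foo.c
#   $ hg commit -m 'merge'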
6059 6060
6060 6061 @command('revert',
6061 6062 [('a', 'all', None, _('revert all changes when no arguments given')),
6062 6063 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6063 6064 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
6064 6065 ('C', 'no-backup', None, _('do not save backup copies of files')),
6065 6066 ('i', 'interactive', None,
6066 6067 _('interactively select the changes (EXPERIMENTAL)')),
6067 6068 ] + walkopts + dryrunopts,
6068 6069 _('[OPTION]... [-r REV] [NAME]...'))
6069 6070 def revert(ui, repo, *pats, **opts):
6070 6071 """restore files to their checkout state
6071 6072
6072 6073 .. note::
6073 6074
6074 6075 To check out earlier revisions, you should use :hg:`update REV`.
6075 6076 To cancel an uncommitted merge (and lose your changes),
6076 6077 use :hg:`update --clean .`.
6077 6078
6078 6079 With no revision specified, revert the specified files or directories
6079 6080 to the contents they had in the parent of the working directory.
6080 6081 This restores the contents of files to an unmodified
6081 6082 state and unschedules adds, removes, copies, and renames. If the
6082 6083 working directory has two parents, you must explicitly specify a
6083 6084 revision.
6084 6085
6085 6086 Using the -r/--rev or -d/--date options, revert the given files or
6086 6087 directories to their states as of a specific revision. Because
6087 6088 revert does not change the working directory parents, this will
6088 6089 cause these files to appear modified. This can be helpful to "back
6089 6090 out" some or all of an earlier change. See :hg:`backout` for a
6090 6091 related method.
6091 6092
6092 6093 Modified files are saved with a .orig suffix before reverting.
6093 6094 To disable these backups, use --no-backup.
6094 6095
6095 6096 See :hg:`help dates` for a list of formats valid for -d/--date.
6096 6097
6097 6098 See :hg:`help backout` for a way to reverse the effect of an
6098 6099 earlier changeset.
6099 6100
6100 6101 Returns 0 on success.
6101 6102 """
6102 6103
6103 6104 if opts.get("date"):
6104 6105 if opts.get("rev"):
6105 6106 raise error.Abort(_("you can't specify a revision and a date"))
6106 6107 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
6107 6108
6108 6109 parent, p2 = repo.dirstate.parents()
6109 6110 if not opts.get('rev') and p2 != nullid:
6110 6111 # revert after merge is a trap for new users (issue2915)
6111 6112 raise error.Abort(_('uncommitted merge with no revision specified'),
6112 6113 hint=_('use "hg update" or see "hg help revert"'))
6113 6114
6114 6115 ctx = scmutil.revsingle(repo, opts.get('rev'))
6115 6116
6116 6117 if (not (pats or opts.get('include') or opts.get('exclude') or
6117 6118 opts.get('all') or opts.get('interactive'))):
6118 6119 msg = _("no files or directories specified")
6119 6120 if p2 != nullid:
6120 6121 hint = _("uncommitted merge, use --all to discard all changes,"
6121 6122 " or 'hg update -C .' to abort the merge")
6122 6123 raise error.Abort(msg, hint=hint)
6123 6124 dirty = any(repo.status())
6124 6125 node = ctx.node()
6125 6126 if node != parent:
6126 6127 if dirty:
6127 6128 hint = _("uncommitted changes, use --all to discard all"
6128 6129 " changes, or 'hg update %s' to update") % ctx.rev()
6129 6130 else:
6130 6131 hint = _("use --all to revert all files,"
6131 6132 " or 'hg update %s' to update") % ctx.rev()
6132 6133 elif dirty:
6133 6134 hint = _("uncommitted changes, use --all to discard all changes")
6134 6135 else:
6135 6136 hint = _("use --all to revert all files")
6136 6137 raise error.Abort(msg, hint=hint)
6137 6138
6138 6139 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
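
# Illustrative example (not from the original source): reverting one file
# to an older revision, as described in the docstring above. The file
# name and revision number are made up.
#   $ hg revert -r 42 foo.c           # foo.c now shows up as modified
#   $ hg revert --no-backup foo.c     # back to the parent, no foo.c.orig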
6139 6140
6140 6141 @command('rollback', dryrunopts +
6141 6142 [('f', 'force', False, _('ignore safety measures'))])
6142 6143 def rollback(ui, repo, **opts):
6143 6144 """roll back the last transaction (DANGEROUS) (DEPRECATED)
6144 6145
6145 6146 Please use :hg:`commit --amend` instead of rollback to correct
6146 6147 mistakes in the last commit.
6147 6148
6148 6149 This command should be used with care. There is only one level of
6149 6150 rollback, and there is no way to undo a rollback. It will also
6150 6151 restore the dirstate at the time of the last transaction, losing
6151 6152 any dirstate changes since that time. This command does not alter
6152 6153 the working directory.
6153 6154
6154 6155 Transactions are used to encapsulate the effects of all commands
6155 6156 that create new changesets or propagate existing changesets into a
6156 6157 repository.
6157 6158
6158 6159 .. container:: verbose
6159 6160
6160 6161 For example, the following commands are transactional, and their
6161 6162 effects can be rolled back:
6162 6163
6163 6164 - commit
6164 6165 - import
6165 6166 - pull
6166 6167 - push (with this repository as the destination)
6167 6168 - unbundle
6168 6169
6169 6170 To avoid permanent data loss, rollback will refuse to roll back a
6170 6171 commit transaction if it isn't checked out. Use --force to
6171 6172 override this protection.
6172 6173
6173 6174 This command is not intended for use on public repositories. Once
6174 6175 changes are visible for pull by other users, rolling a transaction
6175 6176 back locally is ineffective (someone else may already have pulled
6176 6177 the changes). Furthermore, a race is possible with readers of the
6177 6178 repository; for example an in-progress pull from the repository
6178 6179 may fail if a rollback is performed.
6179 6180
6180 6181 Returns 0 on success, 1 if no rollback data is available.
6181 6182 """
6182 6183 return repo.rollback(dryrun=opts.get('dry_run'),
6183 6184 force=opts.get('force'))
6184 6185
6185 6186 @command('root', [])
6186 6187 def root(ui, repo):
6187 6188 """print the root (top) of the current working directory
6188 6189
6189 6190 Print the root directory of the current repository.
6190 6191
6191 6192 Returns 0 on success.
6192 6193 """
6193 6194 ui.write(repo.root + "\n")
6194 6195
6195 6196 @command('^serve',
6196 6197 [('A', 'accesslog', '', _('name of access log file to write to'),
6197 6198 _('FILE')),
6198 6199 ('d', 'daemon', None, _('run server in background')),
6199 6200 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
6200 6201 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
6201 6202 # use string type so we can check whether a value was passed
6202 6203 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
6203 6204 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
6204 6205 _('ADDR')),
6205 6206 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
6206 6207 _('PREFIX')),
6207 6208 ('n', 'name', '',
6208 6209 _('name to show in web pages (default: working directory)'), _('NAME')),
6209 6210 ('', 'web-conf', '',
6210 6211 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
6211 6212 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
6212 6213 _('FILE')),
6213 6214 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
6214 6215 ('', 'stdio', None, _('for remote clients')),
6215 6216 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
6216 6217 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
6217 6218 ('', 'style', '', _('template style to use'), _('STYLE')),
6218 6219 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
6219 6220 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
6220 6221 _('[OPTION]...'),
6221 6222 optionalrepo=True)
6222 6223 def serve(ui, repo, **opts):
6223 6224 """start stand-alone webserver
6224 6225
6225 6226 Start a local HTTP repository browser and pull server. You can use
6226 6227 this for ad-hoc sharing and browsing of repositories. It is
6227 6228 recommended to use a real web server to serve a repository for
6228 6229 longer periods of time.
6229 6230
6230 6231 Please note that the server does not implement access control.
6231 6232 This means that, by default, anybody can read from the server and
6232 6233 nobody can write to it. Set the ``web.allow_push``
6233 6234 option to ``*`` to allow everybody to push to the server. You
6234 6235 should use a real web server if you need to authenticate users.
6235 6236
6236 6237 By default, the server logs accesses to stdout and errors to
6237 6238 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
6238 6239 files.
6239 6240
6240 6241 To have the server choose a free port number to listen on, specify
6241 6242 a port number of 0; in this case, the server will print the port
6242 6243 number it uses.
6243 6244
6244 6245 Returns 0 on success.
6245 6246 """
6246 6247
6247 6248 if opts["stdio"] and opts["cmdserver"]:
6248 6249 raise error.Abort(_("cannot use --stdio with --cmdserver"))
6249 6250
6250 6251 if opts["stdio"]:
6251 6252 if repo is None:
6252 6253 raise error.RepoError(_("there is no Mercurial repository here"
6253 6254 " (.hg not found)"))
6254 6255 s = sshserver.sshserver(ui, repo)
6255 6256 s.serve_forever()
6256 6257
6257 6258 if opts["cmdserver"]:
6258 6259 service = commandserver.createservice(ui, repo, opts)
6259 6260 else:
6260 6261 service = hgweb.createservice(ui, repo, opts)
6261 6262 return cmdutil.service(opts, initfn=service.init, runfn=service.run)
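
# Illustrative example (not from the original source): serving the current
# repository in the background on an OS-chosen port, logging to files as
# suggested in the docstring above. The file names are made up.
#   $ hg serve -p 0 -d -A access.log -E error.log --pid-file hg.pid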
6262 6263
6263 6264 @command('^status|st',
6264 6265 [('A', 'all', None, _('show status of all files')),
6265 6266 ('m', 'modified', None, _('show only modified files')),
6266 6267 ('a', 'added', None, _('show only added files')),
6267 6268 ('r', 'removed', None, _('show only removed files')),
6268 6269 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
6269 6270 ('c', 'clean', None, _('show only files without changes')),
6270 6271 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
6271 6272 ('i', 'ignored', None, _('show only ignored files')),
6272 6273 ('n', 'no-status', None, _('hide status prefix')),
6273 6274 ('C', 'copies', None, _('show source of copied files')),
6274 6275 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
6275 6276 ('', 'rev', [], _('show difference from revision'), _('REV')),
6276 6277 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
6277 6278 ] + walkopts + subrepoopts + formatteropts,
6278 6279 _('[OPTION]... [FILE]...'),
6279 6280 inferrepo=True)
6280 6281 def status(ui, repo, *pats, **opts):
6281 6282 """show changed files in the working directory
6282 6283
6283 6284 Show status of files in the repository. If names are given, only
6284 6285 files that match are shown. Files that are clean or ignored or
6285 6286 the source of a copy/move operation are not listed unless
6286 6287 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
6287 6288 Unless options described with "show only ..." are given, the
6288 6289 options -mardu are used.
6289 6290
6290 6291 Option -q/--quiet hides untracked (unknown and ignored) files
6291 6292 unless explicitly requested with -u/--unknown or -i/--ignored.
6292 6293
6293 6294 .. note::
6294 6295
6295 6296 :hg:`status` may appear to disagree with diff if permissions have
6296 6297 changed or a merge has occurred. The standard diff format does
6297 6298 not report permission changes and diff only reports changes
6298 6299 relative to one merge parent.
6299 6300
6300 6301 If one revision is given, it is used as the base revision.
6301 6302 If two revisions are given, the differences between them are
6302 6303 shown. The --change option can also be used as a shortcut to list
6303 6304 the changed files of a revision from its first parent.
6304 6305
6305 6306 The codes used to show the status of files are::
6306 6307
6307 6308 M = modified
6308 6309 A = added
6309 6310 R = removed
6310 6311 C = clean
6311 6312 ! = missing (deleted by non-hg command, but still tracked)
6312 6313 ? = not tracked
6313 6314 I = ignored
6314 6315 = origin of the previous file (with --copies)
6315 6316
6316 6317 .. container:: verbose
6317 6318
6318 6319 Examples:
6319 6320
6320 6321 - show changes in the working directory relative to a
6321 6322 changeset::
6322 6323
6323 6324 hg status --rev 9353
6324 6325
6325 6326 - show changes in the working directory relative to the
6326 6327 current directory (see :hg:`help patterns` for more information)::
6327 6328
6328 6329 hg status re:
6329 6330
6330 6331 - show all changes including copies in an existing changeset::
6331 6332
6332 6333 hg status --copies --change 9353
6333 6334
6334 6335 - get a NUL separated list of added files, suitable for xargs::
6335 6336
6336 6337 hg status -an0
6337 6338
6338 6339 Returns 0 on success.
6339 6340 """
6340 6341
6341 6342 revs = opts.get('rev')
6342 6343 change = opts.get('change')
6343 6344
6344 6345 if revs and change:
6345 6346 msg = _('cannot specify --rev and --change at the same time')
6346 6347 raise error.Abort(msg)
6347 6348 elif change:
6348 6349 node2 = scmutil.revsingle(repo, change, None).node()
6349 6350 node1 = repo[node2].p1().node()
6350 6351 else:
6351 6352 node1, node2 = scmutil.revpair(repo, revs)
6352 6353
6353 6354 if pats:
6354 6355 cwd = repo.getcwd()
6355 6356 else:
6356 6357 cwd = ''
6357 6358
6358 6359 if opts.get('print0'):
6359 6360 end = '\0'
6360 6361 else:
6361 6362 end = '\n'
6362 6363 copy = {}
6363 6364 states = 'modified added removed deleted unknown ignored clean'.split()
6364 6365 show = [k for k in states if opts.get(k)]
6365 6366 if opts.get('all'):
6366 6367 show += ui.quiet and (states[:4] + ['clean']) or states
6367 6368 if not show:
6368 6369 if ui.quiet:
6369 6370 show = states[:4]
6370 6371 else:
6371 6372 show = states[:5]
6372 6373
6373 6374 m = scmutil.match(repo[node2], pats, opts)
6374 6375 stat = repo.status(node1, node2, m,
6375 6376 'ignored' in show, 'clean' in show, 'unknown' in show,
6376 6377 opts.get('subrepos'))
6377 6378 changestates = zip(states, 'MAR!?IC', stat)
6378 6379
6379 6380 if (opts.get('all') or opts.get('copies')
6380 6381 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
6381 6382 copy = copies.pathcopies(repo[node1], repo[node2], m)
6382 6383
6383 6384 fm = ui.formatter('status', opts)
6384 6385 fmt = '%s' + end
6385 6386 showchar = not opts.get('no_status')
6386 6387
6387 6388 for state, char, files in changestates:
6388 6389 if state in show:
6389 6390 label = 'status.' + state
6390 6391 for f in files:
6391 6392 fm.startitem()
6392 6393 fm.condwrite(showchar, 'status', '%s ', char, label=label)
6393 6394 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
6394 6395 if f in copy:
6395 6396 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
6396 6397 label='status.copied')
6397 6398 fm.end()
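
# Illustrative example (not from the original source): the formatter set
# up above also drives template output, so machine-readable status is
# available without parsing the default two-column format.
#   $ hg status -Tjson                  # records with "status" and "path"
#   $ hg status -an0 | xargs -0 touch   # NUL-separated list of added files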
6398 6399
6399 6400 @command('^summary|sum',
6400 6401 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
6401 6402 def summary(ui, repo, **opts):
6402 6403 """summarize working directory state
6403 6404
6404 6405 This generates a brief summary of the working directory state,
6405 6406 including parents, branch, commit status, phase and available updates.
6406 6407
6407 6408 With the --remote option, this will check the default paths for
6408 6409 incoming and outgoing changes. This can be time-consuming.
6409 6410
6410 6411 Returns 0 on success.
6411 6412 """
6412 6413
6413 6414 ctx = repo[None]
6414 6415 parents = ctx.parents()
6415 6416 pnode = parents[0].node()
6416 6417 marks = []
6417 6418
6418 6419 for p in parents:
6419 6420 # label with log.changeset (instead of log.parent) since this
6420 6421 # shows a working directory parent *changeset*:
6421 6422 # i18n: column positioning for "hg summary"
6422 6423 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
6423 6424 label='log.changeset changeset.%s' % p.phasestr())
6424 6425 ui.write(' '.join(p.tags()), label='log.tag')
6425 6426 if p.bookmarks():
6426 6427 marks.extend(p.bookmarks())
6427 6428 if p.rev() == -1:
6428 6429 if not len(repo):
6429 6430 ui.write(_(' (empty repository)'))
6430 6431 else:
6431 6432 ui.write(_(' (no revision checked out)'))
6432 6433 ui.write('\n')
6433 6434 if p.description():
6434 6435 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
6435 6436 label='log.summary')
6436 6437
6437 6438 branch = ctx.branch()
6438 6439 bheads = repo.branchheads(branch)
6439 6440 # i18n: column positioning for "hg summary"
6440 6441 m = _('branch: %s\n') % branch
6441 6442 if branch != 'default':
6442 6443 ui.write(m, label='log.branch')
6443 6444 else:
6444 6445 ui.status(m, label='log.branch')
6445 6446
6446 6447 if marks:
6447 6448 active = repo._activebookmark
6448 6449 # i18n: column positioning for "hg summary"
6449 6450 ui.write(_('bookmarks:'), label='log.bookmark')
6450 6451 if active is not None:
6451 6452 if active in marks:
6452 6453 ui.write(' *' + active, label=activebookmarklabel)
6453 6454 marks.remove(active)
6454 6455 else:
6455 6456 ui.write(' [%s]' % active, label=activebookmarklabel)
6456 6457 for m in marks:
6457 6458 ui.write(' ' + m, label='log.bookmark')
6458 6459 ui.write('\n', label='log.bookmark')
6459 6460
6460 6461 status = repo.status(unknown=True)
6461 6462
6462 6463 c = repo.dirstate.copies()
6463 6464 copied, renamed = [], []
6464 6465 for d, s in c.iteritems():
6465 6466 if s in status.removed:
6466 6467 status.removed.remove(s)
6467 6468 renamed.append(d)
6468 6469 else:
6469 6470 copied.append(d)
6470 6471 if d in status.added:
6471 6472 status.added.remove(d)
6472 6473
6473 6474 try:
6474 6475 ms = mergemod.mergestate.read(repo)
6475 6476 except error.UnsupportedMergeRecords as e:
6476 6477 s = ' '.join(e.recordtypes)
6477 6478 ui.warn(
6478 6479 _('warning: merge state has unsupported record types: %s\n') % s)
6479 6480 unresolved = 0
6480 6481 else:
6481 6482 unresolved = [f for f in ms if ms[f] == 'u']
6482 6483
6483 6484 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6484 6485
6485 6486 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6486 6487 (ui.label(_('%d added'), 'status.added'), status.added),
6487 6488 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6488 6489 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6489 6490 (ui.label(_('%d copied'), 'status.copied'), copied),
6490 6491 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6491 6492 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6492 6493 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6493 6494 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6494 6495 t = []
6495 6496 for l, s in labels:
6496 6497 if s:
6497 6498 t.append(l % len(s))
6498 6499
6499 6500 t = ', '.join(t)
6500 6501 cleanworkdir = False
6501 6502
6502 6503 if repo.vfs.exists('graftstate'):
6503 6504 t += _(' (graft in progress)')
6504 6505 if repo.vfs.exists('updatestate'):
6505 6506 t += _(' (interrupted update)')
6506 6507 elif len(parents) > 1:
6507 6508 t += _(' (merge)')
6508 6509 elif branch != parents[0].branch():
6509 6510 t += _(' (new branch)')
6510 6511 elif (parents[0].closesbranch() and
6511 6512 pnode in repo.branchheads(branch, closed=True)):
6512 6513 t += _(' (head closed)')
6513 6514 elif not (status.modified or status.added or status.removed or renamed or
6514 6515 copied or subs):
6515 6516 t += _(' (clean)')
6516 6517 cleanworkdir = True
6517 6518 elif pnode not in bheads:
6518 6519 t += _(' (new branch head)')
6519 6520
6520 6521 if parents:
6521 6522 pendingphase = max(p.phase() for p in parents)
6522 6523 else:
6523 6524 pendingphase = phases.public
6524 6525
6525 6526 if pendingphase > phases.newcommitphase(ui):
6526 6527 t += ' (%s)' % phases.phasenames[pendingphase]
6527 6528
6528 6529 if cleanworkdir:
6529 6530 # i18n: column positioning for "hg summary"
6530 6531 ui.status(_('commit: %s\n') % t.strip())
6531 6532 else:
6532 6533 # i18n: column positioning for "hg summary"
6533 6534 ui.write(_('commit: %s\n') % t.strip())
6534 6535
6535 6536 # all ancestors of branch heads - all ancestors of parent = new csets
6536 6537 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6537 6538 bheads))
6538 6539
6539 6540 if new == 0:
6540 6541 # i18n: column positioning for "hg summary"
6541 6542 ui.status(_('update: (current)\n'))
6542 6543 elif pnode not in bheads:
6543 6544 # i18n: column positioning for "hg summary"
6544 6545 ui.write(_('update: %d new changesets (update)\n') % new)
6545 6546 else:
6546 6547 # i18n: column positioning for "hg summary"
6547 6548 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6548 6549 (new, len(bheads)))
6549 6550
6550 6551 t = []
6551 6552 draft = len(repo.revs('draft()'))
6552 6553 if draft:
6553 6554 t.append(_('%d draft') % draft)
6554 6555 secret = len(repo.revs('secret()'))
6555 6556 if secret:
6556 6557 t.append(_('%d secret') % secret)
6557 6558
6558 6559 if draft or secret:
6559 6560 ui.status(_('phases: %s\n') % ', '.join(t))
6560 6561
6561 6562 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6562 6563 for trouble in ("unstable", "divergent", "bumped"):
6563 6564 numtrouble = len(repo.revs(trouble + "()"))
6564 6565 # We write all the possibilities to ease translation
6565 6566 troublemsg = {
6566 6567 "unstable": _("unstable: %d changesets"),
6567 6568 "divergent": _("divergent: %d changesets"),
6568 6569 "bumped": _("bumped: %d changesets"),
6569 6570 }
6570 6571 if numtrouble > 0:
6571 6572 ui.status(troublemsg[trouble] % numtrouble + "\n")
6572 6573
6573 6574 cmdutil.summaryhooks(ui, repo)
6574 6575
6575 6576 if opts.get('remote'):
6576 6577 needsincoming, needsoutgoing = True, True
6577 6578 else:
6578 6579 needsincoming, needsoutgoing = False, False
6579 6580 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6580 6581 if i:
6581 6582 needsincoming = True
6582 6583 if o:
6583 6584 needsoutgoing = True
6584 6585 if not needsincoming and not needsoutgoing:
6585 6586 return
6586 6587
6587 6588 def getincoming():
6588 6589 source, branches = hg.parseurl(ui.expandpath('default'))
6589 6590 sbranch = branches[0]
6590 6591 try:
6591 6592 other = hg.peer(repo, {}, source)
6592 6593 except error.RepoError:
6593 6594 if opts.get('remote'):
6594 6595 raise
6595 6596 return source, sbranch, None, None, None
6596 6597 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6597 6598 if revs:
6598 6599 revs = [other.lookup(rev) for rev in revs]
6599 6600 ui.debug('comparing with %s\n' % util.hidepassword(source))
6600 6601 repo.ui.pushbuffer()
6601 6602 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6602 6603 repo.ui.popbuffer()
6603 6604 return source, sbranch, other, commoninc, commoninc[1]
6604 6605
6605 6606 if needsincoming:
6606 6607 source, sbranch, sother, commoninc, incoming = getincoming()
6607 6608 else:
6608 6609 source = sbranch = sother = commoninc = incoming = None
6609 6610
6610 6611 def getoutgoing():
6611 6612 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6612 6613 dbranch = branches[0]
6613 6614 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6614 6615 if source != dest:
6615 6616 try:
6616 6617 dother = hg.peer(repo, {}, dest)
6617 6618 except error.RepoError:
6618 6619 if opts.get('remote'):
6619 6620 raise
6620 6621 return dest, dbranch, None, None
6621 6622 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6622 6623 elif sother is None:
6623 6624 # there is no explicit destination peer, but the source one is invalid
6624 6625 return dest, dbranch, None, None
6625 6626 else:
6626 6627 dother = sother
6627 6628 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6628 6629 common = None
6629 6630 else:
6630 6631 common = commoninc
6631 6632 if revs:
6632 6633 revs = [repo.lookup(rev) for rev in revs]
6633 6634 repo.ui.pushbuffer()
6634 6635 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6635 6636 commoninc=common)
6636 6637 repo.ui.popbuffer()
6637 6638 return dest, dbranch, dother, outgoing
6638 6639
6639 6640 if needsoutgoing:
6640 6641 dest, dbranch, dother, outgoing = getoutgoing()
6641 6642 else:
6642 6643 dest = dbranch = dother = outgoing = None
6643 6644
6644 6645 if opts.get('remote'):
6645 6646 t = []
6646 6647 if incoming:
6647 6648 t.append(_('1 or more incoming'))
6648 6649 o = outgoing.missing
6649 6650 if o:
6650 6651 t.append(_('%d outgoing') % len(o))
6651 6652 other = dother or sother
6652 6653 if 'bookmarks' in other.listkeys('namespaces'):
6653 6654 counts = bookmarks.summary(repo, other)
6654 6655 if counts[0] > 0:
6655 6656 t.append(_('%d incoming bookmarks') % counts[0])
6656 6657 if counts[1] > 0:
6657 6658 t.append(_('%d outgoing bookmarks') % counts[1])
6658 6659
6659 6660 if t:
6660 6661 # i18n: column positioning for "hg summary"
6661 6662 ui.write(_('remote: %s\n') % (', '.join(t)))
6662 6663 else:
6663 6664 # i18n: column positioning for "hg summary"
6664 6665 ui.status(_('remote: (synced)\n'))
6665 6666
6666 6667 cmdutil.summaryremotehooks(ui, repo, opts,
6667 6668 ((source, sbranch, sother, commoninc),
6668 6669 (dest, dbranch, dother, outgoing)))
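
# Illustrative example (not from the original source): the lines emitted
# by the code above combine into output roughly like the following; the
# revision hash and all counts are made up.
#   parent: 4:a1b2c3d4e5f6 tip
#    add feature X
#   branch: default
#   commit: 2 modified, 1 unknown
#   update: (current)
#   phases: 3 draft
#   remote: 1 or more incoming, 2 outgoing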
6669 6670
6670 6671 @command('tag',
6671 6672 [('f', 'force', None, _('force tag')),
6672 6673 ('l', 'local', None, _('make the tag local')),
6673 6674 ('r', 'rev', '', _('revision to tag'), _('REV')),
6674 6675 ('', 'remove', None, _('remove a tag')),
6675 6676 # -l/--local is already there, commitopts cannot be used
6676 6677 ('e', 'edit', None, _('invoke editor on commit messages')),
6677 6678 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6678 6679 ] + commitopts2,
6679 6680 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6680 6681 def tag(ui, repo, name1, *names, **opts):
6681 6682 """add one or more tags for the current or given revision
6682 6683
6683 6684 Name a particular revision using <name>.
6684 6685
6685 6686 Tags are used to name particular revisions of the repository and are
6686 6687 very useful to compare different revisions, to go back to significant
6687 6688 earlier versions or to mark branch points as releases, etc. Changing
6688 6689 an existing tag is normally disallowed; use -f/--force to override.
6689 6690
6690 6691 If no revision is given, the parent of the working directory is
6691 6692 used.
6692 6693
6693 6694 To facilitate version control, distribution, and merging of tags,
6694 6695 they are stored as a file named ".hgtags" which is managed similarly
6695 6696 to other project files and can be hand-edited if necessary. This
6696 6697 also means that tagging creates a new commit. The file
6697 6698 ".hg/localtags" is used for local tags (not shared among
6698 6699 repositories).
6699 6700
6700 6701 Tag commits are usually made at the head of a branch. If the parent
6701 6702 of the working directory is not a branch head, :hg:`tag` aborts; use
6702 6703 -f/--force to force the tag commit to be based on a non-head
6703 6704 changeset.
6704 6705
6705 6706 See :hg:`help dates` for a list of formats valid for -d/--date.
6706 6707
6707 6708 Since tag names have priority over branch names during revision
6708 6709 lookup, using an existing branch name as a tag name is discouraged.
6709 6710
6710 6711 Returns 0 on success.
6711 6712 """
6712 6713 wlock = lock = None
6713 6714 try:
6714 6715 wlock = repo.wlock()
6715 6716 lock = repo.lock()
6716 6717 rev_ = "."
6717 6718 names = [t.strip() for t in (name1,) + names]
6718 6719 if len(names) != len(set(names)):
6719 6720 raise error.Abort(_('tag names must be unique'))
6720 6721 for n in names:
6721 6722 scmutil.checknewlabel(repo, n, 'tag')
6722 6723 if not n:
6723 6724 raise error.Abort(_('tag names cannot consist entirely of '
6724 6725 'whitespace'))
6725 6726 if opts.get('rev') and opts.get('remove'):
6726 6727 raise error.Abort(_("--rev and --remove are incompatible"))
6727 6728 if opts.get('rev'):
6728 6729 rev_ = opts['rev']
6729 6730 message = opts.get('message')
6730 6731 if opts.get('remove'):
6731 6732 if opts.get('local'):
6732 6733 expectedtype = 'local'
6733 6734 else:
6734 6735 expectedtype = 'global'
6735 6736
6736 6737 for n in names:
6737 6738 if not repo.tagtype(n):
6738 6739 raise error.Abort(_("tag '%s' does not exist") % n)
6739 6740 if repo.tagtype(n) != expectedtype:
6740 6741 if expectedtype == 'global':
6741 6742 raise error.Abort(_("tag '%s' is not a global tag") % n)
6742 6743 else:
6743 6744 raise error.Abort(_("tag '%s' is not a local tag") % n)
6744 6745 rev_ = 'null'
6745 6746 if not message:
6746 6747 # we don't translate commit messages
6747 6748 message = 'Removed tag %s' % ', '.join(names)
6748 6749 elif not opts.get('force'):
6749 6750 for n in names:
6750 6751 if n in repo.tags():
6751 6752 raise error.Abort(_("tag '%s' already exists "
6752 6753 "(use -f to force)") % n)
6753 6754 if not opts.get('local'):
6754 6755 p1, p2 = repo.dirstate.parents()
6755 6756 if p2 != nullid:
6756 6757 raise error.Abort(_('uncommitted merge'))
6757 6758 bheads = repo.branchheads()
6758 6759 if not opts.get('force') and bheads and p1 not in bheads:
6759 6760 raise error.Abort(_('not at a branch head (use -f to force)'))
6760 6761 r = scmutil.revsingle(repo, rev_).node()
6761 6762
6762 6763 if not message:
6763 6764 # we don't translate commit messages
6764 6765 message = ('Added tag %s for changeset %s' %
6765 6766 (', '.join(names), short(r)))
6766 6767
6767 6768 date = opts.get('date')
6768 6769 if date:
6769 6770 date = util.parsedate(date)
6770 6771
6771 6772 if opts.get('remove'):
6772 6773 editform = 'tag.remove'
6773 6774 else:
6774 6775 editform = 'tag.add'
6775 6776 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6776 6777
6777 6778 # don't allow tagging the null rev
6778 6779 if (not opts.get('remove') and
6779 6780 scmutil.revsingle(repo, rev_).rev() == nullrev):
6780 6781 raise error.Abort(_("cannot tag null revision"))
6781 6782
6782 6783 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6783 6784 editor=editor)
6784 6785 finally:
6785 6786 release(lock, wlock)
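
# Illustrative example (not from the original source): the tag names and
# revision below are made up.
#   $ hg tag -r 1a2b3c4d v1.0           # commits a new entry to .hgtags
#   $ hg tag --local snapshot           # recorded only in .hg/localtags
#   $ hg tag --remove v1.0              # removes the tag in another commit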
6786 6787
6787 6788 @command('tags', formatteropts, '')
6788 6789 def tags(ui, repo, **opts):
6789 6790 """list repository tags
6790 6791
6791 6792 This lists both regular and local tags. When the -v/--verbose
6792 6793 switch is used, a third column "local" is printed for local tags.
6793 6794 When the -q/--quiet switch is used, only the tag name is printed.
6794 6795
6795 6796 Returns 0 on success.
6796 6797 """
6797 6798
6798 6799 fm = ui.formatter('tags', opts)
6799 6800 hexfunc = fm.hexfunc
6800 6801 tagtype = ""
6801 6802
6802 6803 for t, n in reversed(repo.tagslist()):
6803 6804 hn = hexfunc(n)
6804 6805 label = 'tags.normal'
6805 6806 tagtype = ''
6806 6807 if repo.tagtype(t) == 'local':
6807 6808 label = 'tags.local'
6808 6809 tagtype = 'local'
6809 6810
6810 6811 fm.startitem()
6811 6812 fm.write('tag', '%s', t, label=label)
6812 6813 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6813 6814 fm.condwrite(not ui.quiet, 'rev node', fmt,
6814 6815 repo.changelog.rev(n), hn, label=label)
6815 6816 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6816 6817 tagtype, label=label)
6817 6818 fm.plain('\n')
6818 6819 fm.end()
6819 6820
6820 6821 @command('tip',
6821 6822 [('p', 'patch', None, _('show patch')),
6822 6823 ('g', 'git', None, _('use git extended diff format')),
6823 6824 ] + templateopts,
6824 6825 _('[-p] [-g]'))
6825 6826 def tip(ui, repo, **opts):
6826 6827 """show the tip revision (DEPRECATED)
6827 6828
6828 6829 The tip revision (usually just called the tip) is the changeset
6829 6830 most recently added to the repository (and therefore the most
6830 6831 recently changed head).
6831 6832
6832 6833 If you have just made a commit, that commit will be the tip. If
6833 6834 you have just pulled changes from another repository, the tip of
6834 6835 that repository becomes the current tip. The "tip" tag is special
6835 6836 and cannot be renamed or assigned to a different changeset.
6836 6837
6837 6838 This command is deprecated, please use :hg:`heads` instead.
6838 6839
6839 6840 Returns 0 on success.
6840 6841 """
6841 6842 displayer = cmdutil.show_changeset(ui, repo, opts)
6842 6843 displayer.show(repo['tip'])
6843 6844 displayer.close()
6844 6845
6845 6846 @command('unbundle',
6846 6847 [('u', 'update', None,
6847 6848 _('update to new branch head if changesets were unbundled'))],
6848 6849 _('[-u] FILE...'))
6849 6850 def unbundle(ui, repo, fname1, *fnames, **opts):
6850 6851 """apply one or more changegroup files
6851 6852
6852 6853 Apply one or more compressed changegroup files generated by the
6853 6854 bundle command.
6854 6855
6855 6856 Returns 0 on success, 1 if an update has unresolved files.
6856 6857 """
6857 6858 fnames = (fname1,) + fnames
6858 6859
6859 6860 with repo.lock():
6860 6861 for fname in fnames:
6861 6862 f = hg.openpath(ui, fname)
6862 6863 gen = exchange.readbundle(ui, f, fname)
6863 6864 if isinstance(gen, bundle2.unbundle20):
6864 6865 tr = repo.transaction('unbundle')
6865 6866 try:
6866 6867 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
6867 6868 url='bundle:' + fname)
6868 6869 tr.close()
6869 6870 except error.BundleUnknownFeatureError as exc:
6870 6871 raise error.Abort(_('%s: unknown bundle feature, %s')
6871 6872 % (fname, exc),
6872 6873 hint=_("see https://mercurial-scm.org/"
6873 6874 "wiki/BundleFeature for more "
6874 6875 "information"))
6875 6876 finally:
6876 6877 if tr:
6877 6878 tr.release()
6878 6879 changes = [r.get('return', 0)
6879 6880 for r in op.records['changegroup']]
6880 6881 modheads = changegroup.combineresults(changes)
6881 6882 elif isinstance(gen, streamclone.streamcloneapplier):
6882 6883 raise error.Abort(
6883 6884 _('packed bundles cannot be applied with '
6884 6885 '"hg unbundle"'),
6885 6886 hint=_('use "hg debugapplystreamclonebundle"'))
6886 6887 else:
6887 6888 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
6888 6889
6889 6890 return postincoming(ui, repo, modheads, opts.get('update'), None)
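
# Illustrative example (not from the original source): round-tripping
# changesets through a bundle file created with the companion `hg bundle`
# command mentioned in the docstring. The file name is made up.
#   $ hg bundle --base null all.hg      # in the source repository
#   $ hg unbundle -u all.hg             # apply and update in the destination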
6890 6891
6891 6892 @command('^update|up|checkout|co',
6892 6893 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6893 6894 ('c', 'check', None,
6894 6895 _('update across branches if no uncommitted changes')),
6895 6896 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6896 6897 ('r', 'rev', '', _('revision'), _('REV'))
6897 6898 ] + mergetoolopts,
6898 6899 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6899 6900 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6900 6901 tool=None):
6901 6902 """update working directory (or switch revisions)
6902 6903
6903 6904 Update the repository's working directory to the specified
6904 6905 changeset. If no changeset is specified, update to the tip of the
6905 6906 current named branch and move the active bookmark (see :hg:`help
6906 6907 bookmarks`).
6907 6908
6908 6909 Update sets the working directory's parent revision to the specified
6909 6910 changeset (see :hg:`help parents`).
6910 6911
6911 6912 If the changeset is not a descendant or ancestor of the working
6912 6913 directory's parent, the update is aborted. With the -c/--check
6913 6914 option, the working directory is checked for uncommitted changes; if
6914 6915 none are found, the working directory is updated to the specified
6915 6916 changeset.
6916 6917
6917 6918 .. container:: verbose
6918 6919
6919 6920 The following rules apply when the working directory contains
6920 6921 uncommitted changes:
6921 6922
6922 6923 1. If neither -c/--check nor -C/--clean is specified, and if
6923 6924 the requested changeset is an ancestor or descendant of
6924 6925 the working directory's parent, the uncommitted changes
6925 6926 are merged into the requested changeset and the merged
6926 6927 result is left uncommitted. If the requested changeset is
6927 6928 not an ancestor or descendant (that is, it is on another
6928 6929 branch), the update is aborted and the uncommitted changes
6929 6930 are preserved.
6930 6931
6931 6932 2. With the -c/--check option, the update is aborted and the
6932 6933 uncommitted changes are preserved.
6933 6934
6934 6935 3. With the -C/--clean option, uncommitted changes are discarded and
6935 6936 the working directory is updated to the requested changeset.
6936 6937
6937 6938 To cancel an uncommitted merge (and lose your changes), use
6938 6939 :hg:`update --clean .`.
6939 6940
6940 6941 Use null as the changeset to remove the working directory (like
6941 6942 :hg:`clone -U`).
6942 6943
6943 6944 If you want to revert just one file to an older revision, use
6944 6945 :hg:`revert [-r REV] NAME`.
6945 6946
6946 6947 See :hg:`help dates` for a list of formats valid for -d/--date.
6947 6948
6948 6949 Returns 0 on success, 1 if there are unresolved files.
6949 6950 """
6950 6951 movemarkfrom = None
6951 6952 if rev and node:
6952 6953 raise error.Abort(_("please specify just one revision"))
6953 6954
6954 6955 if rev is None or rev == '':
6955 6956 rev = node
6956 6957
6957 6958 with repo.wlock():
6958 6959 cmdutil.clearunfinished(repo)
6959 6960
6960 6961 if date:
6961 6962 if rev is not None:
6962 6963 raise error.Abort(_("you can't specify a revision and a date"))
6963 6964 rev = cmdutil.finddate(ui, repo, date)
6964 6965
6965 6966 # if we defined a bookmark, we have to remember the original name
6966 6967 brev = rev
6967 6968 rev = scmutil.revsingle(repo, rev, rev).rev()
6968 6969
6969 6970 if check and clean:
6970 6971 raise error.Abort(_("cannot specify both -c/--check and -C/--clean")
6971 6972 )
6972 6973
6973 6974 if check:
6974 6975 cmdutil.bailifchanged(repo, merge=False)
6975 6976 if rev is None:
6976 6977 updata = destutil.destupdate(repo, clean=clean, check=check)
6977 6978 rev, movemarkfrom, brev = updata
6978 6979
6979 6980 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6980 6981
6981 6982 if clean:
6982 6983 ret = hg.clean(repo, rev)
6983 6984 else:
6984 6985 ret = hg.update(repo, rev)
6985 6986
6986 6987 if not ret and movemarkfrom:
6987 6988 if movemarkfrom == repo['.'].node():
6988 6989 pass # no-op update
6989 6990 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6990 6991 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
6991 6992 else:
6992 6993 # this can happen with a non-linear update
6993 6994 ui.status(_("(leaving bookmark %s)\n") %
6994 6995 repo._activebookmark)
6995 6996 bookmarks.deactivate(repo)
6996 6997 elif brev in repo._bookmarks:
6997 6998 bookmarks.activate(repo, brev)
6998 6999 ui.status(_("(activating bookmark %s)\n") % brev)
6999 7000 elif brev:
7000 7001 if repo._activebookmark:
7001 7002 ui.status(_("(leaving bookmark %s)\n") %
7002 7003 repo._activebookmark)
7003 7004 bookmarks.deactivate(repo)
7004 7005
7005 7006 return ret
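
# Illustrative example (not from the original source): the three
# behaviours listed in the docstring above, with a made-up revision.
#   $ hg update -r 1.9          # carry uncommitted changes over to rev 1.9
#   $ hg update --check 1.9     # abort instead if the working dir is dirty
#   $ hg update --clean 1.9     # discard uncommitted changes first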
7006 7007
7007 7008 @command('verify', [])
7008 7009 def verify(ui, repo):
7009 7010 """verify the integrity of the repository
7010 7011
7011 7012 Verify the integrity of the current repository.
7012 7013
7013 7014 This will perform an extensive check of the repository's
7014 7015 integrity, validating the hashes and checksums of each entry in
7015 7016 the changelog, manifest, and tracked files, as well as the
7016 7017 integrity of their crosslinks and indices.
7017 7018
7018 7019 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7019 7020 for more information about recovery from corruption of the
7020 7021 repository.
7021 7022
7022 7023 Returns 0 on success, 1 if errors are encountered.
7023 7024 """
7024 7025 return hg.verify(repo)
7025 7026
7026 7027 @command('version', [], norepo=True)
7027 7028 def version_(ui):
7028 7029 """output version and copyright information"""
7029 7030 ui.write(_("Mercurial Distributed SCM (version %s)\n")
7030 7031 % util.version())
7031 7032 ui.status(_(
7032 7033 "(see https://mercurial-scm.org for more information)\n"
7033 7034 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
7034 7035 "This is free software; see the source for copying conditions. "
7035 7036 "There is NO\nwarranty; "
7036 7037 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7037 7038 ))
7038 7039
7039 7040 ui.note(_("\nEnabled extensions:\n\n"))
7040 7041 if ui.verbose:
7041 7042 # format names and versions into columns
7042 7043 names = []
7043 7044 vers = []
7044 7045 place = []
7045 7046 for name, module in extensions.extensions():
7046 7047 names.append(name)
7047 7048 vers.append(extensions.moduleversion(module))
7048 7049 if extensions.ismoduleinternal(module):
7049 7050 place.append(_("internal"))
7050 7051 else:
7051 7052 place.append(_("external"))
7052 7053 if names:
7053 7054 maxnamelen = max(len(n) for n in names)
7054 7055 for i, name in enumerate(names):
7055 7056 ui.write(" %-*s %s %s\n" %
7056 7057 (maxnamelen, name, place[i], vers[i]))
@@ -1,918 +1,918 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import errno
12 12 import os
13 13 import shutil
14 14
15 15 from .i18n import _
16 16 from .node import nullid
17 17
18 18 from . import (
19 19 bookmarks,
20 20 bundlerepo,
21 21 cmdutil,
22 22 discovery,
23 23 error,
24 24 exchange,
25 25 extensions,
26 26 httppeer,
27 27 localrepo,
28 28 lock,
29 29 merge as mergemod,
30 30 node,
31 31 phases,
32 32 repoview,
33 33 scmutil,
34 34 sshpeer,
35 35 statichttprepo,
36 36 ui as uimod,
37 37 unionrepo,
38 38 url,
39 39 util,
40 40 verify as verifymod,
41 41 )
42 42
43 43 release = lock.release
44 44
45 45 def _local(path):
46 46 path = util.expandpath(util.urllocalpath(path))
47 47 return (os.path.isfile(path) and bundlerepo or localrepo)
48 48
49 49 def addbranchrevs(lrepo, other, branches, revs):
50 50 peer = other.peer() # a courtesy to callers using a localrepo for other
51 51 hashbranch, branches = branches
52 52 if not hashbranch and not branches:
53 53 x = revs or None
54 54 if util.safehasattr(revs, 'first'):
55 55 y = revs.first()
56 56 elif revs:
57 57 y = revs[0]
58 58 else:
59 59 y = None
60 60 return x, y
61 61 if revs:
62 62 revs = list(revs)
63 63 else:
64 64 revs = []
65 65
66 66 if not peer.capable('branchmap'):
67 67 if branches:
68 68 raise error.Abort(_("remote branch lookup not supported"))
69 69 revs.append(hashbranch)
70 70 return revs, revs[0]
71 71 branchmap = peer.branchmap()
72 72
73 73 def primary(branch):
74 74 if branch == '.':
75 75 if not lrepo:
76 76 raise error.Abort(_("dirstate branch not accessible"))
77 77 branch = lrepo.dirstate.branch()
78 78 if branch in branchmap:
79 79 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
80 80 return True
81 81 else:
82 82 return False
83 83
84 84 for branch in branches:
85 85 if not primary(branch):
86 86 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
87 87 if hashbranch:
88 88 if not primary(hashbranch):
89 89 revs.append(hashbranch)
90 90 return revs, revs[0]
91 91
92 92 def parseurl(path, branches=None):
93 93 '''parse url#branch, returning (url, (branch, branches))'''
94 94
95 95 u = util.url(path)
96 96 branch = None
97 97 if u.fragment:
98 98 branch = u.fragment
99 99 u.fragment = None
100 100 return str(u), (branch, branches or [])
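
# Illustrative example (not from the original source) of the parsing
# performed above; the URL is made up.
#   parseurl('https://example.org/repo#stable')
#   # -> ('https://example.org/repo', ('stable', []))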
101 101
102 102 schemes = {
103 103 'bundle': bundlerepo,
104 104 'union': unionrepo,
105 105 'file': _local,
106 106 'http': httppeer,
107 107 'https': httppeer,
108 108 'ssh': sshpeer,
109 109 'static-http': statichttprepo,
110 110 }
111 111
112 112 def _peerlookup(path):
113 113 u = util.url(path)
114 114 scheme = u.scheme or 'file'
115 115 thing = schemes.get(scheme) or schemes['file']
116 116 try:
117 117 return thing(path)
118 118 except TypeError:
119 119 # we can't test callable(thing) because 'thing' can be an unloaded
120 120 # module that implements __call__
121 121 if not util.safehasattr(thing, 'instance'):
122 122 raise
123 123 return thing
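
# Illustrative example (not from the original source): how the scheme
# table above dispatches a couple of made-up paths.
#   _peerlookup('ssh://user@host//srv/repo')   # -> sshpeer
#   _peerlookup('relative/path/to/repo')       # -> localrepo (via 'file')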
124 124
125 125 def islocal(repo):
126 126 '''return true if repo (or path pointing to repo) is local'''
127 127 if isinstance(repo, str):
128 128 try:
129 129 return _peerlookup(repo).islocal(repo)
130 130 except AttributeError:
131 131 return False
132 132 return repo.local()
133 133
134 134 def openpath(ui, path):
135 135 '''open path with open if local, url.open if remote'''
136 136 pathurl = util.url(path, parsequery=False, parsefragment=False)
137 137 if pathurl.islocal():
138 138 return util.posixfile(pathurl.localpath(), 'rb')
139 139 else:
140 140 return url.open(ui, path)
141 141
142 142 # a list of (ui, repo) functions called for wire peer initialization
143 143 wirepeersetupfuncs = []
144 144
145 145 def _peerorrepo(ui, path, create=False):
146 146 """return a repository object for the specified path"""
147 147 obj = _peerlookup(path).instance(ui, path, create)
148 148 ui = getattr(obj, "ui", ui)
149 149 for name, module in extensions.extensions(ui):
150 150 hook = getattr(module, 'reposetup', None)
151 151 if hook:
152 152 hook(ui, obj)
153 153 if not obj.local():
154 154 for f in wirepeersetupfuncs:
155 155 f(ui, obj)
156 156 return obj
157 157
158 158 def repository(ui, path='', create=False):
159 159 """return a repository object for the specified path"""
160 160 peer = _peerorrepo(ui, path, create)
161 161 repo = peer.local()
162 162 if not repo:
163 163 raise error.Abort(_("repository '%s' is not local") %
164 164 (path or peer.url()))
165 165 return repo.filtered('visible')
166 166
167 167 def peer(uiorrepo, opts, path, create=False):
168 168 '''return a repository peer for the specified path'''
169 169 rui = remoteui(uiorrepo, opts)
170 170 return _peerorrepo(rui, path, create).peer()
171 171
172 172 def defaultdest(source):
173 173 '''return default destination of clone if none is given
174 174
175 175 >>> defaultdest('foo')
176 176 'foo'
177 177 >>> defaultdest('/foo/bar')
178 178 'bar'
179 179 >>> defaultdest('/')
180 180 ''
181 181 >>> defaultdest('')
182 182 ''
183 183 >>> defaultdest('http://example.org/')
184 184 ''
185 185 >>> defaultdest('http://example.org/foo/')
186 186 'foo'
187 187 '''
188 188 path = util.url(source).path
189 189 if not path:
190 190 return ''
191 191 return os.path.basename(os.path.normpath(path))
192 192
193 193 def share(ui, source, dest=None, update=True, bookmarks=True):
194 194 '''create a shared repository'''
195 195
196 196 if not islocal(source):
197 197 raise error.Abort(_('can only share local repositories'))
198 198
199 199 if not dest:
200 200 dest = defaultdest(source)
201 201 else:
202 202 dest = ui.expandpath(dest)
203 203
204 204 if isinstance(source, str):
205 205 origsource = ui.expandpath(source)
206 206 source, branches = parseurl(origsource)
207 207 srcrepo = repository(ui, source)
208 208 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
209 209 else:
210 210 srcrepo = source.local()
211 211 origsource = source = srcrepo.url()
212 212 checkout = None
213 213
214 214 sharedpath = srcrepo.sharedpath # if our source is already sharing
215 215
216 216 destwvfs = scmutil.vfs(dest, realpath=True)
217 217 destvfs = scmutil.vfs(os.path.join(destwvfs.base, '.hg'), realpath=True)
218 218
219 219 if destvfs.lexists():
220 220 raise error.Abort(_('destination already exists'))
221 221
222 222 if not destwvfs.isdir():
223 223 destwvfs.mkdir()
224 224 destvfs.makedir()
225 225
226 226 requirements = ''
227 227 try:
228 228 requirements = srcrepo.vfs.read('requires')
229 229 except IOError as inst:
230 230 if inst.errno != errno.ENOENT:
231 231 raise
232 232
233 233 requirements += 'shared\n'
234 234 destvfs.write('requires', requirements)
235 235 destvfs.write('sharedpath', sharedpath)
236 236
237 237 r = repository(ui, destwvfs.base)
238 238 postshare(srcrepo, r, bookmarks=bookmarks)
239 239
240 240 if update:
241 241 r.ui.status(_("updating working directory\n"))
242 242 if update is not True:
243 243 checkout = update
244 244 for test in (checkout, 'default', 'tip'):
245 245 if test is None:
246 246 continue
247 247 try:
248 248 uprev = r.lookup(test)
249 249 break
250 250 except error.RepoLookupError:
251 251 continue
252 252 _update(r, uprev)
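
# Illustrative example (not from the original source): the paths below are
# made up.
#   share(ui, '/srv/main-repo', '/srv/light-clone')
#   # /srv/light-clone/.hg/requires   gains a 'shared' entry
#   # /srv/light-clone/.hg/sharedpath points at /srv/main-repo/.hg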
253 253
254 254 def postshare(sourcerepo, destrepo, bookmarks=True):
255 255 """Called after a new shared repo is created.
256 256
257 257 The new repo only has a requirements file and pointer to the source.
258 258 This function configures additional shared data.
259 259
260 260 Extensions can wrap this function and write additional entries to
261 261 destrepo/.hg/shared to indicate additional pieces of data to be shared.
262 262 """
263 263 default = sourcerepo.ui.config('paths', 'default')
264 264 if default:
265 265 fp = destrepo.vfs("hgrc", "w", text=True)
266 266 fp.write("[paths]\n")
267 267 fp.write("default = %s\n" % default)
268 268 fp.close()
269 269
270 270 if bookmarks:
271 271 fp = destrepo.vfs('shared', 'w')
272 272 fp.write('bookmarks\n')
273 273 fp.close()
274 274
275 275 def copystore(ui, srcrepo, destpath):
276 276 '''copy files from store of srcrepo in destpath
277 277
278 278 returns destlock
279 279 '''
280 280 destlock = None
281 281 try:
282 282 hardlink = None
283 283 num = 0
284 284 closetopic = [None]
285 285 def prog(topic, pos):
286 286 if pos is None:
287 287 closetopic[0] = topic
288 288 else:
289 289 ui.progress(topic, pos + num)
290 290 srcpublishing = srcrepo.publishing()
291 291 srcvfs = scmutil.vfs(srcrepo.sharedpath)
292 292 dstvfs = scmutil.vfs(destpath)
293 293 for f in srcrepo.store.copylist():
294 294 if srcpublishing and f.endswith('phaseroots'):
295 295 continue
296 296 dstbase = os.path.dirname(f)
297 297 if dstbase and not dstvfs.exists(dstbase):
298 298 dstvfs.mkdir(dstbase)
299 299 if srcvfs.exists(f):
300 300 if f.endswith('data'):
301 301 # 'dstbase' may be empty (e.g. revlog format 0)
302 302 lockfile = os.path.join(dstbase, "lock")
303 303 # lock to avoid premature writing to the target
304 304 destlock = lock.lock(dstvfs, lockfile)
305 305 hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
306 306 hardlink, progress=prog)
307 307 num += n
308 308 if hardlink:
309 309 ui.debug("linked %d files\n" % num)
310 310 if closetopic[0]:
311 311 ui.progress(closetopic[0], None)
312 312 else:
313 313 ui.debug("copied %d files\n" % num)
314 314 if closetopic[0]:
315 315 ui.progress(closetopic[0], None)
316 316 return destlock
317 317 except: # re-raises
318 318 release(destlock)
319 319 raise
320 320
321 321 def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False,
322 322 rev=None, update=True, stream=False):
323 323 """Perform a clone using a shared repo.
324 324
325 325 The store for the repository will be located at <sharepath>/.hg. The
326 326 specified revisions will be cloned or pulled from "source". A shared repo
327 327 will be created at "dest" and a working copy will be created if "update" is
328 328 True.
329 329 """
330 330 revs = None
331 331 if rev:
332 332 if not srcpeer.capable('lookup'):
333 333 raise error.Abort(_("src repository does not support "
334 334 "revision lookup and so doesn't "
335 335 "support clone by revision"))
336 336 revs = [srcpeer.lookup(r) for r in rev]
337 337
338 338 basename = os.path.basename(sharepath)
339 339
340 340 if os.path.exists(sharepath):
341 341 ui.status(_('(sharing from existing pooled repository %s)\n') %
342 342 basename)
343 343 else:
344 344 ui.status(_('(sharing from new pooled repository %s)\n') % basename)
345 345 # Always use pull mode because hardlinks in share mode don't work well.
346 346 # Never update because working copies aren't necessary in share mode.
347 347 clone(ui, peeropts, source, dest=sharepath, pull=True,
348 348 rev=rev, update=False, stream=stream)
349 349
350 350 sharerepo = repository(ui, path=sharepath)
351 351 share(ui, sharerepo, dest=dest, update=update, bookmarks=False)
352 352
353 353 # We need to perform a pull against the dest repo to fetch bookmarks
354 354 # and other non-store data that isn't shared by default. In the case of a
355 355 # non-existent shared repo, this means we pull from the remote twice. This
356 356 # is a bit weird. But at the time it was implemented, there wasn't an easy
357 357 # way to pull just non-changegroup data.
358 358 destrepo = repository(ui, path=dest)
359 359 exchange.pull(destrepo, srcpeer, heads=revs)
360 360
361 361 return srcpeer, peer(ui, peeropts, dest)
362 362
363 363 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
364 364 update=True, stream=False, branch=None, shareopts=None):
365 365 """Make a copy of an existing repository.
366 366
367 367 Create a copy of an existing repository in a new directory. The
368 368 source and destination are URLs, as passed to the repository
369 369 function. Returns a pair of repository peers, the source and
370 370 newly created destination.
371 371
372 372 The location of the source is added to the new repository's
373 373 .hg/hgrc file, as the default to be used for future pulls and
374 374 pushes.
375 375
376 376 If an exception is raised, the partly cloned/updated destination
377 377 repository will be deleted.
378 378
379 379 Arguments:
380 380
381 381 source: repository object or URL
382 382
383 383 dest: URL of destination repository to create (defaults to base
384 384 name of source repository)
385 385
386 386 pull: always pull from source repository, even in local case or if the
387 387 server prefers streaming
388 388
389 389 stream: stream raw data uncompressed from repository (fast over
390 390 LAN, slow over WAN)
391 391
392 392 rev: revision to clone up to (implies pull=True)
393 393
394 394 update: update working directory after clone completes, if
395 395 destination is local repository (True means update to default rev,
396 396 anything else is treated as a revision)
397 397
398 398 branch: branches to clone
399 399
400 400 shareopts: dict of options to control auto sharing behavior. The "pool" key
401 401 activates auto sharing mode and defines the directory for stores. The
402 402 "mode" key determines how to construct the directory name of the shared
403 403 repository. "identity" means the name is derived from the node of the first
404 404 changeset in the repository. "remote" means the name is derived from the
405 405 remote's path/URL. Defaults to "identity."
406 406 """
407 407
408 408 if isinstance(source, str):
409 409 origsource = ui.expandpath(source)
410 410 source, branch = parseurl(origsource, branch)
411 411 srcpeer = peer(ui, peeropts, source)
412 412 else:
413 413 srcpeer = source.peer() # in case we were called with a localrepo
414 414 branch = (None, branch or [])
415 415 origsource = source = srcpeer.url()
416 416 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
417 417
418 418 if dest is None:
419 419 dest = defaultdest(source)
420 420 if dest:
421 421 ui.status(_("destination directory: %s\n") % dest)
422 422 else:
423 423 dest = ui.expandpath(dest)
424 424
425 425 dest = util.urllocalpath(dest)
426 426 source = util.urllocalpath(source)
427 427
428 428 if not dest:
429 429 raise error.Abort(_("empty destination path is not valid"))
430 430
431 431 destvfs = scmutil.vfs(dest, expandpath=True)
432 432 if destvfs.lexists():
433 433 if not destvfs.isdir():
434 434 raise error.Abort(_("destination '%s' already exists") % dest)
435 435 elif destvfs.listdir():
436 436 raise error.Abort(_("destination '%s' is not empty") % dest)
437 437
438 438 shareopts = shareopts or {}
439 439 sharepool = shareopts.get('pool')
440 440 sharenamemode = shareopts.get('mode')
441 441 if sharepool and islocal(dest):
442 442 sharepath = None
443 443 if sharenamemode == 'identity':
444 444 # Resolve the name from the initial changeset in the remote
445 445 # repository. This returns nullid when the remote is empty. It
446 446 # raises RepoLookupError if revision 0 is filtered or otherwise
447 447 # not available. If we fail to resolve, sharing is not enabled.
448 448 try:
449 449 rootnode = srcpeer.lookup('0')
450 450 if rootnode != node.nullid:
451 451 sharepath = os.path.join(sharepool, node.hex(rootnode))
452 452 else:
453 453 ui.status(_('(not using pooled storage: '
454 454 'remote appears to be empty)\n'))
455 455 except error.RepoLookupError:
456 456 ui.status(_('(not using pooled storage: '
457 457 'unable to resolve identity of remote)\n'))
458 458 elif sharenamemode == 'remote':
459 459 sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
460 460 else:
461 461 raise error.Abort('unknown share naming mode: %s' % sharenamemode)
462 462
463 463 if sharepath:
464 464 return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
465 465 dest, pull=pull, rev=rev, update=update,
466 466 stream=stream)
467 467
468 468 srclock = destlock = cleandir = None
469 469 srcrepo = srcpeer.local()
470 470 try:
471 471 abspath = origsource
472 472 if islocal(origsource):
473 473 abspath = os.path.abspath(util.urllocalpath(origsource))
474 474
475 475 if islocal(dest):
476 476 cleandir = dest
477 477
478 478 copy = False
479 479 if (srcrepo and srcrepo.cancopy() and islocal(dest)
480 480 and not phases.hassecret(srcrepo)):
481 481 copy = not pull and not rev
482 482
483 483 if copy:
484 484 try:
485 485 # we use a lock here because if we race with commit, we
486 486 # can end up with extra data in the cloned revlogs that's
487 487 # not pointed to by changesets, thus causing verify to
488 488 # fail
489 489 srclock = srcrepo.lock(wait=False)
490 490 except error.LockError:
491 491 copy = False
492 492
493 493 if copy:
494 494 srcrepo.hook('preoutgoing', throw=True, source='clone')
495 495 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
496 496 if not os.path.exists(dest):
497 497 os.mkdir(dest)
498 498 else:
499 499 # only clean up directories we create ourselves
500 500 cleandir = hgdir
501 501 try:
502 502 destpath = hgdir
503 503 util.makedir(destpath, notindexed=True)
504 504 except OSError as inst:
505 505 if inst.errno == errno.EEXIST:
506 506 cleandir = None
507 507 raise error.Abort(_("destination '%s' already exists")
508 508 % dest)
509 509 raise
510 510
511 511 destlock = copystore(ui, srcrepo, destpath)
512 512 # copy bookmarks over
513 513 srcbookmarks = srcrepo.join('bookmarks')
514 514 dstbookmarks = os.path.join(destpath, 'bookmarks')
515 515 if os.path.exists(srcbookmarks):
516 516 util.copyfile(srcbookmarks, dstbookmarks)
517 517
518 518 # Recomputing branch cache might be slow on big repos,
519 519 # so just copy it
520 520 def copybranchcache(fname):
521 521 srcbranchcache = srcrepo.join('cache/%s' % fname)
522 522 dstbranchcache = os.path.join(dstcachedir, fname)
523 523 if os.path.exists(srcbranchcache):
524 524 if not os.path.exists(dstcachedir):
525 525 os.mkdir(dstcachedir)
526 526 util.copyfile(srcbranchcache, dstbranchcache)
527 527
528 528 dstcachedir = os.path.join(destpath, 'cache')
529 529 # In local clones we're copying all nodes, not just served
530 530 # ones. Therefore copy all branch caches over.
531 531 copybranchcache('branch2')
532 532 for cachename in repoview.filtertable:
533 533 copybranchcache('branch2-%s' % cachename)
534 534
535 535 # we need to re-init the repo after manually copying the data
536 536 # into it
537 537 destpeer = peer(srcrepo, peeropts, dest)
538 538 srcrepo.hook('outgoing', source='clone',
539 539 node=node.hex(node.nullid))
540 540 else:
541 541 try:
542 542 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
543 543 # only pass ui when no srcrepo
544 544 except OSError as inst:
545 545 if inst.errno == errno.EEXIST:
546 546 cleandir = None
547 547 raise error.Abort(_("destination '%s' already exists")
548 548 % dest)
549 549 raise
550 550
551 551 revs = None
552 552 if rev:
553 553 if not srcpeer.capable('lookup'):
554 554 raise error.Abort(_("src repository does not support "
555 555 "revision lookup and so doesn't "
556 556 "support clone by revision"))
557 557 revs = [srcpeer.lookup(r) for r in rev]
558 558 checkout = revs[0]
559 559 local = destpeer.local()
560 560 if local:
561 561 if not stream:
562 562 if pull:
563 563 stream = False
564 564 else:
565 565 stream = None
566 566 # internal config: ui.quietbookmarkmove
567 567 quiet = local.ui.backupconfig('ui', 'quietbookmarkmove')
568 568 try:
569 569 local.ui.setconfig(
570 570 'ui', 'quietbookmarkmove', True, 'clone')
571 571 exchange.pull(local, srcpeer, revs,
572 572 streamclonerequested=stream)
573 573 finally:
574 574 local.ui.restoreconfig(quiet)
575 575 elif srcrepo:
576 576 exchange.push(srcrepo, destpeer, revs=revs,
577 577 bookmarks=srcrepo._bookmarks.keys())
578 578 else:
579 579 raise error.Abort(_("clone from remote to remote not supported")
580 580 )
581 581
582 582 cleandir = None
583 583
584 584 destrepo = destpeer.local()
585 585 if destrepo:
586 586 template = uimod.samplehgrcs['cloned']
587 587 fp = destrepo.vfs("hgrc", "w", text=True)
588 588 u = util.url(abspath)
589 589 u.passwd = None
590 590 defaulturl = str(u)
591 591 fp.write(template % defaulturl)
592 592 fp.close()
593 593
594 594 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
595 595
596 596 if update:
597 597 if update is not True:
598 598 checkout = srcpeer.lookup(update)
599 599 uprev = None
600 600 status = None
601 601 if checkout is not None:
602 602 try:
603 603 uprev = destrepo.lookup(checkout)
604 604 except error.RepoLookupError:
605 605 if update is not True:
606 606 try:
607 607 uprev = destrepo.lookup(update)
608 608 except error.RepoLookupError:
609 609 pass
610 610 if uprev is None:
611 611 try:
612 612 uprev = destrepo._bookmarks['@']
613 613 update = '@'
614 614 bn = destrepo[uprev].branch()
615 615 if bn == 'default':
616 616 status = _("updating to bookmark @\n")
617 617 else:
618 618 status = (_("updating to bookmark @ on branch %s\n")
619 619 % bn)
620 620 except KeyError:
621 621 try:
622 622 uprev = destrepo.branchtip('default')
623 623 except error.RepoLookupError:
624 624 uprev = destrepo.lookup('tip')
625 625 if not status:
626 626 bn = destrepo[uprev].branch()
627 627 status = _("updating to branch %s\n") % bn
628 628 destrepo.ui.status(status)
629 629 _update(destrepo, uprev)
630 630 if update in destrepo._bookmarks:
631 631 bookmarks.activate(destrepo, update)
632 632 finally:
633 633 release(srclock, destlock)
634 634 if cleandir is not None:
635 635 shutil.rmtree(cleandir, True)
636 636 if srcpeer is not None:
637 637 srcpeer.close()
638 638 return srcpeer, destpeer
639 639
640 640 def _showstats(repo, stats, quietempty=False):
641 641 if quietempty and not any(stats):
642 642 return
643 643 repo.ui.status(_("%d files updated, %d files merged, "
644 644 "%d files removed, %d files unresolved\n") % stats)
645 645
646 646 def updaterepo(repo, node, overwrite):
647 647 """Update the working directory to node.
648 648
649 649 When overwrite is set, changes are clobbered; otherwise they are merged.
650 650
651 651 returns stats (see pydoc mercurial.merge.applyupdates)"""
652 652 return mergemod.update(repo, node, False, overwrite,
653 653 labels=['working copy', 'destination'])
654 654
655 655 def update(repo, node, quietempty=False):
656 656 """update the working directory to node, merging linear changes"""
657 657 stats = updaterepo(repo, node, False)
658 658 _showstats(repo, stats, quietempty)
659 659 if stats[3]:
660 660 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
661 661 return stats[3] > 0
662 662
663 663 # naming conflict in clone()
664 664 _update = update
665 665
666 666 def clean(repo, node, show_stats=True, quietempty=False):
667 667 """forcibly switch the working directory to node, clobbering changes"""
668 668 stats = updaterepo(repo, node, True)
669 669 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
670 670 if show_stats:
671 671 _showstats(repo, stats, quietempty)
672 672 return stats[3] > 0
673 673
674 def merge(repo, node, force=None, remind=True):
674 def merge(repo, node, force=None, remind=True, mergeforce=False):
675 675 """Branch merge with node, resolving changes. Return true if any
676 676 unresolved conflicts."""
677 stats = mergemod.update(repo, node, True, force)
677 stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce)
678 678 _showstats(repo, stats)
679 679 if stats[3]:
680 680 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
681 681 "or 'hg update -C .' to abandon\n"))
682 682 elif remind:
683 683 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
684 684 return stats[3] > 0
685 685
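The only change to merge() in this revision is threading the new mergeforce flag through to mergemod.update. A hypothetical caller sketch (not part of this diff) showing how a command implementing `hg merge --force` might distinguish a forced merge from other forced updates:

    # hypothetical call site: pass --force both as 'force' (pre-existing
    # behaviour) and as 'mergeforce' (new in this change) so that
    # _checkunknownfiles can be told whether this was merge --force
    force = opts.get('force')
    hg.merge(repo, node, force=force, mergeforce=force)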
686 686 def _incoming(displaychlist, subreporecurse, ui, repo, source,
687 687 opts, buffered=False):
688 688 """
689 689 Helper for incoming / gincoming.
690 690 displaychlist gets called with
691 691 (remoterepo, incomingchangesetlist, displayer) parameters,
692 692 and is supposed to contain only code that can't be unified.
693 693 """
694 694 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
695 695 other = peer(repo, opts, source)
696 696 ui.status(_('comparing with %s\n') % util.hidepassword(source))
697 697 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
698 698
699 699 if revs:
700 700 revs = [other.lookup(rev) for rev in revs]
701 701 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
702 702 revs, opts["bundle"], opts["force"])
703 703 try:
704 704 if not chlist:
705 705 ui.status(_("no changes found\n"))
706 706 return subreporecurse()
707 707
708 708 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
709 709 displaychlist(other, chlist, displayer)
710 710 displayer.close()
711 711 finally:
712 712 cleanupfn()
713 713 subreporecurse()
714 714 return 0 # exit code is zero since we found incoming changes
715 715
716 716 def incoming(ui, repo, source, opts):
717 717 def subreporecurse():
718 718 ret = 1
719 719 if opts.get('subrepos'):
720 720 ctx = repo[None]
721 721 for subpath in sorted(ctx.substate):
722 722 sub = ctx.sub(subpath)
723 723 ret = min(ret, sub.incoming(ui, source, opts))
724 724 return ret
725 725
726 726 def display(other, chlist, displayer):
727 727 limit = cmdutil.loglimit(opts)
728 728 if opts.get('newest_first'):
729 729 chlist.reverse()
730 730 count = 0
731 731 for n in chlist:
732 732 if limit is not None and count >= limit:
733 733 break
734 734 parents = [p for p in other.changelog.parents(n) if p != nullid]
735 735 if opts.get('no_merges') and len(parents) == 2:
736 736 continue
737 737 count += 1
738 738 displayer.show(other[n])
739 739 return _incoming(display, subreporecurse, ui, repo, source, opts)
740 740
741 741 def _outgoing(ui, repo, dest, opts):
742 742 dest = ui.expandpath(dest or 'default-push', dest or 'default')
743 743 dest, branches = parseurl(dest, opts.get('branch'))
744 744 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
745 745 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
746 746 if revs:
747 747 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
748 748
749 749 other = peer(repo, opts, dest)
750 750 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
751 751 force=opts.get('force'))
752 752 o = outgoing.missing
753 753 if not o:
754 754 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
755 755 return o, other
756 756
757 757 def outgoing(ui, repo, dest, opts):
758 758 def recurse():
759 759 ret = 1
760 760 if opts.get('subrepos'):
761 761 ctx = repo[None]
762 762 for subpath in sorted(ctx.substate):
763 763 sub = ctx.sub(subpath)
764 764 ret = min(ret, sub.outgoing(ui, dest, opts))
765 765 return ret
766 766
767 767 limit = cmdutil.loglimit(opts)
768 768 o, other = _outgoing(ui, repo, dest, opts)
769 769 if not o:
770 770 cmdutil.outgoinghooks(ui, repo, other, opts, o)
771 771 return recurse()
772 772
773 773 if opts.get('newest_first'):
774 774 o.reverse()
775 775 displayer = cmdutil.show_changeset(ui, repo, opts)
776 776 count = 0
777 777 for n in o:
778 778 if limit is not None and count >= limit:
779 779 break
780 780 parents = [p for p in repo.changelog.parents(n) if p != nullid]
781 781 if opts.get('no_merges') and len(parents) == 2:
782 782 continue
783 783 count += 1
784 784 displayer.show(repo[n])
785 785 displayer.close()
786 786 cmdutil.outgoinghooks(ui, repo, other, opts, o)
787 787 recurse()
788 788 return 0 # exit code is zero since we found outgoing changes
789 789
790 790 def verify(repo):
791 791 """verify the consistency of a repository"""
792 792 ret = verifymod.verify(repo)
793 793
794 794 # Broken subrepo references in hidden csets don't seem worth worrying about,
795 795 # since they can't be pushed/pulled, and --hidden can be used if they are a
796 796 # concern.
797 797
798 798 # pathto() is needed for -R case
799 799 revs = repo.revs("filelog(%s)",
800 800 util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
801 801
802 802 if revs:
803 803 repo.ui.status(_('checking subrepo links\n'))
804 804 for rev in revs:
805 805 ctx = repo[rev]
806 806 try:
807 807 for subpath in ctx.substate:
808 808 ret = ctx.sub(subpath).verify() or ret
809 809 except Exception:
810 810 repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
811 811 node.short(ctx.node()))
812 812
813 813 return ret
814 814
815 815 def remoteui(src, opts):
816 816 'build a remote ui from ui or repo and opts'
817 817 if util.safehasattr(src, 'baseui'): # looks like a repository
818 818 dst = src.baseui.copy() # drop repo-specific config
819 819 src = src.ui # copy target options from repo
820 820 else: # assume it's a global ui object
821 821 dst = src.copy() # keep all global options
822 822
823 823 # copy ssh-specific options
824 824 for o in 'ssh', 'remotecmd':
825 825 v = opts.get(o) or src.config('ui', o)
826 826 if v:
827 827 dst.setconfig("ui", o, v, 'copied')
828 828
829 829 # copy bundle-specific options
830 830 r = src.config('bundle', 'mainreporoot')
831 831 if r:
832 832 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
833 833
834 834 # copy selected local settings to the remote ui
835 835 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
836 836 for key, val in src.configitems(sect):
837 837 dst.setconfig(sect, key, val, 'copied')
838 838 v = src.config('web', 'cacerts')
839 839 if v == '!':
840 840 dst.setconfig('web', 'cacerts', v, 'copied')
841 841 elif v:
842 842 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
843 843
844 844 return dst
845 845
846 846 # Files of interest
847 847 # Used to check if the repository has changed, by looking at the mtime and size of
848 848 # these files.
849 849 foi = [('spath', '00changelog.i'),
850 850 ('spath', 'phaseroots'), # ! phase can change content at the same size
851 851 ('spath', 'obsstore'),
852 852 ('path', 'bookmarks'), # ! bookmark can change content at the same size
853 853 ]
854 854
855 855 class cachedlocalrepo(object):
856 856 """Holds a localrepository that can be cached and reused."""
857 857
858 858 def __init__(self, repo):
859 859 """Create a new cached repo from an existing repo.
860 860
861 861 We assume the passed in repo was recently created. If the
862 862 repo has changed between when it was created and when it was
863 863 turned into a cache, it may not refresh properly.
864 864 """
865 865 assert isinstance(repo, localrepo.localrepository)
866 866 self._repo = repo
867 867 self._state, self.mtime = self._repostate()
868 868
869 869 def fetch(self):
870 870 """Refresh (if necessary) and return a repository.
871 871
872 872 If the cached instance is out of date, it will be recreated
873 873 automatically and returned.
874 874
875 875 Returns a tuple of the repo and a boolean indicating whether a new
876 876 repo instance was created.
877 877 """
878 878 # We compare the mtimes and sizes of some well-known files to
879 879 # determine if the repo changed. This is not precise, as mtimes
880 880 # are susceptible to clock skew and imprecise filesystems and
881 881 # file content can change while maintaining the same size.
882 882
883 883 state, mtime = self._repostate()
884 884 if state == self._state:
885 885 return self._repo, False
886 886
887 887 self._repo = repository(self._repo.baseui, self._repo.url())
888 888 self._state = state
889 889 self.mtime = mtime
890 890
891 891 return self._repo, True
892 892
893 893 def _repostate(self):
894 894 state = []
895 895 maxmtime = -1
896 896 for attr, fname in foi:
897 897 prefix = getattr(self._repo, attr)
898 898 p = os.path.join(prefix, fname)
899 899 try:
900 900 st = os.stat(p)
901 901 except OSError:
902 902 st = os.stat(prefix)
903 903 state.append((st.st_mtime, st.st_size))
904 904 maxmtime = max(maxmtime, st.st_mtime)
905 905
906 906 return tuple(state), maxmtime
907 907
908 908 def copy(self):
909 909 """Obtain a copy of this class instance.
910 910
911 911 A new localrepository instance is obtained. The new instance should be
912 912 completely independent of the original.
913 913 """
914 914 repo = repository(self._repo.baseui, self._repo.origroot)
915 915 c = cachedlocalrepo(repo)
916 916 c._state = self._state
917 917 c.mtime = self.mtime
918 918 return c
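A brief usage sketch of cachedlocalrepo (the surrounding long-lived caller is hypothetical): fetch() re-reads the mtime/size state of the files listed in `foi` and only constructs a new repository object when that state differs from what was recorded.

    # hypothetical long-lived caller, e.g. server code that keeps repos warm
    cache = cachedlocalrepo(repository(ui, path))
    repo, created = cache.fetch()   # cheap when nothing in 'foi' changed
    if created:
        ui.debug('repository state changed; re-opened %s\n' % path)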
@@ -1,1617 +1,1620 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import shutil
13 13 import struct
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 bin,
18 18 hex,
19 19 nullhex,
20 20 nullid,
21 21 nullrev,
22 22 )
23 23 from . import (
24 24 copies,
25 25 destutil,
26 26 error,
27 27 filemerge,
28 28 obsolete,
29 29 scmutil,
30 30 subrepo,
31 31 util,
32 32 worker,
33 33 )
34 34
35 35 _pack = struct.pack
36 36 _unpack = struct.unpack
37 37
38 38 def _droponode(data):
39 39 # used for compatibility for v1
40 40 bits = data.split('\0')
41 41 bits = bits[:-2] + bits[-1:]
42 42 return '\0'.join(bits)
43 43
44 44 class mergestate(object):
45 45 '''track 3-way merge state of individual files
46 46
47 47 The merge state is stored on disk when needed. Two files are used: one with
48 48 an old format (version 1), and one with a new format (version 2). Version 2
49 49 stores a superset of the data in version 1, including new kinds of records
50 50 in the future. For more about the new format, see the documentation for
51 51 `_readrecordsv2`.
52 52
53 53 Each record can contain arbitrary content, and has an associated type. This
54 54 `type` should be a letter. If `type` is uppercase, the record is mandatory:
55 55 versions of Mercurial that don't support it should abort. If `type` is
56 56 lowercase, the record can be safely ignored.
57 57
58 58 Currently known records:
59 59
60 60 L: the node of the "local" part of the merge (hexified version)
61 61 O: the node of the "other" part of the merge (hexified version)
62 62 F: a file to be merged entry
63 63 C: a change/delete or delete/change conflict
64 64 D: a file that the external merge driver will merge internally
65 65 (experimental)
66 66 m: the external merge driver defined for this merge plus its run state
67 67 (experimental)
68 68 f: a (filename, dictionary) tuple of optional values for a given file
69 69 X: unsupported mandatory record type (used in tests)
70 70 x: unsupported advisory record type (used in tests)
71 71
72 72 Merge driver run states (experimental):
73 73 u: driver-resolved files unmarked -- needs to be run next time we're about
74 74 to resolve or commit
75 75 m: driver-resolved files marked -- only needs to be run before commit
76 76 s: success/skipped -- does not need to be run any more
77 77
78 78 '''
79 79 statepathv1 = 'merge/state'
80 80 statepathv2 = 'merge/state2'
81 81
82 82 @staticmethod
83 83 def clean(repo, node=None, other=None):
84 84 """Initialize a brand new merge state, removing any existing state on
85 85 disk."""
86 86 ms = mergestate(repo)
87 87 ms.reset(node, other)
88 88 return ms
89 89
90 90 @staticmethod
91 91 def read(repo):
92 92 """Initialize the merge state, reading it from disk."""
93 93 ms = mergestate(repo)
94 94 ms._read()
95 95 return ms
96 96
97 97 def __init__(self, repo):
98 98 """Initialize the merge state.
99 99
100 100 Do not use this directly! Instead call read() or clean()."""
101 101 self._repo = repo
102 102 self._dirty = False
103 103
104 104 def reset(self, node=None, other=None):
105 105 self._state = {}
106 106 self._stateextras = {}
107 107 self._local = None
108 108 self._other = None
109 109 for var in ('localctx', 'otherctx'):
110 110 if var in vars(self):
111 111 delattr(self, var)
112 112 if node:
113 113 self._local = node
114 114 self._other = other
115 115 self._readmergedriver = None
116 116 if self.mergedriver:
117 117 self._mdstate = 's'
118 118 else:
119 119 self._mdstate = 'u'
120 120 shutil.rmtree(self._repo.join('merge'), True)
121 121 self._results = {}
122 122 self._dirty = False
123 123
124 124 def _read(self):
125 125 """Analyse each record content to restore a serialized state from disk
126 126
127 127 This function processes the "record" entries produced by the
128 128 de-serialization of the on-disk file.
129 129 """
130 130 self._state = {}
131 131 self._stateextras = {}
132 132 self._local = None
133 133 self._other = None
134 134 for var in ('localctx', 'otherctx'):
135 135 if var in vars(self):
136 136 delattr(self, var)
137 137 self._readmergedriver = None
138 138 self._mdstate = 's'
139 139 unsupported = set()
140 140 records = self._readrecords()
141 141 for rtype, record in records:
142 142 if rtype == 'L':
143 143 self._local = bin(record)
144 144 elif rtype == 'O':
145 145 self._other = bin(record)
146 146 elif rtype == 'm':
147 147 bits = record.split('\0', 1)
148 148 mdstate = bits[1]
149 149 if len(mdstate) != 1 or mdstate not in 'ums':
150 150 # the merge driver should be idempotent, so just rerun it
151 151 mdstate = 'u'
152 152
153 153 self._readmergedriver = bits[0]
154 154 self._mdstate = mdstate
155 155 elif rtype in 'FDC':
156 156 bits = record.split('\0')
157 157 self._state[bits[0]] = bits[1:]
158 158 elif rtype == 'f':
159 159 filename, rawextras = record.split('\0', 1)
160 160 extraparts = rawextras.split('\0')
161 161 extras = {}
162 162 i = 0
163 163 while i < len(extraparts):
164 164 extras[extraparts[i]] = extraparts[i + 1]
165 165 i += 2
166 166
167 167 self._stateextras[filename] = extras
168 168 elif not rtype.islower():
169 169 unsupported.add(rtype)
170 170 self._results = {}
171 171 self._dirty = False
172 172
173 173 if unsupported:
174 174 raise error.UnsupportedMergeRecords(unsupported)
175 175
176 176 def _readrecords(self):
177 177 """Read merge state from disk and return a list of record (TYPE, data)
178 178
179 179 We read data from both v1 and v2 files and decide which one to use.
180 180
181 181 V1 has been used by versions prior to 2.9.1 and contains less data than
182 182 v2. We read both versions and check that no data in v2 contradicts
183 183 v1. If there is no contradiction we can safely assume that both v1
184 184 and v2 were written at the same time and use the extra data in v2. If
185 185 there is a contradiction we ignore the v2 content, as we assume an old
186 186 version of Mercurial has overwritten the mergestate file and left an
187 187 old v2 file around.
188 188
189 189 returns list of record [(TYPE, data), ...]"""
190 190 v1records = self._readrecordsv1()
191 191 v2records = self._readrecordsv2()
192 192 if self._v1v2match(v1records, v2records):
193 193 return v2records
194 194 else:
195 195 # v1 file is newer than v2 file, use it
196 196 # we have to infer the "other" changeset of the merge
197 197 # we cannot do better than that with v1 of the format
198 198 mctx = self._repo[None].parents()[-1]
199 199 v1records.append(('O', mctx.hex()))
200 200 # add placeholder "other" file node information
201 201 # nobody is using it yet so we do not need to fetch the data
202 202 # if mctx was wrong, `mctx[bits[-2]]` may fail.
203 203 for idx, r in enumerate(v1records):
204 204 if r[0] == 'F':
205 205 bits = r[1].split('\0')
206 206 bits.insert(-2, '')
207 207 v1records[idx] = (r[0], '\0'.join(bits))
208 208 return v1records
209 209
210 210 def _v1v2match(self, v1records, v2records):
211 211 oldv2 = set() # old format version of v2 record
212 212 for rec in v2records:
213 213 if rec[0] == 'L':
214 214 oldv2.add(rec)
215 215 elif rec[0] == 'F':
216 216 # drop the onode data (not contained in v1)
217 217 oldv2.add(('F', _droponode(rec[1])))
218 218 for rec in v1records:
219 219 if rec not in oldv2:
220 220 return False
221 221 else:
222 222 return True
223 223
224 224 def _readrecordsv1(self):
225 225 """read on disk merge state for version 1 file
226 226
227 227 returns list of record [(TYPE, data), ...]
228 228
229 229 Note: the "F" data from this file are one entry short
230 230 (no "other file node" entry)
231 231 """
232 232 records = []
233 233 try:
234 234 f = self._repo.vfs(self.statepathv1)
235 235 for i, l in enumerate(f):
236 236 if i == 0:
237 237 records.append(('L', l[:-1]))
238 238 else:
239 239 records.append(('F', l[:-1]))
240 240 f.close()
241 241 except IOError as err:
242 242 if err.errno != errno.ENOENT:
243 243 raise
244 244 return records
245 245
246 246 def _readrecordsv2(self):
247 247 """read on disk merge state for version 2 file
248 248
249 249 This format is a list of arbitrary records of the form:
250 250
251 251 [type][length][content]
252 252
253 253 `type` is a single character, `length` is a 4 byte integer, and
254 254 `content` is an arbitrary byte sequence of length `length`.
255 255
256 256 Mercurial versions prior to 3.7 have a bug where if there are
257 257 unsupported mandatory merge records, attempting to clear out the merge
258 258 state with hg update --clean or similar aborts. The 't' record type
259 259 works around that by writing out what those versions treat as an
260 260 advisory record, but later versions interpret as special: the first
261 261 character is the 'real' record type and everything onwards is the data.
262 262
263 263 Returns list of records [(TYPE, data), ...]."""
264 264 records = []
265 265 try:
266 266 f = self._repo.vfs(self.statepathv2)
267 267 data = f.read()
268 268 off = 0
269 269 end = len(data)
270 270 while off < end:
271 271 rtype = data[off]
272 272 off += 1
273 273 length = _unpack('>I', data[off:(off + 4)])[0]
274 274 off += 4
275 275 record = data[off:(off + length)]
276 276 off += length
277 277 if rtype == 't':
278 278 rtype, record = record[0], record[1:]
279 279 records.append((rtype, record))
280 280 f.close()
281 281 except IOError as err:
282 282 if err.errno != errno.ENOENT:
283 283 raise
284 284 return records
285 285
286 286 @util.propertycache
287 287 def mergedriver(self):
288 288 # protect against the following:
289 289 # - A configures a malicious merge driver in their hgrc, then
290 290 # pauses the merge
291 291 # - A edits their hgrc to remove references to the merge driver
292 292 # - A gives a copy of their entire repo, including .hg, to B
293 293 # - B inspects .hgrc and finds it to be clean
294 294 # - B then continues the merge and the malicious merge driver
295 295 # gets invoked
296 296 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
297 297 if (self._readmergedriver is not None
298 298 and self._readmergedriver != configmergedriver):
299 299 raise error.ConfigError(
300 300 _("merge driver changed since merge started"),
301 301 hint=_("revert merge driver change or abort merge"))
302 302
303 303 return configmergedriver
304 304
305 305 @util.propertycache
306 306 def localctx(self):
307 307 if self._local is None:
308 308 raise RuntimeError("localctx accessed but self._local isn't set")
309 309 return self._repo[self._local]
310 310
311 311 @util.propertycache
312 312 def otherctx(self):
313 313 if self._other is None:
314 314 raise RuntimeError("otherctx accessed but self._other isn't set")
315 315 return self._repo[self._other]
316 316
317 317 def active(self):
318 318 """Whether mergestate is active.
319 319
320 320 Returns True if there appears to be mergestate. This is a rough proxy
321 321 for "is a merge in progress."
322 322 """
323 323 # Check local variables before looking at filesystem for performance
324 324 # reasons.
325 325 return bool(self._local) or bool(self._state) or \
326 326 self._repo.vfs.exists(self.statepathv1) or \
327 327 self._repo.vfs.exists(self.statepathv2)
328 328
329 329 def commit(self):
330 330 """Write current state on disk (if necessary)"""
331 331 if self._dirty:
332 332 records = self._makerecords()
333 333 self._writerecords(records)
334 334 self._dirty = False
335 335
336 336 def _makerecords(self):
337 337 records = []
338 338 records.append(('L', hex(self._local)))
339 339 records.append(('O', hex(self._other)))
340 340 if self.mergedriver:
341 341 records.append(('m', '\0'.join([
342 342 self.mergedriver, self._mdstate])))
343 343 for d, v in self._state.iteritems():
344 344 if v[0] == 'd':
345 345 records.append(('D', '\0'.join([d] + v)))
346 346 # v[1] == local ('cd'), v[6] == other ('dc') -- not supported by
347 347 # older versions of Mercurial
348 348 elif v[1] == nullhex or v[6] == nullhex:
349 349 records.append(('C', '\0'.join([d] + v)))
350 350 else:
351 351 records.append(('F', '\0'.join([d] + v)))
352 352 for filename, extras in sorted(self._stateextras.iteritems()):
353 353 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
354 354 extras.iteritems())
355 355 records.append(('f', '%s\0%s' % (filename, rawextras)))
356 356 return records
357 357
358 358 def _writerecords(self, records):
359 359 """Write current state on disk (both v1 and v2)"""
360 360 self._writerecordsv1(records)
361 361 self._writerecordsv2(records)
362 362
363 363 def _writerecordsv1(self, records):
364 364 """Write current state on disk in a version 1 file"""
365 365 f = self._repo.vfs(self.statepathv1, 'w')
366 366 irecords = iter(records)
367 367 lrecords = irecords.next()
368 368 assert lrecords[0] == 'L'
369 369 f.write(hex(self._local) + '\n')
370 370 for rtype, data in irecords:
371 371 if rtype == 'F':
372 372 f.write('%s\n' % _droponode(data))
373 373 f.close()
374 374
375 375 def _writerecordsv2(self, records):
376 376 """Write current state on disk in a version 2 file
377 377
378 378 See the docstring for _readrecordsv2 for why we use 't'."""
379 379 # these are the records that all version 2 clients can read
380 380 whitelist = 'LOF'
381 381 f = self._repo.vfs(self.statepathv2, 'w')
382 382 for key, data in records:
383 383 assert len(key) == 1
384 384 if key not in whitelist:
385 385 key, data = 't', '%s%s' % (key, data)
386 386 format = '>sI%is' % len(data)
387 387 f.write(_pack(format, key, len(data), data))
388 388 f.close()
389 389
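To make the v2 on-disk layout described in _readrecordsv2 concrete, here is a small illustrative sketch (not part of the module) of packing and unpacking one record in the [type][length][content] format, including the 't' wrapping applied above to record types outside the 'LOF' whitelist:

    import struct

    def packrecord(rtype, data):
        if rtype not in 'LOF':
            # wrap so pre-3.7 clients see an ignorable advisory 't' record
            rtype, data = 't', rtype + data
        return struct.pack('>sI%is' % len(data), rtype, len(data), data)

    def unpackrecord(buf, off=0):
        rtype = buf[off]
        length = struct.unpack('>I', buf[off + 1:off + 5])[0]
        record = buf[off + 5:off + 5 + length]
        if rtype == 't':
            rtype, record = record[0], record[1:]
        return rtype, record, off + 5 + length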
390 390 def add(self, fcl, fco, fca, fd):
391 391 """add a new (potentially?) conflicting file the merge state
392 392 fcl: file context for local,
393 393 fco: file context for remote,
394 394 fca: file context for ancestors,
395 395 fd: file path of the resulting merge.
396 396
397 397 note: also write the local version to the `.hg/merge` directory.
398 398 """
399 399 if fcl.isabsent():
400 400 hash = nullhex
401 401 else:
402 402 hash = util.sha1(fcl.path()).hexdigest()
403 403 self._repo.vfs.write('merge/' + hash, fcl.data())
404 404 self._state[fd] = ['u', hash, fcl.path(),
405 405 fca.path(), hex(fca.filenode()),
406 406 fco.path(), hex(fco.filenode()),
407 407 fcl.flags()]
408 408 self._stateextras[fd] = { 'ancestorlinknode' : hex(fca.node()) }
409 409 self._dirty = True
410 410
411 411 def __contains__(self, dfile):
412 412 return dfile in self._state
413 413
414 414 def __getitem__(self, dfile):
415 415 return self._state[dfile][0]
416 416
417 417 def __iter__(self):
418 418 return iter(sorted(self._state))
419 419
420 420 def files(self):
421 421 return self._state.keys()
422 422
423 423 def mark(self, dfile, state):
424 424 self._state[dfile][0] = state
425 425 self._dirty = True
426 426
427 427 def mdstate(self):
428 428 return self._mdstate
429 429
430 430 def unresolved(self):
431 431 """Obtain the paths of unresolved files."""
432 432
433 433 for f, entry in self._state.items():
434 434 if entry[0] == 'u':
435 435 yield f
436 436
437 437 def driverresolved(self):
438 438 """Obtain the paths of driver-resolved files."""
439 439
440 440 for f, entry in self._state.items():
441 441 if entry[0] == 'd':
442 442 yield f
443 443
444 444 def extras(self, filename):
445 445 return self._stateextras.setdefault(filename, {})
446 446
447 447 def _resolve(self, preresolve, dfile, wctx, labels=None):
448 448 """rerun merge process for file path `dfile`"""
449 449 if self[dfile] in 'rd':
450 450 return True, 0
451 451 stateentry = self._state[dfile]
452 452 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
453 453 octx = self._repo[self._other]
454 454 extras = self.extras(dfile)
455 455 anccommitnode = extras.get('ancestorlinknode')
456 456 if anccommitnode:
457 457 actx = self._repo[anccommitnode]
458 458 else:
459 459 actx = None
460 460 fcd = self._filectxorabsent(hash, wctx, dfile)
461 461 fco = self._filectxorabsent(onode, octx, ofile)
462 462 # TODO: move this to filectxorabsent
463 463 fca = self._repo.filectx(afile, fileid=anode, changeid=actx)
464 464 # "premerge" x flags
465 465 flo = fco.flags()
466 466 fla = fca.flags()
467 467 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
468 468 if fca.node() == nullid:
469 469 if preresolve:
470 470 self._repo.ui.warn(
471 471 _('warning: cannot merge flags for %s\n') % afile)
472 472 elif flags == fla:
473 473 flags = flo
474 474 if preresolve:
475 475 # restore local
476 476 if hash != nullhex:
477 477 f = self._repo.vfs('merge/' + hash)
478 478 self._repo.wwrite(dfile, f.read(), flags)
479 479 f.close()
480 480 else:
481 481 self._repo.wvfs.unlinkpath(dfile, ignoremissing=True)
482 482 complete, r, deleted = filemerge.premerge(self._repo, self._local,
483 483 lfile, fcd, fco, fca,
484 484 labels=labels)
485 485 else:
486 486 complete, r, deleted = filemerge.filemerge(self._repo, self._local,
487 487 lfile, fcd, fco, fca,
488 488 labels=labels)
489 489 if r is None:
490 490 # no real conflict
491 491 del self._state[dfile]
492 492 self._stateextras.pop(dfile, None)
493 493 self._dirty = True
494 494 elif not r:
495 495 self.mark(dfile, 'r')
496 496
497 497 if complete:
498 498 action = None
499 499 if deleted:
500 500 if fcd.isabsent():
501 501 # dc: local picked. Need to drop if present, which may
502 502 # happen on re-resolves.
503 503 action = 'f'
504 504 else:
505 505 # cd: remote picked (or otherwise deleted)
506 506 action = 'r'
507 507 else:
508 508 if fcd.isabsent(): # dc: remote picked
509 509 action = 'g'
510 510 elif fco.isabsent(): # cd: local picked
511 511 if dfile in self.localctx:
512 512 action = 'am'
513 513 else:
514 514 action = 'a'
515 515 # else: regular merges (no action necessary)
516 516 self._results[dfile] = r, action
517 517
518 518 return complete, r
519 519
520 520 def _filectxorabsent(self, hexnode, ctx, f):
521 521 if hexnode == nullhex:
522 522 return filemerge.absentfilectx(ctx, f)
523 523 else:
524 524 return ctx[f]
525 525
526 526 def preresolve(self, dfile, wctx, labels=None):
527 527 """run premerge process for dfile
528 528
529 529 Returns whether the merge is complete, and the exit code."""
530 530 return self._resolve(True, dfile, wctx, labels=labels)
531 531
532 532 def resolve(self, dfile, wctx, labels=None):
533 533 """run merge process (assuming premerge was run) for dfile
534 534
535 535 Returns the exit code of the merge."""
536 536 return self._resolve(False, dfile, wctx, labels=labels)[1]
537 537
538 538 def counts(self):
539 539 """return counts for updated, merged and removed files in this
540 540 session"""
541 541 updated, merged, removed = 0, 0, 0
542 542 for r, action in self._results.itervalues():
543 543 if r is None:
544 544 updated += 1
545 545 elif r == 0:
546 546 if action == 'r':
547 547 removed += 1
548 548 else:
549 549 merged += 1
550 550 return updated, merged, removed
551 551
552 552 def unresolvedcount(self):
553 553 """get unresolved count for this merge (persistent)"""
554 554 return len([True for f, entry in self._state.iteritems()
555 555 if entry[0] == 'u'])
556 556
557 557 def actions(self):
558 558 """return lists of actions to perform on the dirstate"""
559 559 actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []}
560 560 for f, (r, action) in self._results.iteritems():
561 561 if action is not None:
562 562 actions[action].append((f, None, "merge result"))
563 563 return actions
564 564
565 565 def recordactions(self):
566 566 """record remove/add/get actions in the dirstate"""
567 567 branchmerge = self._repo.dirstate.p2() != nullid
568 568 recordupdates(self._repo, self.actions(), branchmerge)
569 569
570 570 def queueremove(self, f):
571 571 """queues a file to be removed from the dirstate
572 572
573 573 Meant for use by custom merge drivers."""
574 574 self._results[f] = 0, 'r'
575 575
576 576 def queueadd(self, f):
577 577 """queues a file to be added to the dirstate
578 578
579 579 Meant for use by custom merge drivers."""
580 580 self._results[f] = 0, 'a'
581 581
582 582 def queueget(self, f):
583 583 """queues a file to be marked modified in the dirstate
584 584
585 585 Meant for use by custom merge drivers."""
586 586 self._results[f] = 0, 'g'
587 587
588 588 def _getcheckunknownconfig(repo, section, name):
589 589 config = repo.ui.config(section, name, default='abort')
590 590 valid = ['abort', 'ignore', 'warn']
591 591 if config not in valid:
592 592 validstr = ', '.join(["'" + v + "'" for v in valid])
593 593 raise error.ConfigError(_("%s.%s not valid "
594 594 "('%s' is none of %s)")
595 595 % (section, name, config, validstr))
596 596 return config
597 597
598 598 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
599 599 if f2 is None:
600 600 f2 = f
601 601 return (repo.wvfs.isfileorlink(f)
602 602 and repo.wvfs.audit.check(f)
603 603 and repo.dirstate.normalize(f) not in repo.dirstate
604 604 and mctx[f2].cmp(wctx[f]))
605 605
606 def _checkunknownfiles(repo, wctx, mctx, force, actions):
606 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
607 607 """
608 608 Considers any actions that care about the presence of conflicting unknown
609 609 files. For some actions, the result is to abort; for others, it is to
610 610 choose a different action.
611 611 """
612 612 conflicts = set()
613 613 warnconflicts = set()
614 614 abortconflicts = set()
615 615 unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
616 616 ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
617 617 if not force:
618 618 def collectconflicts(conflicts, config):
619 619 if config == 'abort':
620 620 abortconflicts.update(conflicts)
621 621 elif config == 'warn':
622 622 warnconflicts.update(conflicts)
623 623
624 624 for f, (m, args, msg) in actions.iteritems():
625 625 if m in ('c', 'dc'):
626 626 if _checkunknownfile(repo, wctx, mctx, f):
627 627 conflicts.add(f)
628 628 elif m == 'dg':
629 629 if _checkunknownfile(repo, wctx, mctx, f, args[0]):
630 630 conflicts.add(f)
631 631
632 632 ignoredconflicts = set([c for c in conflicts
633 633 if repo.dirstate._ignore(c)])
634 634 unknownconflicts = conflicts - ignoredconflicts
635 635 collectconflicts(ignoredconflicts, ignoredconfig)
636 636 collectconflicts(unknownconflicts, unknownconfig)
637 637
638 638 for f in sorted(abortconflicts):
639 639 repo.ui.warn(_("%s: untracked file differs\n") % f)
640 640 if abortconflicts:
641 641 raise error.Abort(_("untracked files in working directory "
642 642 "differ from files in requested revision"))
643 643
644 644 for f in sorted(warnconflicts):
645 645 repo.ui.warn(_("%s: replacing untracked file\n") % f)
646 646
647 647 for f, (m, args, msg) in actions.iteritems():
648 648 backup = f in conflicts
649 649 if m == 'c':
650 650 flags, = args
651 651 actions[f] = ('g', (flags, backup), msg)
652 652 elif m == 'cm':
653 653 fl2, anc = args
654 654 different = _checkunknownfile(repo, wctx, mctx, f)
655 655 if different:
656 656 actions[f] = ('m', (f, f, None, False, anc),
657 657 "remote differs from untracked local")
658 658 else:
659 659 actions[f] = ('g', (fl2, backup), "remote created")
660 660
661 661 def _forgetremoved(wctx, mctx, branchmerge):
662 662 """
663 663 Forget removed files
664 664
665 665 If we're jumping between revisions (as opposed to merging), and if
666 666 neither the working directory nor the target rev has the file,
667 667 then we need to remove it from the dirstate, to prevent the
668 668 dirstate from listing the file when it is no longer in the
669 669 manifest.
670 670
671 671 If we're merging, and the other revision has removed a file
672 672 that is not present in the working directory, we need to mark it
673 673 as removed.
674 674 """
675 675
676 676 actions = {}
677 677 m = 'f'
678 678 if branchmerge:
679 679 m = 'r'
680 680 for f in wctx.deleted():
681 681 if f not in mctx:
682 682 actions[f] = m, None, "forget deleted"
683 683
684 684 if not branchmerge:
685 685 for f in wctx.removed():
686 686 if f not in mctx:
687 687 actions[f] = 'f', None, "forget removed"
688 688
689 689 return actions
690 690
691 691 def _checkcollision(repo, wmf, actions):
692 692 # build provisional merged manifest up
693 693 pmmf = set(wmf)
694 694
695 695 if actions:
696 696 # k, dr, e and rd are no-op
697 697 for m in 'a', 'am', 'f', 'g', 'cd', 'dc':
698 698 for f, args, msg in actions[m]:
699 699 pmmf.add(f)
700 700 for f, args, msg in actions['r']:
701 701 pmmf.discard(f)
702 702 for f, args, msg in actions['dm']:
703 703 f2, flags = args
704 704 pmmf.discard(f2)
705 705 pmmf.add(f)
706 706 for f, args, msg in actions['dg']:
707 707 pmmf.add(f)
708 708 for f, args, msg in actions['m']:
709 709 f1, f2, fa, move, anc = args
710 710 if move:
711 711 pmmf.discard(f1)
712 712 pmmf.add(f)
713 713
714 714 # check case-folding collision in provisional merged manifest
715 715 foldmap = {}
716 716 for f in sorted(pmmf):
717 717 fold = util.normcase(f)
718 718 if fold in foldmap:
719 719 raise error.Abort(_("case-folding collision between %s and %s")
720 720 % (f, foldmap[fold]))
721 721 foldmap[fold] = f
722 722
723 723 # check case-folding of directories
724 724 foldprefix = unfoldprefix = lastfull = ''
725 725 for fold, f in sorted(foldmap.items()):
726 726 if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
727 727 # the folded prefix matches but actual casing is different
728 728 raise error.Abort(_("case-folding collision between "
729 729 "%s and directory of %s") % (lastfull, f))
730 730 foldprefix = fold + '/'
731 731 unfoldprefix = f + '/'
732 732 lastfull = f
733 733
734 734 def driverpreprocess(repo, ms, wctx, labels=None):
735 735 """run the preprocess step of the merge driver, if any
736 736
737 737 This is currently not implemented -- it's an extension point."""
738 738 return True
739 739
740 740 def driverconclude(repo, ms, wctx, labels=None):
741 741 """run the conclude step of the merge driver, if any
742 742
743 743 This is currently not implemented -- it's an extension point."""
744 744 return True
745 745
746 746 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
747 747 acceptremote, followcopies):
748 748 """
749 749 Merge p1 and p2 with ancestor pa and generate merge action list
750 750
751 751 branchmerge and force are as passed in to update
752 752 matcher = matcher to filter file lists
753 753 acceptremote = accept the incoming changes without prompting
754 754 """
755 755 if matcher is not None and matcher.always():
756 756 matcher = None
757 757
758 758 copy, movewithdir, diverge, renamedelete = {}, {}, {}, {}
759 759
760 760 # manifests fetched in order are going to be faster, so prime the caches
761 761 [x.manifest() for x in
762 762 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
763 763
764 764 if followcopies:
765 765 ret = copies.mergecopies(repo, wctx, p2, pa)
766 766 copy, movewithdir, diverge, renamedelete = ret
767 767
768 768 repo.ui.note(_("resolving manifests\n"))
769 769 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
770 770 % (bool(branchmerge), bool(force), bool(matcher)))
771 771 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
772 772
773 773 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
774 774 copied = set(copy.values())
775 775 copied.update(movewithdir.values())
776 776
777 777 if '.hgsubstate' in m1:
778 778 # check whether sub state is modified
779 779 for s in sorted(wctx.substate):
780 780 if wctx.sub(s).dirty():
781 781 m1['.hgsubstate'] += '+'
782 782 break
783 783
784 784 # Compare manifests
785 785 if matcher is not None:
786 786 m1 = m1.matches(matcher)
787 787 m2 = m2.matches(matcher)
788 788 diff = m1.diff(m2)
789 789
790 790 actions = {}
791 791 for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
792 792 if n1 and n2: # file exists on both local and remote side
793 793 if f not in ma:
794 794 fa = copy.get(f, None)
795 795 if fa is not None:
796 796 actions[f] = ('m', (f, f, fa, False, pa.node()),
797 797 "both renamed from " + fa)
798 798 else:
799 799 actions[f] = ('m', (f, f, None, False, pa.node()),
800 800 "both created")
801 801 else:
802 802 a = ma[f]
803 803 fla = ma.flags(f)
804 804 nol = 'l' not in fl1 + fl2 + fla
805 805 if n2 == a and fl2 == fla:
806 806 actions[f] = ('k' , (), "remote unchanged")
807 807 elif n1 == a and fl1 == fla: # local unchanged - use remote
808 808 if n1 == n2: # optimization: keep local content
809 809 actions[f] = ('e', (fl2,), "update permissions")
810 810 else:
811 811 actions[f] = ('g', (fl2, False), "remote is newer")
812 812 elif nol and n2 == a: # remote only changed 'x'
813 813 actions[f] = ('e', (fl2,), "update permissions")
814 814 elif nol and n1 == a: # local only changed 'x'
815 815 actions[f] = ('g', (fl1, False), "remote is newer")
816 816 else: # both changed something
817 817 actions[f] = ('m', (f, f, f, False, pa.node()),
818 818 "versions differ")
819 819 elif n1: # file exists only on local side
820 820 if f in copied:
821 821 pass # we'll deal with it on m2 side
822 822 elif f in movewithdir: # directory rename, move local
823 823 f2 = movewithdir[f]
824 824 if f2 in m2:
825 825 actions[f2] = ('m', (f, f2, None, True, pa.node()),
826 826 "remote directory rename, both created")
827 827 else:
828 828 actions[f2] = ('dm', (f, fl1),
829 829 "remote directory rename - move from " + f)
830 830 elif f in copy:
831 831 f2 = copy[f]
832 832 actions[f] = ('m', (f, f2, f2, False, pa.node()),
833 833 "local copied/moved from " + f2)
834 834 elif f in ma: # clean, a different, no remote
835 835 if n1 != ma[f]:
836 836 if acceptremote:
837 837 actions[f] = ('r', None, "remote delete")
838 838 else:
839 839 actions[f] = ('cd', (f, None, f, False, pa.node()),
840 840 "prompt changed/deleted")
841 841 elif n1[20:] == 'a':
842 842 # This extra 'a' is added by the working copy manifest to mark
843 843 # the file as locally added. We should forget it instead of
844 844 # deleting it.
845 845 actions[f] = ('f', None, "remote deleted")
846 846 else:
847 847 actions[f] = ('r', None, "other deleted")
848 848 elif n2: # file exists only on remote side
849 849 if f in copied:
850 850 pass # we'll deal with it on m1 side
851 851 elif f in movewithdir:
852 852 f2 = movewithdir[f]
853 853 if f2 in m1:
854 854 actions[f2] = ('m', (f2, f, None, False, pa.node()),
855 855 "local directory rename, both created")
856 856 else:
857 857 actions[f2] = ('dg', (f, fl2),
858 858 "local directory rename - get from " + f)
859 859 elif f in copy:
860 860 f2 = copy[f]
861 861 if f2 in m2:
862 862 actions[f] = ('m', (f2, f, f2, False, pa.node()),
863 863 "remote copied from " + f2)
864 864 else:
865 865 actions[f] = ('m', (f2, f, f2, True, pa.node()),
866 866 "remote moved from " + f2)
867 867 elif f not in ma:
868 868 # local unknown, remote created: the logic is described by the
869 869 # following table:
870 870 #
871 871 # force branchmerge different | action
872 872 # n * * | create
873 873 # y n * | create
874 874 # y y n | create
875 875 # y y y | merge
876 876 #
877 877 # Checking whether the files are different is expensive, so we
878 878 # don't do that when we can avoid it.
879 879 if not force:
880 880 actions[f] = ('c', (fl2,), "remote created")
881 881 elif not branchmerge:
882 882 actions[f] = ('c', (fl2,), "remote created")
883 883 else:
884 884 actions[f] = ('cm', (fl2, pa.node()),
885 885 "remote created, get or merge")
886 886 elif n2 != ma[f]:
887 887 if acceptremote:
888 888 actions[f] = ('c', (fl2,), "remote recreating")
889 889 else:
890 890 actions[f] = ('dc', (None, f, f, False, pa.node()),
891 891 "prompt deleted/changed")
892 892
893 893 return actions, diverge, renamedelete
894 894
895 895 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
896 896 """Resolves false conflicts where the nodeid changed but the content
897 897 remained the same."""
898 898
899 899 for f, (m, args, msg) in actions.items():
900 900 if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
901 901 # local did change but ended up with same content
902 902 actions[f] = 'r', None, "prompt same"
903 903 elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
904 904 # remote did change but ended up with same content
905 905 del actions[f] # don't get = keep local deleted
906 906
907 907 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
908 acceptremote, followcopies, matcher=None):
908 acceptremote, followcopies, matcher=None,
909 mergeforce=False):
909 910 "Calculate the actions needed to merge mctx into wctx using ancestors"
910 911 if len(ancestors) == 1: # default
911 912 actions, diverge, renamedelete = manifestmerge(
912 913 repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
913 914 acceptremote, followcopies)
914 _checkunknownfiles(repo, wctx, mctx, force, actions)
915 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
915 916
916 917 else: # only when merge.preferancestor=* - the default
917 918 repo.ui.note(
918 919 _("note: merging %s and %s using bids from ancestors %s\n") %
919 920 (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
920 921
921 922 # Call for bids
922 923 fbids = {} # mapping filename to bids (action method to list of actions)
923 924 diverge, renamedelete = None, None
924 925 for ancestor in ancestors:
925 926 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
926 927 actions, diverge1, renamedelete1 = manifestmerge(
927 928 repo, wctx, mctx, ancestor, branchmerge, force, matcher,
928 929 acceptremote, followcopies)
929 _checkunknownfiles(repo, wctx, mctx, force, actions)
930 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
930 931
931 932 # Track the shortest set of warnings on the theory that bid
932 933 # merge will correctly incorporate more information
933 934 if diverge is None or len(diverge1) < len(diverge):
934 935 diverge = diverge1
935 936 if renamedelete is None or len(renamedelete) < len(renamedelete1):
936 937 renamedelete = renamedelete1
937 938
938 939 for f, a in sorted(actions.iteritems()):
939 940 m, args, msg = a
940 941 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
941 942 if f in fbids:
942 943 d = fbids[f]
943 944 if m in d:
944 945 d[m].append(a)
945 946 else:
946 947 d[m] = [a]
947 948 else:
948 949 fbids[f] = {m: [a]}
949 950
950 951 # Pick the best bid for each file
951 952 repo.ui.note(_('\nauction for merging merge bids\n'))
952 953 actions = {}
953 954 for f, bids in sorted(fbids.items()):
954 955 # bids is a mapping from action method to list of actions
955 956 # Consensus?
956 957 if len(bids) == 1: # all bids are the same kind of method
957 958 m, l = bids.items()[0]
958 959 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
959 960 repo.ui.note(" %s: consensus for %s\n" % (f, m))
960 961 actions[f] = l[0]
961 962 continue
962 963 # If keep is an option, just do it.
963 964 if 'k' in bids:
964 965 repo.ui.note(" %s: picking 'keep' action\n" % f)
965 966 actions[f] = bids['k'][0]
966 967 continue
967 968 # If there are gets and they all agree [how could they not?], do it.
968 969 if 'g' in bids:
969 970 ga0 = bids['g'][0]
970 971 if all(a == ga0 for a in bids['g'][1:]):
971 972 repo.ui.note(" %s: picking 'get' action\n" % f)
972 973 actions[f] = ga0
973 974 continue
974 975 # TODO: Consider other simple actions such as mode changes
975 976 # Handle inefficient democrazy.
976 977 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
977 978 for m, l in sorted(bids.items()):
978 979 for _f, args, msg in l:
979 980 repo.ui.note(' %s -> %s\n' % (msg, m))
980 981 # Pick random action. TODO: Instead, prompt user when resolving
981 982 m, l = bids.items()[0]
982 983 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
983 984 (f, m))
984 985 actions[f] = l[0]
985 986 continue
986 987 repo.ui.note(_('end of auction\n\n'))
987 988
988 989 _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
989 990
990 991 if wctx.rev() is None:
991 992 fractions = _forgetremoved(wctx, mctx, branchmerge)
992 993 actions.update(fractions)
993 994
994 995 return actions, diverge, renamedelete
995 996
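Editor's note: the auction above folds per-ancestor bids into fbids, a mapping from filename to a dict of action code to proposed actions, and then prefers consensus, then 'keep', then 'get'. The following is a minimal, simplified sketch of that data shape and of the pick order, using hypothetical file names; it is an illustration, not the real merge code.

fbids = {
    'a.txt': {'g': [('a.txt', ('', False), 'remote created')]},
    'b.txt': {'k': [('b.txt', None, 'keep')],
              'g': [('b.txt', ('', False), 'remote is newer')]},
}

def pickbid(bids):
    if len(bids) == 1:                     # consensus on the kind of action
        return list(bids.values())[0][0]
    if 'k' in bids:                        # keeping the local file is safe
        return bids['k'][0]
    return sorted(bids.items())[0][1][0]   # arbitrary pick; real code warns

chosen = dict((f, pickbid(b)) for f, b in sorted(fbids.items()))
# chosen['b.txt'] == ('b.txt', None, 'keep')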
996 997 def batchremove(repo, actions):
997 998 """apply removes to the working directory
998 999
999 1000 yields tuples for progress updates
1000 1001 """
1001 1002 verbose = repo.ui.verbose
1002 1003 unlink = util.unlinkpath
1003 1004 wjoin = repo.wjoin
1004 1005 audit = repo.wvfs.audit
1005 1006 i = 0
1006 1007 for f, args, msg in actions:
1007 1008 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1008 1009 if verbose:
1009 1010 repo.ui.note(_("removing %s\n") % f)
1010 1011 audit(f)
1011 1012 try:
1012 1013 unlink(wjoin(f), ignoremissing=True)
1013 1014 except OSError as inst:
1014 1015 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1015 1016 (f, inst.strerror))
1016 1017 if i == 100:
1017 1018 yield i, f
1018 1019 i = 0
1019 1020 i += 1
1020 1021 if i > 0:
1021 1022 yield i, f
1022 1023
1023 1024 def batchget(repo, mctx, actions):
1024 1025 """apply gets to the working directory
1025 1026
1026 1027 mctx is the context to get from
1027 1028
1028 1029 yields tuples for progress updates
1029 1030 """
1030 1031 verbose = repo.ui.verbose
1031 1032 fctx = mctx.filectx
1032 1033 wwrite = repo.wwrite
1033 1034 ui = repo.ui
1034 1035 i = 0
1035 1036 for f, (flags, backup), msg in actions:
1036 1037 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1037 1038 if verbose:
1038 1039 repo.ui.note(_("getting %s\n") % f)
1039 1040
1040 1041 if backup:
1041 1042 absf = repo.wjoin(f)
1042 1043 orig = scmutil.origpath(ui, repo, absf)
1043 1044 try:
1044 1045 # TODO Mercurial has always aborted if an untracked directory
1045 1046 # is replaced by a tracked file, or generally with
1046 1047 # file/directory merges. This needs to be sorted out.
1047 1048 if repo.wvfs.isfileorlink(f):
1048 1049 util.rename(absf, orig)
1049 1050 except OSError as e:
1050 1051 if e.errno != errno.ENOENT:
1051 1052 raise
1052 1053
1053 1054 wwrite(f, fctx(f).data(), flags)
1054 1055 if i == 100:
1055 1056 yield i, f
1056 1057 i = 0
1057 1058 i += 1
1058 1059 if i > 0:
1059 1060 yield i, f
1060 1061
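Editor's note: both generators above batch their progress reports, yielding a (count, filename) tuple only about every 100 files so the worker loop can update the progress bar cheaply. The sketch below shows that counting pattern in isolation, with hypothetical item names and the per-item work elided; it is not the real batchremove/batchget.

def batchedwork(items, batchsize=100):
    # yield (count, lastitem) roughly every `batchsize` items, plus a final
    # partial batch, mirroring the counting scheme used above
    i = 0
    item = None
    for item in items:
        # ... per-item work (unlink / wwrite in the real code) ...
        if i == batchsize:
            yield i, item
            i = 0
        i += 1
    if i > 0:
        yield i, item

done = 0
for n, item in batchedwork('file%d' % k for k in range(250)):
    done += n          # a real caller feeds this to ui.progress()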
1061 1062 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1062 1063 """apply the merge action list to the working directory
1063 1064
1064 1065 wctx is the working copy context
1065 1066 mctx is the context to be merged into the working copy
1066 1067
1067 1068 Return a tuple of counts (updated, merged, removed, unresolved) that
1068 1069 describes how many files were affected by the update.
1069 1070 """
1070 1071
1071 1072 updated, merged, removed = 0, 0, 0
1072 1073 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node())
1073 1074 moves = []
1074 1075 for m, l in actions.items():
1075 1076 l.sort()
1076 1077
1077 1078 # 'cd' and 'dc' actions are treated like other merge conflicts
1078 1079 mergeactions = sorted(actions['cd'])
1079 1080 mergeactions.extend(sorted(actions['dc']))
1080 1081 mergeactions.extend(actions['m'])
1081 1082 for f, args, msg in mergeactions:
1082 1083 f1, f2, fa, move, anc = args
1083 1084 if f == '.hgsubstate': # merged internally
1084 1085 continue
1085 1086 if f1 is None:
1086 1087 fcl = filemerge.absentfilectx(wctx, fa)
1087 1088 else:
1088 1089 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1089 1090 fcl = wctx[f1]
1090 1091 if f2 is None:
1091 1092 fco = filemerge.absentfilectx(mctx, fa)
1092 1093 else:
1093 1094 fco = mctx[f2]
1094 1095 actx = repo[anc]
1095 1096 if fa in actx:
1096 1097 fca = actx[fa]
1097 1098 else:
1098 1099 # TODO: move to absentfilectx
1099 1100 fca = repo.filectx(f1, fileid=nullrev)
1100 1101 ms.add(fcl, fco, fca, f)
1101 1102 if f1 != f and move:
1102 1103 moves.append(f1)
1103 1104
1104 1105 audit = repo.wvfs.audit
1105 1106 _updating = _('updating')
1106 1107 _files = _('files')
1107 1108 progress = repo.ui.progress
1108 1109
1109 1110 # remove renamed files after safely stored
1110 1111 for f in moves:
1111 1112 if os.path.lexists(repo.wjoin(f)):
1112 1113 repo.ui.debug("removing %s\n" % f)
1113 1114 audit(f)
1114 1115 util.unlinkpath(repo.wjoin(f))
1115 1116
1116 1117 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
1117 1118
1118 1119 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
1119 1120 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
1120 1121
1121 1122 # remove in parallel (must come first)
1122 1123 z = 0
1123 1124 prog = worker.worker(repo.ui, 0.001, batchremove, (repo,), actions['r'])
1124 1125 for i, item in prog:
1125 1126 z += i
1126 1127 progress(_updating, z, item=item, total=numupdates, unit=_files)
1127 1128 removed = len(actions['r'])
1128 1129
1129 1130 # get in parallel
1130 1131 prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx), actions['g'])
1131 1132 for i, item in prog:
1132 1133 z += i
1133 1134 progress(_updating, z, item=item, total=numupdates, unit=_files)
1134 1135 updated = len(actions['g'])
1135 1136
1136 1137 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
1137 1138 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
1138 1139
1139 1140 # forget (manifest only, just log it) (must come first)
1140 1141 for f, args, msg in actions['f']:
1141 1142 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1142 1143 z += 1
1143 1144 progress(_updating, z, item=f, total=numupdates, unit=_files)
1144 1145
1145 1146 # re-add (manifest only, just log it)
1146 1147 for f, args, msg in actions['a']:
1147 1148 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1148 1149 z += 1
1149 1150 progress(_updating, z, item=f, total=numupdates, unit=_files)
1150 1151
1151 1152 # re-add/mark as modified (manifest only, just log it)
1152 1153 for f, args, msg in actions['am']:
1153 1154 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1154 1155 z += 1
1155 1156 progress(_updating, z, item=f, total=numupdates, unit=_files)
1156 1157
1157 1158 # keep (noop, just log it)
1158 1159 for f, args, msg in actions['k']:
1159 1160 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1160 1161 # no progress
1161 1162
1162 1163 # directory rename, move local
1163 1164 for f, args, msg in actions['dm']:
1164 1165 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1165 1166 z += 1
1166 1167 progress(_updating, z, item=f, total=numupdates, unit=_files)
1167 1168 f0, flags = args
1168 1169 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1169 1170 audit(f)
1170 1171 repo.wwrite(f, wctx.filectx(f0).data(), flags)
1171 1172 util.unlinkpath(repo.wjoin(f0))
1172 1173 updated += 1
1173 1174
1174 1175 # local directory rename, get
1175 1176 for f, args, msg in actions['dg']:
1176 1177 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1177 1178 z += 1
1178 1179 progress(_updating, z, item=f, total=numupdates, unit=_files)
1179 1180 f0, flags = args
1180 1181 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1181 1182 repo.wwrite(f, mctx.filectx(f0).data(), flags)
1182 1183 updated += 1
1183 1184
1184 1185 # exec
1185 1186 for f, args, msg in actions['e']:
1186 1187 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1187 1188 z += 1
1188 1189 progress(_updating, z, item=f, total=numupdates, unit=_files)
1189 1190 flags, = args
1190 1191 audit(f)
1191 1192 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
1192 1193 updated += 1
1193 1194
1194 1195 # the ordering is important here -- ms.mergedriver will raise if the merge
1195 1196 # driver has changed, and we want to be able to bypass it when overwrite is
1196 1197 # True
1197 1198 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1198 1199
1199 1200 if usemergedriver:
1200 1201 ms.commit()
1201 1202 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1202 1203 # the driver might leave some files unresolved
1203 1204 unresolvedf = set(ms.unresolved())
1204 1205 if not proceed:
1205 1206 # XXX setting unresolved to at least 1 is a hack to make sure we
1206 1207 # error out
1207 1208 return updated, merged, removed, max(len(unresolvedf), 1)
1208 1209 newactions = []
1209 1210 for f, args, msg in mergeactions:
1210 1211 if f in unresolvedf:
1211 1212 newactions.append((f, args, msg))
1212 1213 mergeactions = newactions
1213 1214
1214 1215 # premerge
1215 1216 tocomplete = []
1216 1217 for f, args, msg in mergeactions:
1217 1218 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1218 1219 z += 1
1219 1220 progress(_updating, z, item=f, total=numupdates, unit=_files)
1220 1221 if f == '.hgsubstate': # subrepo states need updating
1221 1222 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1222 1223 overwrite)
1223 1224 continue
1224 1225 audit(f)
1225 1226 complete, r = ms.preresolve(f, wctx, labels=labels)
1226 1227 if not complete:
1227 1228 numupdates += 1
1228 1229 tocomplete.append((f, args, msg))
1229 1230
1230 1231 # merge
1231 1232 for f, args, msg in tocomplete:
1232 1233 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1233 1234 z += 1
1234 1235 progress(_updating, z, item=f, total=numupdates, unit=_files)
1235 1236 ms.resolve(f, wctx, labels=labels)
1236 1237
1237 1238 ms.commit()
1238 1239
1239 1240 unresolved = ms.unresolvedcount()
1240 1241
1241 1242 if usemergedriver and not unresolved and ms.mdstate() != 's':
1242 1243 if not driverconclude(repo, ms, wctx, labels=labels):
1243 1244 # XXX setting unresolved to at least 1 is a hack to make sure we
1244 1245 # error out
1245 1246 unresolved = max(unresolved, 1)
1246 1247
1247 1248 ms.commit()
1248 1249
1249 1250 msupdated, msmerged, msremoved = ms.counts()
1250 1251 updated += msupdated
1251 1252 merged += msmerged
1252 1253 removed += msremoved
1253 1254
1254 1255 extraactions = ms.actions()
1255 1256 for k, acts in extraactions.iteritems():
1256 1257 actions[k].extend(acts)
1257 1258
1258 1259 progress(_updating, None, total=numupdates, unit=_files)
1259 1260
1260 1261 return updated, merged, removed, unresolved
1261 1262
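Editor's note: applyupdates() consumes a dictionary-of-lists view of the actions, keyed by action code, where each entry is a (filename, args, message) tuple and the shape of args depends on the code, matching the unpacking done above (for example (flags, backup) for 'g' and (f1, f2, fa, move, anc) for 'm'). A hedged sketch of that shape with made-up file names:

actions = {
    'r':  [('gone.txt', None, 'other deleted')],            # remove
    'g':  [('new.txt', ('', False), 'remote created')],     # get: (flags, backup)
    'm':  [('both.txt',
            ('both.txt', 'both.txt', 'both.txt', False, 'ancestornode'),
            'versions differ')],                            # merge: f1, f2, fa, move, anc
    'k':  [('same.txt', None, 'keep')],                     # keep (no-op)
    'e':  [('tool.sh', ('x',), 'update permissions')],      # exec flag only
}
# the remaining codes ('a', 'am', 'f', 'cd', 'dc', 'dm', 'dg') map to lists as
# well; update() below builds this dict before calling applyupdates()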
1262 1263 def recordupdates(repo, actions, branchmerge):
1263 1264 "record merge actions to the dirstate"
1264 1265 # remove (must come first)
1265 1266 for f, args, msg in actions.get('r', []):
1266 1267 if branchmerge:
1267 1268 repo.dirstate.remove(f)
1268 1269 else:
1269 1270 repo.dirstate.drop(f)
1270 1271
1271 1272 # forget (must come first)
1272 1273 for f, args, msg in actions.get('f', []):
1273 1274 repo.dirstate.drop(f)
1274 1275
1275 1276 # re-add
1276 1277 for f, args, msg in actions.get('a', []):
1277 1278 repo.dirstate.add(f)
1278 1279
1279 1280 # re-add/mark as modified
1280 1281 for f, args, msg in actions.get('am', []):
1281 1282 if branchmerge:
1282 1283 repo.dirstate.normallookup(f)
1283 1284 else:
1284 1285 repo.dirstate.add(f)
1285 1286
1286 1287 # exec change
1287 1288 for f, args, msg in actions.get('e', []):
1288 1289 repo.dirstate.normallookup(f)
1289 1290
1290 1291 # keep
1291 1292 for f, args, msg in actions.get('k', []):
1292 1293 pass
1293 1294
1294 1295 # get
1295 1296 for f, args, msg in actions.get('g', []):
1296 1297 if branchmerge:
1297 1298 repo.dirstate.otherparent(f)
1298 1299 else:
1299 1300 repo.dirstate.normal(f)
1300 1301
1301 1302 # merge
1302 1303 for f, args, msg in actions.get('m', []):
1303 1304 f1, f2, fa, move, anc = args
1304 1305 if branchmerge:
1305 1306 # We've done a branch merge, mark this file as merged
1306 1307 # so that we properly record the merger later
1307 1308 repo.dirstate.merge(f)
1308 1309 if f1 != f2: # copy/rename
1309 1310 if move:
1310 1311 repo.dirstate.remove(f1)
1311 1312 if f1 != f:
1312 1313 repo.dirstate.copy(f1, f)
1313 1314 else:
1314 1315 repo.dirstate.copy(f2, f)
1315 1316 else:
1316 1317 # We've update-merged a locally modified file, so
1317 1318 # we set the dirstate to emulate a normal checkout
1318 1319 # of that file some time in the past. Thus our
1319 1320 # merge will appear as a normal local file
1320 1321 # modification.
1321 1322 if f2 == f: # file not locally copied/moved
1322 1323 repo.dirstate.normallookup(f)
1323 1324 if move:
1324 1325 repo.dirstate.drop(f1)
1325 1326
1326 1327 # directory rename, move local
1327 1328 for f, args, msg in actions.get('dm', []):
1328 1329 f0, flag = args
1329 1330 if branchmerge:
1330 1331 repo.dirstate.add(f)
1331 1332 repo.dirstate.remove(f0)
1332 1333 repo.dirstate.copy(f0, f)
1333 1334 else:
1334 1335 repo.dirstate.normal(f)
1335 1336 repo.dirstate.drop(f0)
1336 1337
1337 1338 # directory rename, get
1338 1339 for f, args, msg in actions.get('dg', []):
1339 1340 f0, flag = args
1340 1341 if branchmerge:
1341 1342 repo.dirstate.add(f)
1342 1343 repo.dirstate.copy(f0, f)
1343 1344 else:
1344 1345 repo.dirstate.normal(f)
1345 1346
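Editor's note: as the comments above indicate, recordupdates() translates each action code into different dirstate calls depending on whether this is a branch merge or a plain update. A condensed, editor-written summary of a few of those mappings, for orientation only (the strings name the dirstate calls made above; this is not an API):

DIRSTATE_EFFECT = {
    #  code    during a branch merge      during a plain update
    'r':   ('dirstate.remove(f)',        'dirstate.drop(f)'),
    'g':   ('dirstate.otherparent(f)',   'dirstate.normal(f)'),
    'am':  ('dirstate.normallookup(f)',  'dirstate.add(f)'),
    'dm':  ('add(f); remove(f0); copy',  'normal(f); drop(f0)'),
}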
1346 1347 def update(repo, node, branchmerge, force, ancestor=None,
1347 mergeancestor=False, labels=None, matcher=None):
1348 mergeancestor=False, labels=None, matcher=None, mergeforce=False):
1348 1349 """
1349 1350 Perform a merge between the working directory and the given node
1350 1351
1351 1352 node = the node to update to, or None if unspecified
1352 1353 branchmerge = whether to merge between branches
1353 1354 force = whether to force branch merging or file overwriting
1354 1355 matcher = a matcher to filter file lists (dirstate not updated)
1355 1356 mergeancestor = whether it is merging with an ancestor. If true,
1356 1357 we should accept the incoming changes for any prompts that occur.
1357 1358 If false, merging with an ancestor (fast-forward) is only allowed
1358 1359 between different named branches. This flag is used by rebase extension
1359 1360 as a temporary fix and should be avoided in general.
1360 1361 labels = labels to use for base, local and other
1362 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1363 this is True, then 'force' should be True as well.
1361 1364
1362 1365 The table below shows all the behaviors of the update command
1363 1366 given the -c and -C or no options, whether the working directory
1364 1367 is dirty, whether a revision is specified, and the relationship of
1365 1368 the parent rev to the target rev (linear, on the same named
1366 1369 branch, or on another named branch).
1367 1370
1368 1371 This logic is tested by test-update-branches.t.
1369 1372
1370 1373 -c -C dirty rev | linear same cross
1371 1374 n n n n | ok (1) x
1372 1375 n n n y | ok ok ok
1373 1376 n n y n | merge (2) (2)
1374 1377 n n y y | merge (3) (3)
1375 1378 n y * * | discard discard discard
1376 1379 y n y * | (4) (4) (4)
1377 1380 y n n * | ok ok ok
1378 1381 y y * * | (5) (5) (5)
1379 1382
1380 1383 x = can't happen
1381 1384 * = don't-care
1382 1385 1 = abort: not a linear update (merge or update --check to force update)
1383 1386 2 = abort: uncommitted changes (commit and merge, or update --clean to
1384 1387 discard changes)
1385 1388 3 = abort: uncommitted changes (commit or update --clean to discard changes)
1386 1389 4 = abort: uncommitted changes (checked in commands.py)
1387 1390 5 = incompatible options (checked in commands.py)
1388 1391
1389 1392 Return the same tuple as applyupdates().
1390 1393 """
1391 1394
1392 1395 onode = node
1393 1396 # If we're doing a partial update, we need to skip updating
1394 1397 # the dirstate, so make a note of any partial-ness to the
1395 1398 # update here.
1396 1399 if matcher is None or matcher.always():
1397 1400 partial = False
1398 1401 else:
1399 1402 partial = True
1400 1403 with repo.wlock():
1401 1404 wc = repo[None]
1402 1405 pl = wc.parents()
1403 1406 p1 = pl[0]
1404 1407 pas = [None]
1405 1408 if ancestor is not None:
1406 1409 pas = [repo[ancestor]]
1407 1410
1408 1411 if node is None:
1409 1412 if (repo.ui.configbool('devel', 'all-warnings')
1410 1413 or repo.ui.configbool('devel', 'oldapi')):
1411 1414 repo.ui.develwarn('update with no target')
1412 1415 rev, _mark, _act = destutil.destupdate(repo)
1413 1416 node = repo[rev].node()
1414 1417
1415 1418 overwrite = force and not branchmerge
1416 1419
1417 1420 p2 = repo[node]
1418 1421 if pas[0] is None:
1419 1422 if repo.ui.configlist('merge', 'preferancestor', ['*']) == ['*']:
1420 1423 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1421 1424 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1422 1425 else:
1423 1426 pas = [p1.ancestor(p2, warn=branchmerge)]
1424 1427
1425 1428 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
1426 1429
1427 1430 ### check phase
1428 1431 if not overwrite:
1429 1432 if len(pl) > 1:
1430 1433 raise error.Abort(_("outstanding uncommitted merge"))
1431 1434 ms = mergestate.read(repo)
1432 1435 if list(ms.unresolved()):
1433 1436 raise error.Abort(_("outstanding merge conflicts"))
1434 1437 if branchmerge:
1435 1438 if pas == [p2]:
1436 1439 raise error.Abort(_("merging with a working directory ancestor"
1437 1440 " has no effect"))
1438 1441 elif pas == [p1]:
1439 1442 if not mergeancestor and p1.branch() == p2.branch():
1440 1443 raise error.Abort(_("nothing to merge"),
1441 1444 hint=_("use 'hg update' "
1442 1445 "or check 'hg heads'"))
1443 1446 if not force and (wc.files() or wc.deleted()):
1444 1447 raise error.Abort(_("uncommitted changes"),
1445 1448 hint=_("use 'hg status' to list changes"))
1446 1449 for s in sorted(wc.substate):
1447 1450 wc.sub(s).bailifchanged()
1448 1451
1449 1452 elif not overwrite:
1450 1453 if p1 == p2: # no-op update
1451 1454 # call the hooks and exit early
1452 1455 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
1453 1456 repo.hook('update', parent1=xp2, parent2='', error=0)
1454 1457 return 0, 0, 0, 0
1455 1458
1456 1459 if pas not in ([p1], [p2]): # nonlinear
1457 1460 dirty = wc.dirty(missing=True)
1458 1461 if dirty or onode is None:
1459 1462 # Branching is a bit strange to ensure we do the minimal
1460 1463 # amount of call to obsolete.background.
1461 1464 foreground = obsolete.foreground(repo, [p1.node()])
1462 1465 # note: the <node> variable contains a random identifier
1463 1466 if repo[node].node() in foreground:
1464 1467 pas = [p1] # allow updating to successors
1465 1468 elif dirty:
1466 1469 msg = _("uncommitted changes")
1467 1470 if onode is None:
1468 1471 hint = _("commit and merge, or update --clean to"
1469 1472 " discard changes")
1470 1473 else:
1471 1474 hint = _("commit or update --clean to discard"
1472 1475 " changes")
1473 1476 raise error.Abort(msg, hint=hint)
1474 1477 else: # node is none
1475 1478 msg = _("not a linear update")
1476 1479 hint = _("merge or update --check to force update")
1477 1480 raise error.Abort(msg, hint=hint)
1478 1481 else:
1479 1482 # Allow jumping branches if clean and specific rev given
1480 1483 pas = [p1]
1481 1484
1482 1485 # deprecated config: merge.followcopies
1483 1486 followcopies = False
1484 1487 if overwrite:
1485 1488 pas = [wc]
1486 1489 elif pas == [p2]: # backwards
1487 1490 pas = [wc.p1()]
1488 1491 elif not branchmerge and not wc.dirty(missing=True):
1489 1492 pass
1490 1493 elif pas[0] and repo.ui.configbool('merge', 'followcopies', True):
1491 1494 followcopies = True
1492 1495
1493 1496 ### calculate phase
1494 1497 actionbyfile, diverge, renamedelete = calculateupdates(
1495 1498 repo, wc, p2, pas, branchmerge, force, mergeancestor,
1496 followcopies, matcher=matcher)
1499 followcopies, matcher=matcher, mergeforce=mergeforce)
1497 1500
1498 1501 # Prompt and create actions. Most of this is in the resolve phase
1499 1502 # already, but we can't handle .hgsubstate in filemerge or
1500 1503 # subrepo.submerge yet so we have to keep prompting for it.
1501 1504 if '.hgsubstate' in actionbyfile:
1502 1505 f = '.hgsubstate'
1503 1506 m, args, msg = actionbyfile[f]
1504 1507 if m == 'cd':
1505 1508 if repo.ui.promptchoice(
1506 1509 _("local changed %s which remote deleted\n"
1507 1510 "use (c)hanged version or (d)elete?"
1508 1511 "$$ &Changed $$ &Delete") % f, 0):
1509 1512 actionbyfile[f] = ('r', None, "prompt delete")
1510 1513 elif f in p1:
1511 1514 actionbyfile[f] = ('am', None, "prompt keep")
1512 1515 else:
1513 1516 actionbyfile[f] = ('a', None, "prompt keep")
1514 1517 elif m == 'dc':
1515 1518 f1, f2, fa, move, anc = args
1516 1519 flags = p2[f2].flags()
1517 1520 if repo.ui.promptchoice(
1518 1521 _("remote changed %s which local deleted\n"
1519 1522 "use (c)hanged version or leave (d)eleted?"
1520 1523 "$$ &Changed $$ &Deleted") % f, 0) == 0:
1521 1524 actionbyfile[f] = ('g', (flags, False), "prompt recreating")
1522 1525 else:
1523 1526 del actionbyfile[f]
1524 1527
1525 1528 # Convert to dictionary-of-lists format
1526 1529 actions = dict((m, []) for m in 'a am f g cd dc r dm dg m e k'.split())
1527 1530 for f, (m, args, msg) in actionbyfile.iteritems():
1528 1531 if m not in actions:
1529 1532 actions[m] = []
1530 1533 actions[m].append((f, args, msg))
1531 1534
1532 1535 if not util.checkcase(repo.path):
1533 1536 # check collision between files only in p2 for clean update
1534 1537 if (not branchmerge and
1535 1538 (force or not wc.dirty(missing=True, branch=False))):
1536 1539 _checkcollision(repo, p2.manifest(), None)
1537 1540 else:
1538 1541 _checkcollision(repo, wc.manifest(), actions)
1539 1542
1540 1543 # divergent renames
1541 1544 for f, fl in sorted(diverge.iteritems()):
1542 1545 repo.ui.warn(_("note: possible conflict - %s was renamed "
1543 1546 "multiple times to:\n") % f)
1544 1547 for nf in fl:
1545 1548 repo.ui.warn(" %s\n" % nf)
1546 1549
1547 1550 # rename and delete
1548 1551 for f, fl in sorted(renamedelete.iteritems()):
1549 1552 repo.ui.warn(_("note: possible conflict - %s was deleted "
1550 1553 "and renamed to:\n") % f)
1551 1554 for nf in fl:
1552 1555 repo.ui.warn(" %s\n" % nf)
1553 1556
1554 1557 ### apply phase
1555 1558 if not branchmerge: # just jump to the new rev
1556 1559 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
1557 1560 if not partial:
1558 1561 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
1559 1562 # note that we're in the middle of an update
1560 1563 repo.vfs.write('updatestate', p2.hex())
1561 1564
1562 1565 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
1563 1566
1564 1567 if not partial:
1565 1568 repo.dirstate.beginparentchange()
1566 1569 repo.setparents(fp1, fp2)
1567 1570 recordupdates(repo, actions, branchmerge)
1568 1571 # update completed, clear state
1569 1572 util.unlink(repo.join('updatestate'))
1570 1573
1571 1574 if not branchmerge:
1572 1575 repo.dirstate.setbranch(p2.branch())
1573 1576 repo.dirstate.endparentchange()
1574 1577
1575 1578 if not partial:
1576 1579 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
1577 1580 return stats
1578 1581
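Editor's note: the mergeforce argument added in this change flows from update() into calculateupdates() and on to _checkunknownfiles(), so the unknown-file checks can distinguish a plain forced update from 'hg merge --force'. The sketch below shows how a caller might thread it through; it is a hypothetical wrapper, not the corresponding commands.py hunk (which is not part of this hunk), and assumes the mercurial package is importable.

def runmerge(repo, node, labels=None, force=False):
    # hypothetical caller; per the docstring above, mergeforce is True only
    # when the user ran 'hg merge --force', and then force must be True too
    from mercurial import merge as mergemod
    return mergemod.update(repo, node, branchmerge=True, force=force,
                           mergeforce=force, labels=labels)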
1579 1582 def graft(repo, ctx, pctx, labels, keepparent=False):
1580 1583 """Do a graft-like merge.
1581 1584
1582 1585 This is a merge where the merge ancestor is chosen such that one
1583 1586 or more changesets are grafted onto the current changeset. In
1584 1587 addition to the merge, this fixes up the dirstate to include only
1585 1588 a single parent (if keepparent is False) and tries to duplicate any
1586 1589 renames/copies appropriately.
1587 1590
1588 1591 ctx - changeset to rebase
1589 1592 pctx - merge base, usually ctx.p1()
1590 1593 labels - merge labels eg ['local', 'graft']
1591 1594 keepparent - keep second parent if any
1592 1595
1593 1596 """
1594 1597 # If we're grafting a descendant onto an ancestor, be sure to pass
1595 1598 # mergeancestor=True to update. This does two things: 1) allows the merge if
1596 1599 # the destination is the same as the parent of the ctx (so we can use graft
1597 1600 # to copy commits), and 2) informs update that the incoming changes are
1598 1601 # newer than the destination so it doesn't prompt about "remote changed foo
1599 1602 # which local deleted".
1600 1603 mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
1601 1604
1602 1605 stats = update(repo, ctx.node(), True, True, pctx.node(),
1603 1606 mergeancestor=mergeancestor, labels=labels)
1604 1607
1605 1608 pother = nullid
1606 1609 parents = ctx.parents()
1607 1610 if keepparent and len(parents) == 2 and pctx in parents:
1608 1611 parents.remove(pctx)
1609 1612 pother = parents[0].node()
1610 1613
1611 1614 repo.dirstate.beginparentchange()
1612 1615 repo.setparents(repo['.'].node(), pother)
1613 1616 repo.dirstate.write(repo.currenttransaction())
1614 1617 # fix up dirstate for copies and renames
1615 1618 copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
1616 1619 repo.dirstate.endparentchange()
1617 1620 return stats