scmutil: pass a matcher to scmutil.addremove() instead of a list of patterns...
Matt Harbison
r23533:891aaa7c default
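This changeset switches scmutil.addremove() from taking a list of patterns to taking a pre-built matcher, so each caller now constructs the matcher itself. A minimal sketch of the calling convention implied by the hunks below (the helper name addremove_dryrun is illustrative, not part of the change; pats and opts are the caller's pattern list and option dict, as in the diff):

    from mercurial import scmutil

    def addremove_dryrun(repo, pats, opts):
        # before this change the call was roughly:
        #   scmutil.addremove(repo, pats, opts, dry_run=True)
        # after it, the caller builds the matcher once and passes it in:
        matcher = scmutil.match(repo[None], pats, opts)
        return scmutil.addremove(repo, matcher, opts, dry_run=True)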
@@ -1,536 +1,537
1 1 # perf.py - performance test routines
2 2 '''helper extension to measure performance'''
3 3
4 4 from mercurial import cmdutil, scmutil, util, commands, obsolete
5 5 from mercurial import repoview, branchmap, merge, copies
6 6 import time, os, sys
7 7 import functools
8 8
9 9 cmdtable = {}
10 10 command = cmdutil.command(cmdtable)
11 11
12 12 def gettimer(ui, opts=None):
13 13 """return a timer function and formatter: (timer, formatter)
14 14
15 15 This function exists to gather the creation of the formatter in a single
16 16 place instead of duplicating it in every performance command."""
17 17 if opts is None:
18 18 opts = {}
19 19 # redirect all to stderr
20 20 ui = ui.copy()
21 21 ui.fout = ui.ferr
22 22 # get a formatter
23 23 fm = ui.formatter('perf', opts)
24 24 return functools.partial(_timer, fm), fm
25 25
26 26 def _timer(fm, func, title=None):
27 27 results = []
28 28 begin = time.time()
29 29 count = 0
30 30 while True:
31 31 ostart = os.times()
32 32 cstart = time.time()
33 33 r = func()
34 34 cstop = time.time()
35 35 ostop = os.times()
36 36 count += 1
37 37 a, b = ostart, ostop
38 38 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
39 39 if cstop - begin > 3 and count >= 100:
40 40 break
41 41 if cstop - begin > 10 and count >= 3:
42 42 break
43 43
44 44 fm.startitem()
45 45
46 46 if title:
47 47 fm.write('title', '! %s\n', title)
48 48 if r:
49 49 fm.write('result', '! result: %s\n', r)
50 50 m = min(results)
51 51 fm.plain('!')
52 52 fm.write('wall', ' wall %f', m[0])
53 53 fm.write('comb', ' comb %f', m[1] + m[2])
54 54 fm.write('user', ' user %f', m[1])
55 55 fm.write('sys', ' sys %f', m[2])
56 56 fm.write('count', ' (best of %d)', count)
57 57 fm.plain('\n')
58 58
59 59 @command('perfwalk')
60 60 def perfwalk(ui, repo, *pats):
61 61 timer, fm = gettimer(ui)
62 62 try:
63 63 m = scmutil.match(repo[None], pats, {})
64 64 timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
65 65 except Exception:
66 66 try:
67 67 m = scmutil.match(repo[None], pats, {})
68 68 timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
69 69 except Exception:
70 70 timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
71 71 fm.end()
72 72
73 73 @command('perfannotate')
74 74 def perfannotate(ui, repo, f):
75 75 timer, fm = gettimer(ui)
76 76 fc = repo['.'][f]
77 77 timer(lambda: len(fc.annotate(True)))
78 78 fm.end()
79 79
80 80 @command('perfstatus',
81 81 [('u', 'unknown', False,
82 82 'ask status to look for unknown files')])
83 83 def perfstatus(ui, repo, **opts):
84 84 #m = match.always(repo.root, repo.getcwd())
85 85 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
86 86 # False))))
87 87 timer, fm = gettimer(ui)
88 88 timer(lambda: sum(map(len, repo.status(**opts))))
89 89 fm.end()
90 90
91 91 @command('perfaddremove')
92 92 def perfaddremove(ui, repo):
93 93 timer, fm = gettimer(ui)
94 94 try:
95 95 oldquiet = repo.ui.quiet
96 96 repo.ui.quiet = True
97 timer(lambda: scmutil.addremove(repo, dry_run=True))
97 matcher = scmutil.match(repo[None])
98 timer(lambda: scmutil.addremove(repo, matcher, dry_run=True))
98 99 finally:
99 100 repo.ui.quiet = oldquiet
100 101 fm.end()
101 102
102 103 def clearcaches(cl):
103 104 # behave somewhat consistently across internal API changes
104 105 if util.safehasattr(cl, 'clearcaches'):
105 106 cl.clearcaches()
106 107 elif util.safehasattr(cl, '_nodecache'):
107 108 from mercurial.node import nullid, nullrev
108 109 cl._nodecache = {nullid: nullrev}
109 110 cl._nodepos = None
110 111
111 112 @command('perfheads')
112 113 def perfheads(ui, repo):
113 114 timer, fm = gettimer(ui)
114 115 cl = repo.changelog
115 116 def d():
116 117 len(cl.headrevs())
117 118 clearcaches(cl)
118 119 timer(d)
119 120 fm.end()
120 121
121 122 @command('perftags')
122 123 def perftags(ui, repo):
123 124 import mercurial.changelog
124 125 import mercurial.manifest
125 126 timer, fm = gettimer(ui)
126 127 def t():
127 128 repo.changelog = mercurial.changelog.changelog(repo.sopener)
128 129 repo.manifest = mercurial.manifest.manifest(repo.sopener)
129 130 repo._tags = None
130 131 return len(repo.tags())
131 132 timer(t)
132 133 fm.end()
133 134
134 135 @command('perfancestors')
135 136 def perfancestors(ui, repo):
136 137 timer, fm = gettimer(ui)
137 138 heads = repo.changelog.headrevs()
138 139 def d():
139 140 for a in repo.changelog.ancestors(heads):
140 141 pass
141 142 timer(d)
142 143 fm.end()
143 144
144 145 @command('perfancestorset')
145 146 def perfancestorset(ui, repo, revset):
146 147 timer, fm = gettimer(ui)
147 148 revs = repo.revs(revset)
148 149 heads = repo.changelog.headrevs()
149 150 def d():
150 151 s = repo.changelog.ancestors(heads)
151 152 for rev in revs:
152 153 rev in s
153 154 timer(d)
154 155 fm.end()
155 156
156 157 @command('perfdirs')
157 158 def perfdirs(ui, repo):
158 159 timer, fm = gettimer(ui)
159 160 dirstate = repo.dirstate
160 161 'a' in dirstate
161 162 def d():
162 163 dirstate.dirs()
163 164 del dirstate._dirs
164 165 timer(d)
165 166 fm.end()
166 167
167 168 @command('perfdirstate')
168 169 def perfdirstate(ui, repo):
169 170 timer, fm = gettimer(ui)
170 171 "a" in repo.dirstate
171 172 def d():
172 173 repo.dirstate.invalidate()
173 174 "a" in repo.dirstate
174 175 timer(d)
175 176 fm.end()
176 177
177 178 @command('perfdirstatedirs')
178 179 def perfdirstatedirs(ui, repo):
179 180 timer, fm = gettimer(ui)
180 181 "a" in repo.dirstate
181 182 def d():
182 183 "a" in repo.dirstate._dirs
183 184 del repo.dirstate._dirs
184 185 timer(d)
185 186 fm.end()
186 187
187 188 @command('perfdirstatefoldmap')
188 189 def perffoldmap(ui, repo):
189 190 timer, fm = gettimer(ui)
190 191 dirstate = repo.dirstate
191 192 'a' in dirstate
192 193 def d():
193 194 dirstate._foldmap.get('a')
194 195 del dirstate._foldmap
195 196 del dirstate._dirs
196 197 timer(d)
197 198 fm.end()
198 199
199 200 @command('perfdirstatewrite')
200 201 def perfdirstatewrite(ui, repo):
201 202 timer, fm = gettimer(ui)
202 203 ds = repo.dirstate
203 204 "a" in ds
204 205 def d():
205 206 ds._dirty = True
206 207 ds.write()
207 208 timer(d)
208 209 fm.end()
209 210
210 211 @command('perfmergecalculate',
211 212 [('r', 'rev', '.', 'rev to merge against')])
212 213 def perfmergecalculate(ui, repo, rev):
213 214 timer, fm = gettimer(ui)
214 215 wctx = repo[None]
215 216 rctx = scmutil.revsingle(repo, rev, rev)
216 217 ancestor = wctx.ancestor(rctx)
217 218 # we don't want working dir files to be stat'd in the benchmark, so prime
218 219 # that cache
219 220 wctx.dirty()
220 221 def d():
221 222 # acceptremote is True because we don't want prompts in the middle of
222 223 # our benchmark
223 224 merge.calculateupdates(repo, wctx, rctx, ancestor, False, False, False,
224 225 acceptremote=True)
225 226 timer(d)
226 227 fm.end()
227 228
228 229 @command('perfpathcopies', [], "REV REV")
229 230 def perfpathcopies(ui, repo, rev1, rev2):
230 231 timer, fm = gettimer(ui)
231 232 ctx1 = scmutil.revsingle(repo, rev1, rev1)
232 233 ctx2 = scmutil.revsingle(repo, rev2, rev2)
233 234 def d():
234 235 copies.pathcopies(ctx1, ctx2)
235 236 timer(d)
236 237 fm.end()
237 238
238 239 @command('perfmanifest', [], 'REV')
239 240 def perfmanifest(ui, repo, rev):
240 241 timer, fm = gettimer(ui)
241 242 ctx = scmutil.revsingle(repo, rev, rev)
242 243 t = ctx.manifestnode()
243 244 def d():
244 245 repo.manifest._mancache.clear()
245 246 repo.manifest._cache = None
246 247 repo.manifest.read(t)
247 248 timer(d)
248 249 fm.end()
249 250
250 251 @command('perfchangeset')
251 252 def perfchangeset(ui, repo, rev):
252 253 timer, fm = gettimer(ui)
253 254 n = repo[rev].node()
254 255 def d():
255 256 repo.changelog.read(n)
256 257 #repo.changelog._cache = None
257 258 timer(d)
258 259 fm.end()
259 260
260 261 @command('perfindex')
261 262 def perfindex(ui, repo):
262 263 import mercurial.revlog
263 264 timer, fm = gettimer(ui)
264 265 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
265 266 n = repo["tip"].node()
266 267 def d():
267 268 cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
268 269 cl.rev(n)
269 270 timer(d)
270 271 fm.end()
271 272
272 273 @command('perfstartup')
273 274 def perfstartup(ui, repo):
274 275 timer, fm = gettimer(ui)
275 276 cmd = sys.argv[0]
276 277 def d():
277 278 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
278 279 timer(d)
279 280 fm.end()
280 281
281 282 @command('perfparents')
282 283 def perfparents(ui, repo):
283 284 timer, fm = gettimer(ui)
284 285 nl = [repo.changelog.node(i) for i in xrange(1000)]
285 286 def d():
286 287 for n in nl:
287 288 repo.changelog.parents(n)
288 289 timer(d)
289 290 fm.end()
290 291
291 292 @command('perflookup')
292 293 def perflookup(ui, repo, rev):
293 294 timer, fm = gettimer(ui)
294 295 timer(lambda: len(repo.lookup(rev)))
295 296 fm.end()
296 297
297 298 @command('perfrevrange')
298 299 def perfrevrange(ui, repo, *specs):
299 300 timer, fm = gettimer(ui)
300 301 revrange = scmutil.revrange
301 302 timer(lambda: len(revrange(repo, specs)))
302 303 fm.end()
303 304
304 305 @command('perfnodelookup')
305 306 def perfnodelookup(ui, repo, rev):
306 307 timer, fm = gettimer(ui)
307 308 import mercurial.revlog
308 309 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
309 310 n = repo[rev].node()
310 311 cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
311 312 def d():
312 313 cl.rev(n)
313 314 clearcaches(cl)
314 315 timer(d)
315 316 fm.end()
316 317
317 318 @command('perflog',
318 319 [('', 'rename', False, 'ask log to follow renames')])
319 320 def perflog(ui, repo, **opts):
320 321 timer, fm = gettimer(ui)
321 322 ui.pushbuffer()
322 323 timer(lambda: commands.log(ui, repo, rev=[], date='', user='',
323 324 copies=opts.get('rename')))
324 325 ui.popbuffer()
325 326 fm.end()
326 327
327 328 @command('perfmoonwalk')
328 329 def perfmoonwalk(ui, repo):
329 330 """benchmark walking the changelog backwards
330 331
331 332 This also loads the changelog data for each revision in the changelog.
332 333 """
333 334 timer, fm = gettimer(ui)
334 335 def moonwalk():
335 336 for i in xrange(len(repo), -1, -1):
336 337 ctx = repo[i]
337 338 ctx.branch() # read changelog data (in addition to the index)
338 339 timer(moonwalk)
339 340 fm.end()
340 341
341 342 @command('perftemplating')
342 343 def perftemplating(ui, repo):
343 344 timer, fm = gettimer(ui)
344 345 ui.pushbuffer()
345 346 timer(lambda: commands.log(ui, repo, rev=[], date='', user='',
346 347 template='{date|shortdate} [{rev}:{node|short}]'
347 348 ' {author|person}: {desc|firstline}\n'))
348 349 ui.popbuffer()
349 350 fm.end()
350 351
351 352 @command('perfcca')
352 353 def perfcca(ui, repo):
353 354 timer, fm = gettimer(ui)
354 355 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
355 356 fm.end()
356 357
357 358 @command('perffncacheload')
358 359 def perffncacheload(ui, repo):
359 360 timer, fm = gettimer(ui)
360 361 s = repo.store
361 362 def d():
362 363 s.fncache._load()
363 364 timer(d)
364 365 fm.end()
365 366
366 367 @command('perffncachewrite')
367 368 def perffncachewrite(ui, repo):
368 369 timer, fm = gettimer(ui)
369 370 s = repo.store
370 371 s.fncache._load()
371 372 def d():
372 373 s.fncache._dirty = True
373 374 s.fncache.write()
374 375 timer(d)
375 376 fm.end()
376 377
377 378 @command('perffncacheencode')
378 379 def perffncacheencode(ui, repo):
379 380 timer, fm = gettimer(ui)
380 381 s = repo.store
381 382 s.fncache._load()
382 383 def d():
383 384 for p in s.fncache.entries:
384 385 s.encode(p)
385 386 timer(d)
386 387 fm.end()
387 388
388 389 @command('perfdiffwd')
389 390 def perfdiffwd(ui, repo):
390 391 """Profile diff of working directory changes"""
391 392 timer, fm = gettimer(ui)
392 393 options = {
393 394 'w': 'ignore_all_space',
394 395 'b': 'ignore_space_change',
395 396 'B': 'ignore_blank_lines',
396 397 }
397 398
398 399 for diffopt in ('', 'w', 'b', 'B', 'wB'):
399 400 opts = dict((options[c], '1') for c in diffopt)
400 401 def d():
401 402 ui.pushbuffer()
402 403 commands.diff(ui, repo, **opts)
403 404 ui.popbuffer()
404 405 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
405 406 timer(d, title)
406 407 fm.end()
407 408
408 409 @command('perfrevlog',
409 410 [('d', 'dist', 100, 'distance between the revisions')],
410 411 "[INDEXFILE]")
411 412 def perfrevlog(ui, repo, file_, **opts):
412 413 timer, fm = gettimer(ui)
413 414 from mercurial import revlog
414 415 dist = opts['dist']
415 416 def d():
416 417 r = revlog.revlog(lambda fn: open(fn, 'rb'), file_)
417 418 for x in xrange(0, len(r), dist):
418 419 r.revision(r.node(x))
419 420
420 421 timer(d)
421 422 fm.end()
422 423
423 424 @command('perfrevset',
424 425 [('C', 'clear', False, 'clear volatile cache between each call.')],
425 426 "REVSET")
426 427 def perfrevset(ui, repo, expr, clear=False):
427 428 """benchmark the execution time of a revset
428 429
429 430 Use the --clear option if you need to evaluate the impact of building the
430 431 volatile revision set caches on revset execution. The volatile caches hold
431 432 filtering- and obsolescence-related data."""
432 433 timer, fm = gettimer(ui)
433 434 def d():
434 435 if clear:
435 436 repo.invalidatevolatilesets()
436 437 for r in repo.revs(expr): pass
437 438 timer(d)
438 439 fm.end()
439 440
440 441 @command('perfvolatilesets')
441 442 def perfvolatilesets(ui, repo, *names):
442 443 """benchmark the computation of various volatile set
443 444
444 445 Volatile set computes element related to filtering and obsolescence."""
445 446 timer, fm = gettimer(ui)
446 447 repo = repo.unfiltered()
447 448
448 449 def getobs(name):
449 450 def d():
450 451 repo.invalidatevolatilesets()
451 452 obsolete.getrevs(repo, name)
452 453 return d
453 454
454 455 allobs = sorted(obsolete.cachefuncs)
455 456 if names:
456 457 allobs = [n for n in allobs if n in names]
457 458
458 459 for name in allobs:
459 460 timer(getobs(name), title=name)
460 461
461 462 def getfiltered(name):
462 463 def d():
463 464 repo.invalidatevolatilesets()
464 465 repoview.filterrevs(repo, name)
465 466 return d
466 467
467 468 allfilter = sorted(repoview.filtertable)
468 469 if names:
469 470 allfilter = [n for n in allfilter if n in names]
470 471
471 472 for name in allfilter:
472 473 timer(getfiltered(name), title=name)
473 474 fm.end()
474 475
475 476 @command('perfbranchmap',
476 477 [('f', 'full', False,
477 478 'Includes build time of subset'),
478 479 ])
479 480 def perfbranchmap(ui, repo, full=False):
480 481 """benchmark the update of a branchmap
481 482
482 483 This benchmarks the full repo.branchmap() call with read and write disabled
483 484 """
484 485 timer, fm = gettimer(ui)
485 486 def getbranchmap(filtername):
486 487 """generate a benchmark function for the filtername"""
487 488 if filtername is None:
488 489 view = repo
489 490 else:
490 491 view = repo.filtered(filtername)
491 492 def d():
492 493 if full:
493 494 view._branchcaches.clear()
494 495 else:
495 496 view._branchcaches.pop(filtername, None)
496 497 view.branchmap()
497 498 return d
498 499 # add filter in smaller subset to bigger subset
499 500 possiblefilters = set(repoview.filtertable)
500 501 allfilters = []
501 502 while possiblefilters:
502 503 for name in possiblefilters:
503 504 subset = branchmap.subsettable.get(name)
504 505 if subset not in possiblefilters:
505 506 break
506 507 else:
507 508 assert False, 'subset cycle %s!' % possiblefilters
508 509 allfilters.append(name)
509 510 possiblefilters.remove(name)
510 511
511 512 # warm the cache
512 513 if not full:
513 514 for name in allfilters:
514 515 repo.filtered(name).branchmap()
515 516 # add unfiltered
516 517 allfilters.append(None)
517 518 oldread = branchmap.read
518 519 oldwrite = branchmap.branchcache.write
519 520 try:
520 521 branchmap.read = lambda repo: None
521 522 branchmap.write = lambda repo: None
522 523 for name in allfilters:
523 524 timer(getbranchmap(name), title=str(name))
524 525 finally:
525 526 branchmap.read = oldread
526 527 branchmap.branchcache.write = oldwrite
527 528 fm.end()
528 529
529 530 @command('perfloadmarkers')
530 531 def perfloadmarkers(ui, repo):
531 532 """benchmark the time to parse the on-disk markers for a repo
532 533
533 534 Result is the number of markers in the repo."""
534 535 timer, fm = gettimer(ui)
535 536 timer(lambda: len(obsolete.obsstore(repo.sopener)))
536 537 fm.end()
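The second file in this changeset is the largefiles extension's overrides module, whose scmutiladdremove wrapper is updated to the new matcher-based signature. It receives the original function as orig because largefiles replaces scmutil.addremove at extension setup time; a rough sketch of how such a wrapper is installed (the setup code is not part of this diff, and the overrides import path is an assumption):

    from mercurial import extensions, scmutil
    from hgext.largefiles import overrides  # assumed module path

    def uisetup(ui):
        # route every scmutil.addremove() call through the largefiles wrapper,
        # which now receives (orig, repo, matcher, opts, ...) per this changeset
        extensions.wrapfunction(scmutil, 'addremove', overrides.scmutiladdremove)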
@@ -1,1286 +1,1285
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10
11 11 import os
12 12 import copy
13 13
14 14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 15 archival, pathutil, revset
16 16 from mercurial.i18n import _
17 17 from mercurial.node import hex
18 18
19 19 import lfutil
20 20 import lfcommands
21 21 import basestore
22 22
23 23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
24 24
25 25 def composenormalfilematcher(match, manifest):
26 26 m = copy.copy(match)
27 27 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
28 28 manifest)
29 29 m._files = filter(notlfile, m._files)
30 30 m._fmap = set(m._files)
31 31 m._always = False
32 32 origmatchfn = m.matchfn
33 33 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
34 34 return m
35 35
36 36 def installnormalfilesmatchfn(manifest):
37 37 '''installmatchfn with a matchfn that ignores all largefiles'''
38 38 def overridematch(ctx, pats=[], opts={}, globbed=False,
39 39 default='relpath'):
40 40 match = oldmatch(ctx, pats, opts, globbed, default)
41 41 return composenormalfilematcher(match, manifest)
42 42 oldmatch = installmatchfn(overridematch)
43 43
44 44 def installmatchfn(f):
45 45 '''monkey patch the scmutil module with a custom match function.
46 46 Warning: it monkey patches the _module_ at runtime! Not thread safe!'''
47 47 oldmatch = scmutil.match
48 48 setattr(f, 'oldmatch', oldmatch)
49 49 scmutil.match = f
50 50 return oldmatch
51 51
52 52 def restorematchfn():
53 53 '''restores scmutil.match to what it was before installmatchfn
54 54 was called. no-op if scmutil.match is its original function.
55 55
56 56 Note that n calls to installmatchfn will require n calls to
57 57 restorematchfn to reverse'''
58 58 scmutil.match = getattr(scmutil.match, 'oldmatch')
59 59
60 60 def installmatchandpatsfn(f):
61 61 oldmatchandpats = scmutil.matchandpats
62 62 setattr(f, 'oldmatchandpats', oldmatchandpats)
63 63 scmutil.matchandpats = f
64 64 return oldmatchandpats
65 65
66 66 def restorematchandpatsfn():
67 67 '''restores scmutil.matchandpats to what it was before
68 68 installmatchandpatsfn was called. No-op if scmutil.matchandpats
69 69 is its original function.
70 70
71 71 Note that n calls to installmatchandpatsfn will require n calls
72 72 to restorematchandpatsfn to reverse'''
73 73 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
74 74 scmutil.matchandpats)
75 75
76 def addlargefiles(ui, repo, *pats, **opts):
76 def addlargefiles(ui, repo, matcher, **opts):
77 77 large = opts.pop('large', None)
78 78 lfsize = lfutil.getminsize(
79 79 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
80 80
81 81 lfmatcher = None
82 82 if lfutil.islfilesrepo(repo):
83 83 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
84 84 if lfpats:
85 85 lfmatcher = match_.match(repo.root, '', list(lfpats))
86 86
87 87 lfnames = []
88 m = scmutil.match(repo[None], pats, opts)
88 m = copy.copy(matcher)
89 89 m.bad = lambda x, y: None
90 90 wctx = repo[None]
91 91 for f in repo.walk(m):
92 92 exact = m.exact(f)
93 93 lfile = lfutil.standin(f) in wctx
94 94 nfile = f in wctx
95 95 exists = lfile or nfile
96 96
97 97 # Don't warn the user when they attempt to add a normal tracked file.
98 98 # The normal add code will do that for us.
99 99 if exact and exists:
100 100 if lfile:
101 101 ui.warn(_('%s already a largefile\n') % f)
102 102 continue
103 103
104 104 if (exact or not exists) and not lfutil.isstandin(f):
105 105 wfile = repo.wjoin(f)
106 106
107 107 # In case the file was removed previously, but not committed
108 108 # (issue3507)
109 109 if not os.path.exists(wfile):
110 110 continue
111 111
112 112 abovemin = (lfsize and
113 113 os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
114 114 if large or abovemin or (lfmatcher and lfmatcher(f)):
115 115 lfnames.append(f)
116 116 if ui.verbose or not exact:
117 117 ui.status(_('adding %s as a largefile\n') % m.rel(f))
118 118
119 119 bad = []
120 120
121 121 # Need to lock, otherwise there could be a race condition between
122 122 # when standins are created and added to the repo.
123 123 wlock = repo.wlock()
124 124 try:
125 125 if not opts.get('dry_run'):
126 126 standins = []
127 127 lfdirstate = lfutil.openlfdirstate(ui, repo)
128 128 for f in lfnames:
129 129 standinname = lfutil.standin(f)
130 130 lfutil.writestandin(repo, standinname, hash='',
131 131 executable=lfutil.getexecutable(repo.wjoin(f)))
132 132 standins.append(standinname)
133 133 if lfdirstate[f] == 'r':
134 134 lfdirstate.normallookup(f)
135 135 else:
136 136 lfdirstate.add(f)
137 137 lfdirstate.write()
138 138 bad += [lfutil.splitstandin(f)
139 139 for f in repo[None].add(standins)
140 140 if f in m.files()]
141 141 finally:
142 142 wlock.release()
143 143 return bad
144 144
145 145 def removelargefiles(ui, repo, isaddremove, *pats, **opts):
146 146 after = opts.get('after')
147 147 if not pats and not after:
148 148 raise util.Abort(_('no files specified'))
149 149 m = scmutil.match(repo[None], pats, opts)
150 150 try:
151 151 repo.lfstatus = True
152 152 s = repo.status(match=m, clean=True)
153 153 finally:
154 154 repo.lfstatus = False
155 155 manifest = repo[None].manifest()
156 156 modified, added, deleted, clean = [[f for f in list
157 157 if lfutil.standin(f) in manifest]
158 158 for list in (s.modified, s.added,
159 159 s.deleted, s.clean)]
160 160
161 161 def warn(files, msg):
162 162 for f in files:
163 163 ui.warn(msg % m.rel(f))
164 164 return int(len(files) > 0)
165 165
166 166 result = 0
167 167
168 168 if after:
169 169 remove = deleted
170 170 result = warn(modified + added + clean,
171 171 _('not removing %s: file still exists\n'))
172 172 else:
173 173 remove = deleted + clean
174 174 result = warn(modified, _('not removing %s: file is modified (use -f'
175 175 ' to force removal)\n'))
176 176 result = warn(added, _('not removing %s: file has been marked for add'
177 177 ' (use forget to undo)\n')) or result
178 178
179 179 for f in sorted(remove):
180 180 if ui.verbose or not m.exact(f):
181 181 ui.status(_('removing %s\n') % m.rel(f))
182 182
183 183 # Need to lock because standin files are deleted then removed from the
184 184 # repository and we could race in-between.
185 185 wlock = repo.wlock()
186 186 try:
187 187 lfdirstate = lfutil.openlfdirstate(ui, repo)
188 188 for f in remove:
189 189 if not after:
190 190 # If this is being called by addremove, notify the user that we
191 191 # are removing the file.
192 192 if isaddremove:
193 193 ui.status(_('removing %s\n') % f)
194 194 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
195 195 lfdirstate.remove(f)
196 196 lfdirstate.write()
197 197 remove = [lfutil.standin(f) for f in remove]
198 198 # If this is being called by addremove, let the original addremove
199 199 # function handle this.
200 200 if not isaddremove:
201 201 for f in remove:
202 202 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
203 203 repo[None].forget(remove)
204 204 finally:
205 205 wlock.release()
206 206
207 207 return result
208 208
209 209 # For overriding mercurial.hgweb.webcommands so that largefiles will
210 210 # appear at their right place in the manifests.
211 211 def decodepath(orig, path):
212 212 return lfutil.splitstandin(path) or path
213 213
214 214 # -- Wrappers: modify existing commands --------------------------------
215 215
216 216 # Add works by going through the files that the user wanted to add and
217 217 # checking if they should be added as largefiles. Then it makes a new
218 218 # matcher which matches only the normal files and runs the original
219 219 # version of add.
220 220 def overrideadd(orig, ui, repo, *pats, **opts):
221 221 normal = opts.pop('normal')
222 222 if normal:
223 223 if opts.get('large'):
224 224 raise util.Abort(_('--normal cannot be used with --large'))
225 225 return orig(ui, repo, *pats, **opts)
226 bad = addlargefiles(ui, repo, *pats, **opts)
226 matcher = scmutil.match(repo[None], pats, opts)
227 bad = addlargefiles(ui, repo, matcher, **opts)
227 228 installnormalfilesmatchfn(repo[None].manifest())
228 229 result = orig(ui, repo, *pats, **opts)
229 230 restorematchfn()
230 231
231 232 return (result == 1 or bad) and 1 or 0
232 233
233 234 def overrideremove(orig, ui, repo, *pats, **opts):
234 235 installnormalfilesmatchfn(repo[None].manifest())
235 236 result = orig(ui, repo, *pats, **opts)
236 237 restorematchfn()
237 238 return removelargefiles(ui, repo, False, *pats, **opts) or result
238 239
239 240 def overridestatusfn(orig, repo, rev2, **opts):
240 241 try:
241 242 repo._repo.lfstatus = True
242 243 return orig(repo, rev2, **opts)
243 244 finally:
244 245 repo._repo.lfstatus = False
245 246
246 247 def overridestatus(orig, ui, repo, *pats, **opts):
247 248 try:
248 249 repo.lfstatus = True
249 250 return orig(ui, repo, *pats, **opts)
250 251 finally:
251 252 repo.lfstatus = False
252 253
253 254 def overridedirty(orig, repo, ignoreupdate=False):
254 255 try:
255 256 repo._repo.lfstatus = True
256 257 return orig(repo, ignoreupdate)
257 258 finally:
258 259 repo._repo.lfstatus = False
259 260
260 261 def overridelog(orig, ui, repo, *pats, **opts):
261 262 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
262 263 default='relpath'):
263 264 """Matcher that merges root directory with .hglf, suitable for log.
264 265 It is still possible to match .hglf directly.
265 266 For any listed files run log on the standin too.
266 267 matchfn tries both the given filename and with .hglf stripped.
267 268 """
268 269 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
269 270 m, p = copy.copy(matchandpats)
270 271
271 272 if m.always():
272 273 # We want to match everything anyway, so there's no benefit trying
273 274 # to add standins.
274 275 return matchandpats
275 276
276 277 pats = set(p)
277 278 # TODO: handling of patterns in both cases below
278 279 if m._cwd:
279 280 if os.path.isabs(m._cwd):
280 281 # TODO: handle largefile magic when invoked from other cwd
281 282 return matchandpats
282 283 back = (m._cwd.count('/') + 1) * '../'
283 284 pats.update(back + lfutil.standin(m._cwd + '/' + f) for f in p)
284 285 else:
285 286 pats.update(lfutil.standin(f) for f in p)
286 287
287 288 for i in range(0, len(m._files)):
288 289 standin = lfutil.standin(m._files[i])
289 290 if standin in repo[ctx.node()]:
290 291 m._files[i] = standin
291 292 elif m._files[i] not in repo[ctx.node()]:
292 293 m._files.append(standin)
293 294 pats.add(standin)
294 295
295 296 m._fmap = set(m._files)
296 297 m._always = False
297 298 origmatchfn = m.matchfn
298 299 def lfmatchfn(f):
299 300 lf = lfutil.splitstandin(f)
300 301 if lf is not None and origmatchfn(lf):
301 302 return True
302 303 r = origmatchfn(f)
303 304 return r
304 305 m.matchfn = lfmatchfn
305 306
306 307 return m, pats
307 308
308 309 # For hg log --patch, the match object is used in two different senses:
309 310 # (1) to determine what revisions should be printed out, and
310 311 # (2) to determine what files to print out diffs for.
311 312 # The magic matchandpats override should be used for case (1) but not for
312 313 # case (2).
313 314 def overridemakelogfilematcher(repo, pats, opts):
314 315 pctx = repo[None]
315 316 match, pats = oldmatchandpats(pctx, pats, opts)
316 317 return lambda rev: match
317 318
318 319 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
319 320 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
320 321 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
321 322
322 323 try:
323 324 return orig(ui, repo, *pats, **opts)
324 325 finally:
325 326 restorematchandpatsfn()
326 327 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
327 328
328 329 def overrideverify(orig, ui, repo, *pats, **opts):
329 330 large = opts.pop('large', False)
330 331 all = opts.pop('lfa', False)
331 332 contents = opts.pop('lfc', False)
332 333
333 334 result = orig(ui, repo, *pats, **opts)
334 335 if large or all or contents:
335 336 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
336 337 return result
337 338
338 339 def overridedebugstate(orig, ui, repo, *pats, **opts):
339 340 large = opts.pop('large', False)
340 341 if large:
341 342 class fakerepo(object):
342 343 dirstate = lfutil.openlfdirstate(ui, repo)
343 344 orig(ui, fakerepo, *pats, **opts)
344 345 else:
345 346 orig(ui, repo, *pats, **opts)
346 347
347 348 # Override needs to refresh standins so that update's normal merge
348 349 # will go through properly. Then the other update hook (overriding repo.update)
349 350 # will get the new files. Filemerge is also overridden so that the merge
350 351 # will merge standins correctly.
351 352 def overrideupdate(orig, ui, repo, *pats, **opts):
352 353 # Need to lock between the standins getting updated and their
353 354 # largefiles getting updated
354 355 wlock = repo.wlock()
355 356 try:
356 357 if opts['check']:
357 358 lfdirstate = lfutil.openlfdirstate(ui, repo)
358 359 unsure, s = lfdirstate.status(
359 360 match_.always(repo.root, repo.getcwd()),
360 361 [], False, False, False)
361 362
362 363 mod = len(s.modified) > 0
363 364 for lfile in unsure:
364 365 standin = lfutil.standin(lfile)
365 366 if repo['.'][standin].data().strip() != \
366 367 lfutil.hashfile(repo.wjoin(lfile)):
367 368 mod = True
368 369 else:
369 370 lfdirstate.normal(lfile)
370 371 lfdirstate.write()
371 372 if mod:
372 373 raise util.Abort(_('uncommitted changes'))
373 374 return orig(ui, repo, *pats, **opts)
374 375 finally:
375 376 wlock.release()
376 377
377 378 # Before starting the manifest merge, merge.updates will call
378 379 # checkunknown to check if there are any files in the merged-in
379 380 # changeset that collide with unknown files in the working copy.
380 381 #
381 382 # The largefiles are seen as unknown, so this prevents us from merging
382 383 # in a file 'foo' if we already have a largefile with the same name.
383 384 #
384 385 # The overridden function filters the unknown files by removing any
385 386 # largefiles. This makes the merge proceed and we can then handle this
386 387 # case further in the overridden calculateupdates function below.
387 388 def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
388 389 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
389 390 return False
390 391 return origfn(repo, wctx, mctx, f)
391 392
392 393 # The manifest merge handles conflicts on the manifest level. We want
393 394 # to handle changes in largefile-ness of files at this level too.
394 395 #
395 396 # The strategy is to run the original calculateupdates and then process
396 397 # the action list it outputs. There are two cases we need to deal with:
397 398 #
398 399 # 1. Normal file in p1, largefile in p2. Here the largefile is
399 400 # detected via its standin file, which will enter the working copy
400 401 # with a "get" action. It is not "merge" since the standin is all
401 402 # Mercurial is concerned with at this level -- the link to the
402 403 # existing normal file is not relevant here.
403 404 #
404 405 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
405 406 # since the largefile will be present in the working copy and
406 407 # different from the normal file in p2. Mercurial therefore
407 408 # triggers a merge action.
408 409 #
409 410 # In both cases, we prompt the user and emit new actions to either
410 411 # remove the standin (if the normal file was kept) or to remove the
411 412 # normal file and get the standin (if the largefile was kept). The
412 413 # default prompt answer is to use the largefile version since it was
413 414 # presumably changed on purpose.
414 415 #
415 416 # Finally, the merge.applyupdates function will then take care of
416 417 # writing the files into the working copy and lfcommands.updatelfiles
417 418 # will update the largefiles.
418 419 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
419 420 partial, acceptremote, followcopies):
420 421 overwrite = force and not branchmerge
421 422 actions, diverge, renamedelete = origfn(
422 423 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
423 424 followcopies)
424 425
425 426 if overwrite:
426 427 return actions, diverge, renamedelete
427 428
428 429 # Convert to dictionary with filename as key and action as value.
429 430 lfiles = set()
430 431 actionbyfile = {}
431 432 for m, l in actions.iteritems():
432 433 for f, args, msg in l:
433 434 actionbyfile[f] = m, args, msg
434 435 splitstandin = f and lfutil.splitstandin(f)
435 436 if splitstandin in p1:
436 437 lfiles.add(splitstandin)
437 438 elif lfutil.standin(f) in p1:
438 439 lfiles.add(f)
439 440
440 441 for lfile in lfiles:
441 442 standin = lfutil.standin(lfile)
442 443 lm = actionbyfile.get(lfile, (None, None, None))[0]
443 444 sm = actionbyfile.get(standin, (None, None, None))[0]
444 445 if sm == 'g' and lm != 'r':
445 446 # Case 1: normal file in the working copy, largefile in
446 447 # the second parent
447 448 usermsg = _('remote turned local normal file %s into a largefile\n'
448 449 'use (l)argefile or keep (n)ormal file?'
449 450 '$$ &Largefile $$ &Normal file') % lfile
450 451 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
451 452 actionbyfile[lfile] = ('r', None, 'replaced by standin')
452 453 else: # keep local normal file
453 454 if branchmerge:
454 455 actionbyfile[standin] = ('k', None,
455 456 'replaced by non-standin')
456 457 else:
457 458 actionbyfile[standin] = ('r', None,
458 459 'replaced by non-standin')
459 460 elif lm == 'g' and sm != 'r':
460 461 # Case 2: largefile in the working copy, normal file in
461 462 # the second parent
462 463 usermsg = _('remote turned local largefile %s into a normal file\n'
463 464 'keep (l)argefile or use (n)ormal file?'
464 465 '$$ &Largefile $$ &Normal file') % lfile
465 466 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
466 467 if branchmerge:
467 468 # largefile can be restored from standin safely
468 469 actionbyfile[lfile] = ('k', None, 'replaced by standin')
469 470 else:
470 471 # "lfile" should be marked as "removed" without
471 472 # removal of itself
472 473 actionbyfile[lfile] = ('lfmr', None,
473 474 'forget non-standin largefile')
474 475
475 476 # linear-merge should treat this largefile as 're-added'
476 477 actionbyfile[standin] = ('a', None, 'keep standin')
477 478 else: # pick remote normal file
478 479 actionbyfile[standin] = ('r', None, 'replaced by non-standin')
479 480
480 481 # Convert back to dictionary-of-lists format
481 482 for l in actions.itervalues():
482 483 l[:] = []
483 484 actions['lfmr'] = []
484 485 for f, (m, args, msg) in actionbyfile.iteritems():
485 486 actions[m].append((f, args, msg))
486 487
487 488 return actions, diverge, renamedelete
488 489
489 490 def mergerecordupdates(orig, repo, actions, branchmerge):
490 491 if 'lfmr' in actions:
491 492 # this should be executed before 'orig', to execute 'remove'
492 493 # before all other actions
493 494 for lfile, args, msg in actions['lfmr']:
494 495 repo.dirstate.remove(lfile)
495 496
496 497 return orig(repo, actions, branchmerge)
497 498
498 499
499 500 # Override filemerge to prompt the user about how they wish to merge
500 501 # largefiles. This will handle identical edits without prompting the user.
501 502 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
502 503 if not lfutil.isstandin(orig):
503 504 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
504 505
505 506 ahash = fca.data().strip().lower()
506 507 dhash = fcd.data().strip().lower()
507 508 ohash = fco.data().strip().lower()
508 509 if (ohash != ahash and
509 510 ohash != dhash and
510 511 (dhash == ahash or
511 512 repo.ui.promptchoice(
512 513 _('largefile %s has a merge conflict\nancestor was %s\n'
513 514 'keep (l)ocal %s or\ntake (o)ther %s?'
514 515 '$$ &Local $$ &Other') %
515 516 (lfutil.splitstandin(orig), ahash, dhash, ohash),
516 517 0) == 1)):
517 518 repo.wwrite(fcd.path(), fco.data(), fco.flags())
518 519 return 0
519 520
520 521 # Copy first changes the matchers to match standins instead of
521 522 # largefiles. Then it overrides util.copyfile so that, in that function, it
522 523 # checks if the destination largefile already exists. It also keeps a
523 524 # list of copied files so that the largefiles can be copied and the
524 525 # dirstate updated.
525 526 def overridecopy(orig, ui, repo, pats, opts, rename=False):
526 527 # doesn't remove largefile on rename
527 528 if len(pats) < 2:
528 529 # this isn't legal, let the original function deal with it
529 530 return orig(ui, repo, pats, opts, rename)
530 531
531 532 def makestandin(relpath):
532 533 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
533 534 return os.path.join(repo.wjoin(lfutil.standin(path)))
534 535
535 536 fullpats = scmutil.expandpats(pats)
536 537 dest = fullpats[-1]
537 538
538 539 if os.path.isdir(dest):
539 540 if not os.path.isdir(makestandin(dest)):
540 541 os.makedirs(makestandin(dest))
541 542 # This could copy both lfiles and normal files in one command,
542 543 # but we don't want to do that. First replace their matcher to
543 544 # only match normal files and run it, then replace it to just
544 545 # match largefiles and run it again.
545 546 nonormalfiles = False
546 547 nolfiles = False
547 548 installnormalfilesmatchfn(repo[None].manifest())
548 549 try:
549 550 try:
550 551 result = orig(ui, repo, pats, opts, rename)
551 552 except util.Abort, e:
552 553 if str(e) != _('no files to copy'):
553 554 raise e
554 555 else:
555 556 nonormalfiles = True
556 557 result = 0
557 558 finally:
558 559 restorematchfn()
559 560
560 561 # The first rename can cause our current working directory to be removed.
561 562 # In that case there is nothing left to copy/rename so just quit.
562 563 try:
563 564 repo.getcwd()
564 565 except OSError:
565 566 return result
566 567
567 568 try:
568 569 try:
569 570 # When we call orig below it creates the standins but we don't add
570 571 # them to the dir state until later so lock during that time.
571 572 wlock = repo.wlock()
572 573
573 574 manifest = repo[None].manifest()
574 575 def overridematch(ctx, pats=[], opts={}, globbed=False,
575 576 default='relpath'):
576 577 newpats = []
577 578 # The patterns were previously mangled to add the standin
578 579 # directory; we need to remove that now
579 580 for pat in pats:
580 581 if match_.patkind(pat) is None and lfutil.shortname in pat:
581 582 newpats.append(pat.replace(lfutil.shortname, ''))
582 583 else:
583 584 newpats.append(pat)
584 585 match = oldmatch(ctx, newpats, opts, globbed, default)
585 586 m = copy.copy(match)
586 587 lfile = lambda f: lfutil.standin(f) in manifest
587 588 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
588 589 m._fmap = set(m._files)
589 590 origmatchfn = m.matchfn
590 591 m.matchfn = lambda f: (lfutil.isstandin(f) and
591 592 (f in manifest) and
592 593 origmatchfn(lfutil.splitstandin(f)) or
593 594 None)
594 595 return m
595 596 oldmatch = installmatchfn(overridematch)
596 597 listpats = []
597 598 for pat in pats:
598 599 if match_.patkind(pat) is not None:
599 600 listpats.append(pat)
600 601 else:
601 602 listpats.append(makestandin(pat))
602 603
603 604 try:
604 605 origcopyfile = util.copyfile
605 606 copiedfiles = []
606 607 def overridecopyfile(src, dest):
607 608 if (lfutil.shortname in src and
608 609 dest.startswith(repo.wjoin(lfutil.shortname))):
609 610 destlfile = dest.replace(lfutil.shortname, '')
610 611 if not opts['force'] and os.path.exists(destlfile):
611 612 raise IOError('',
612 613 _('destination largefile already exists'))
613 614 copiedfiles.append((src, dest))
614 615 origcopyfile(src, dest)
615 616
616 617 util.copyfile = overridecopyfile
617 618 result += orig(ui, repo, listpats, opts, rename)
618 619 finally:
619 620 util.copyfile = origcopyfile
620 621
621 622 lfdirstate = lfutil.openlfdirstate(ui, repo)
622 623 for (src, dest) in copiedfiles:
623 624 if (lfutil.shortname in src and
624 625 dest.startswith(repo.wjoin(lfutil.shortname))):
625 626 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
626 627 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
627 628 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
628 629 if not os.path.isdir(destlfiledir):
629 630 os.makedirs(destlfiledir)
630 631 if rename:
631 632 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
632 633
633 634 # The file is gone, but this deletes any empty parent
634 635 # directories as a side-effect.
635 636 util.unlinkpath(repo.wjoin(srclfile), True)
636 637 lfdirstate.remove(srclfile)
637 638 else:
638 639 util.copyfile(repo.wjoin(srclfile),
639 640 repo.wjoin(destlfile))
640 641
641 642 lfdirstate.add(destlfile)
642 643 lfdirstate.write()
643 644 except util.Abort, e:
644 645 if str(e) != _('no files to copy'):
645 646 raise e
646 647 else:
647 648 nolfiles = True
648 649 finally:
649 650 restorematchfn()
650 651 wlock.release()
651 652
652 653 if nolfiles and nonormalfiles:
653 654 raise util.Abort(_('no files to copy'))
654 655
655 656 return result
656 657
657 658 # When the user calls revert, we have to be careful to not revert any
658 659 # changes to other largefiles accidentally. This means we have to keep
659 660 # track of the largefiles that are being reverted so we only pull down
660 661 # the necessary largefiles.
661 662 #
662 663 # Standins are only updated (to match the hash of largefiles) before
663 664 # commits. Update the standins then run the original revert, changing
664 665 # the matcher to hit standins instead of largefiles. Based on the
665 666 # resulting standins update the largefiles.
666 667 def overriderevert(orig, ui, repo, *pats, **opts):
667 668 # Because we put the standins in a bad state (by updating them)
668 669 # and then return them to a correct state we need to lock to
669 670 # prevent others from changing them in their incorrect state.
670 671 wlock = repo.wlock()
671 672 try:
672 673 lfdirstate = lfutil.openlfdirstate(ui, repo)
673 674 s = lfutil.lfdirstatestatus(lfdirstate, repo)
674 675 lfdirstate.write()
675 676 for lfile in s.modified:
676 677 lfutil.updatestandin(repo, lfutil.standin(lfile))
677 678 for lfile in s.deleted:
678 679 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
679 680 os.unlink(repo.wjoin(lfutil.standin(lfile)))
680 681
681 682 oldstandins = lfutil.getstandinsstate(repo)
682 683
683 684 def overridematch(ctx, pats=[], opts={}, globbed=False,
684 685 default='relpath'):
685 686 match = oldmatch(ctx, pats, opts, globbed, default)
686 687 m = copy.copy(match)
687 688 def tostandin(f):
688 689 if lfutil.standin(f) in ctx:
689 690 return lfutil.standin(f)
690 691 elif lfutil.standin(f) in repo[None]:
691 692 return None
692 693 return f
693 694 m._files = [tostandin(f) for f in m._files]
694 695 m._files = [f for f in m._files if f is not None]
695 696 m._fmap = set(m._files)
696 697 origmatchfn = m.matchfn
697 698 def matchfn(f):
698 699 if lfutil.isstandin(f):
699 700 return (origmatchfn(lfutil.splitstandin(f)) and
700 701 (f in repo[None] or f in ctx))
701 702 return origmatchfn(f)
702 703 m.matchfn = matchfn
703 704 return m
704 705 oldmatch = installmatchfn(overridematch)
705 706 try:
706 707 orig(ui, repo, *pats, **opts)
707 708 finally:
708 709 restorematchfn()
709 710
710 711 newstandins = lfutil.getstandinsstate(repo)
711 712 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
712 713 # lfdirstate should be 'normallookup'-ed for updated files,
713 714 # because reverting doesn't touch dirstate for 'normal' files
714 715 # when target revision is explicitly specified: in such case,
715 716 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
716 717 # of target (standin) file.
717 718 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
718 719 normallookup=True)
719 720
720 721 finally:
721 722 wlock.release()
722 723
723 724 # after pulling changesets, we need to take some extra care to get
724 725 # largefiles updated remotely
725 726 def overridepull(orig, ui, repo, source=None, **opts):
726 727 revsprepull = len(repo)
727 728 if not source:
728 729 source = 'default'
729 730 repo.lfpullsource = source
730 731 result = orig(ui, repo, source, **opts)
731 732 revspostpull = len(repo)
732 733 lfrevs = opts.get('lfrev', [])
733 734 if opts.get('all_largefiles'):
734 735 lfrevs.append('pulled()')
735 736 if lfrevs and revspostpull > revsprepull:
736 737 numcached = 0
737 738 repo.firstpulled = revsprepull # for pulled() revset expression
738 739 try:
739 740 for rev in scmutil.revrange(repo, lfrevs):
740 741 ui.note(_('pulling largefiles for revision %s\n') % rev)
741 742 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
742 743 numcached += len(cached)
743 744 finally:
744 745 del repo.firstpulled
745 746 ui.status(_("%d largefiles cached\n") % numcached)
746 747 return result
747 748
748 749 def pulledrevsetsymbol(repo, subset, x):
749 750 """``pulled()``
749 750 Changesets that have just been pulled.
751 752
752 753 Only available with largefiles from pull --lfrev expressions.
753 754
754 755 .. container:: verbose
755 756
756 757 Some examples:
757 758
758 759 - pull largefiles for all new changesets::
759 760
759 760 hg pull --lfrev "pulled()"
761 762
762 763 - pull largefiles for all new branch heads::
763 764
763 764 hg pull --lfrev "head(pulled()) and not closed()"
765 766
766 767 """
767 768
768 769 try:
769 770 firstpulled = repo.firstpulled
770 771 except AttributeError:
771 772 raise util.Abort(_("pulled() only available in --lfrev"))
772 773 return revset.baseset([r for r in subset if r >= firstpulled])
773 774
774 775 def overrideclone(orig, ui, source, dest=None, **opts):
775 776 d = dest
776 777 if d is None:
777 778 d = hg.defaultdest(source)
778 779 if opts.get('all_largefiles') and not hg.islocal(d):
779 780 raise util.Abort(_(
780 781 '--all-largefiles is incompatible with non-local destination %s') %
781 782 d)
782 783
783 784 return orig(ui, source, dest, **opts)
784 785
785 786 def hgclone(orig, ui, opts, *args, **kwargs):
786 787 result = orig(ui, opts, *args, **kwargs)
787 788
788 789 if result is not None:
789 790 sourcerepo, destrepo = result
790 791 repo = destrepo.local()
791 792
792 793 # Caching is implicitly limited to the 'rev' option, since the dest repo was
793 794 # truncated at that point. The user may expect a download count with
794 795 # this option, so attempt it whether or not this is a largefile repo.
795 796 if opts.get('all_largefiles'):
796 797 success, missing = lfcommands.downloadlfiles(ui, repo, None)
797 798
798 799 if missing != 0:
799 800 return None
800 801
801 802 return result
802 803
803 804 def overriderebase(orig, ui, repo, **opts):
804 805 resuming = opts.get('continue')
805 806 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
806 807 repo._lfstatuswriters.append(lambda *msg, **opts: None)
807 808 try:
808 809 return orig(ui, repo, **opts)
809 810 finally:
810 811 repo._lfstatuswriters.pop()
811 812 repo._lfcommithooks.pop()
812 813
813 814 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
814 815 prefix=None, mtime=None, subrepos=None):
815 816 # No need to lock because we are only reading history and
816 817 # largefile caches, neither of which are modified.
817 818 lfcommands.cachelfiles(repo.ui, repo, node)
818 819
819 820 if kind not in archival.archivers:
820 821 raise util.Abort(_("unknown archive type '%s'") % kind)
821 822
822 823 ctx = repo[node]
823 824
824 825 if kind == 'files':
825 826 if prefix:
826 827 raise util.Abort(
827 828 _('cannot give prefix when archiving to files'))
828 829 else:
829 830 prefix = archival.tidyprefix(dest, kind, prefix)
830 831
831 832 def write(name, mode, islink, getdata):
832 833 if matchfn and not matchfn(name):
833 834 return
834 835 data = getdata()
835 836 if decode:
836 837 data = repo.wwritedata(name, data)
837 838 archiver.addfile(prefix + name, mode, islink, data)
838 839
839 840 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
840 841
841 842 if repo.ui.configbool("ui", "archivemeta", True):
842 843 def metadata():
843 844 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
844 845 hex(repo.changelog.node(0)), hex(node), ctx.branch())
845 846
846 847 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
847 848 if repo.tagtype(t) == 'global')
848 849 if not tags:
849 850 repo.ui.pushbuffer()
850 851 opts = {'template': '{latesttag}\n{latesttagdistance}',
851 852 'style': '', 'patch': None, 'git': None}
852 853 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
853 854 ltags, dist = repo.ui.popbuffer().split('\n')
854 855 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
855 856 tags += 'latesttagdistance: %s\n' % dist
856 857
857 858 return base + tags
858 859
859 860 write('.hg_archival.txt', 0644, False, metadata)
860 861
861 862 for f in ctx:
862 863 ff = ctx.flags(f)
863 864 getdata = ctx[f].data
864 865 if lfutil.isstandin(f):
865 866 path = lfutil.findfile(repo, getdata().strip())
866 867 if path is None:
867 868 raise util.Abort(
868 869 _('largefile %s not found in repo store or system cache')
869 870 % lfutil.splitstandin(f))
870 871 f = lfutil.splitstandin(f)
871 872
872 873 def getdatafn():
873 874 fd = None
874 875 try:
875 876 fd = open(path, 'rb')
876 877 return fd.read()
877 878 finally:
878 879 if fd:
879 880 fd.close()
880 881
881 882 getdata = getdatafn
882 883 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
883 884
884 885 if subrepos:
885 886 for subpath in sorted(ctx.substate):
886 887 sub = ctx.sub(subpath)
887 888 submatch = match_.narrowmatcher(subpath, matchfn)
888 889 sub.archive(repo.ui, archiver, prefix, submatch)
889 890
890 891 archiver.done()
891 892
892 893 def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
893 894 repo._get(repo._state + ('hg',))
894 895 rev = repo._state[1]
895 896 ctx = repo._repo[rev]
896 897
897 898 lfcommands.cachelfiles(ui, repo._repo, ctx.node())
898 899
899 900 def write(name, mode, islink, getdata):
900 901 # At this point, the standin has been replaced with the largefile name,
901 902 # so the normal matcher works here without the lfutil variants.
902 903 if match and not match(f):
903 904 return
904 905 data = getdata()
905 906
906 907 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
907 908
908 909 for f in ctx:
909 910 ff = ctx.flags(f)
910 911 getdata = ctx[f].data
911 912 if lfutil.isstandin(f):
912 913 path = lfutil.findfile(repo._repo, getdata().strip())
913 914 if path is None:
914 915 raise util.Abort(
915 916 _('largefile %s not found in repo store or system cache')
916 917 % lfutil.splitstandin(f))
917 918 f = lfutil.splitstandin(f)
918 919
919 920 def getdatafn():
920 921 fd = None
921 922 try:
922 923 fd = open(os.path.join(prefix, path), 'rb')
923 924 return fd.read()
924 925 finally:
925 926 if fd:
926 927 fd.close()
927 928
928 929 getdata = getdatafn
929 930
930 931 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
931 932
932 933 for subpath in sorted(ctx.substate):
933 934 sub = ctx.sub(subpath)
934 935 submatch = match_.narrowmatcher(subpath, match)
935 936 sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
936 937 submatch)
937 938
938 939 # If a largefile is modified, the change is not reflected in its
939 940 # standin until a commit. cmdutil.bailifchanged() raises an exception
940 941 # if the repo has uncommitted changes. Wrap it to also check if
941 942 # largefiles were changed. This is used by bisect, backout and fetch.
942 943 def overridebailifchanged(orig, repo):
943 944 orig(repo)
944 945 repo.lfstatus = True
945 946 s = repo.status()
946 947 repo.lfstatus = False
947 948 if s.modified or s.added or s.removed or s.deleted:
948 949 raise util.Abort(_('uncommitted changes'))
949 950
950 951 def overrideforget(orig, ui, repo, *pats, **opts):
951 952 installnormalfilesmatchfn(repo[None].manifest())
952 953 result = orig(ui, repo, *pats, **opts)
953 954 restorematchfn()
954 955 m = scmutil.match(repo[None], pats, opts)
955 956
956 957 try:
957 958 repo.lfstatus = True
958 959 s = repo.status(match=m, clean=True)
959 960 finally:
960 961 repo.lfstatus = False
961 962 forget = sorted(s.modified + s.added + s.deleted + s.clean)
962 963 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
963 964
964 965 for f in forget:
965 966 if lfutil.standin(f) not in repo.dirstate and not \
966 967 os.path.isdir(m.rel(lfutil.standin(f))):
967 968 ui.warn(_('not removing %s: file is already untracked\n')
968 969 % m.rel(f))
969 970 result = 1
970 971
971 972 for f in forget:
972 973 if ui.verbose or not m.exact(f):
973 974 ui.status(_('removing %s\n') % m.rel(f))
974 975
975 976 # Need to lock because standin files are deleted then removed from the
976 977 # repository and we could race in-between.
977 978 wlock = repo.wlock()
978 979 try:
979 980 lfdirstate = lfutil.openlfdirstate(ui, repo)
980 981 for f in forget:
981 982 if lfdirstate[f] == 'a':
982 983 lfdirstate.drop(f)
983 984 else:
984 985 lfdirstate.remove(f)
985 986 lfdirstate.write()
986 987 standins = [lfutil.standin(f) for f in forget]
987 988 for f in standins:
988 989 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
989 990 repo[None].forget(standins)
990 991 finally:
991 992 wlock.release()
992 993
993 994 return result
994 995
995 996 def _getoutgoings(repo, other, missing, addfunc):
996 997 """get pairs of filename and largefile hash in outgoing revisions
997 998 in 'missing'.
998 999
999 1000 largefiles already existing in the 'other' repository are ignored.
1000 1001
1001 1002 'addfunc' is invoked with each unique pair of filename and
1002 1003 largefile hash value.
1003 1004 """
1004 1005 knowns = set()
1005 1006 lfhashes = set()
1006 1007 def dedup(fn, lfhash):
1007 1008 k = (fn, lfhash)
1008 1009 if k not in knowns:
1009 1010 knowns.add(k)
1010 1011 lfhashes.add(lfhash)
1011 1012 lfutil.getlfilestoupload(repo, missing, dedup)
1012 1013 if lfhashes:
1013 1014 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1014 1015 for fn, lfhash in knowns:
1015 1016 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1016 1017 addfunc(fn, lfhash)
1017 1018
1018 1019 def outgoinghook(ui, repo, other, opts, missing):
1019 1020 if opts.pop('large', None):
1020 1021 lfhashes = set()
1021 1022 if ui.debugflag:
1022 1023 toupload = {}
1023 1024 def addfunc(fn, lfhash):
1024 1025 if fn not in toupload:
1025 1026 toupload[fn] = []
1026 1027 toupload[fn].append(lfhash)
1027 1028 lfhashes.add(lfhash)
1028 1029 def showhashes(fn):
1029 1030 for lfhash in sorted(toupload[fn]):
1030 1031 ui.debug(' %s\n' % (lfhash))
1031 1032 else:
1032 1033 toupload = set()
1033 1034 def addfunc(fn, lfhash):
1034 1035 toupload.add(fn)
1035 1036 lfhashes.add(lfhash)
1036 1037 def showhashes(fn):
1037 1038 pass
1038 1039 _getoutgoings(repo, other, missing, addfunc)
1039 1040
1040 1041 if not toupload:
1041 1042 ui.status(_('largefiles: no files to upload\n'))
1042 1043 else:
1043 1044 ui.status(_('largefiles to upload (%d entities):\n')
1044 1045 % (len(lfhashes)))
1045 1046 for file in sorted(toupload):
1046 1047 ui.status(lfutil.splitstandin(file) + '\n')
1047 1048 showhashes(file)
1048 1049 ui.status('\n')
1049 1050
1050 1051 def summaryremotehook(ui, repo, opts, changes):
1051 1052 largeopt = opts.get('large', False)
1052 1053 if changes is None:
1053 1054 if largeopt:
1054 1055 return (False, True) # only outgoing check is needed
1055 1056 else:
1056 1057 return (False, False)
1057 1058 elif largeopt:
1058 1059 url, branch, peer, outgoing = changes[1]
1059 1060 if peer is None:
1060 1061 # i18n: column positioning for "hg summary"
1061 1062 ui.status(_('largefiles: (no remote repo)\n'))
1062 1063 return
1063 1064
1064 1065 toupload = set()
1065 1066 lfhashes = set()
1066 1067 def addfunc(fn, lfhash):
1067 1068 toupload.add(fn)
1068 1069 lfhashes.add(lfhash)
1069 1070 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1070 1071
1071 1072 if not toupload:
1072 1073 # i18n: column positioning for "hg summary"
1073 1074 ui.status(_('largefiles: (no files to upload)\n'))
1074 1075 else:
1075 1076 # i18n: column positioning for "hg summary"
1076 1077 ui.status(_('largefiles: %d entities for %d files to upload\n')
1077 1078 % (len(lfhashes), len(toupload)))
1078 1079
1079 1080 def overridesummary(orig, ui, repo, *pats, **opts):
1080 1081 try:
1081 1082 repo.lfstatus = True
1082 1083 orig(ui, repo, *pats, **opts)
1083 1084 finally:
1084 1085 repo.lfstatus = False
1085 1086
1086 def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
1087 def scmutiladdremove(orig, repo, matcher, opts={}, dry_run=None,
1087 1088 similarity=None):
1088 1089 if not lfutil.islfilesrepo(repo):
1089 return orig(repo, pats, opts, dry_run, similarity)
1090 return orig(repo, matcher, opts, dry_run, similarity)
1090 1091 # Get the list of missing largefiles so we can remove them
1091 1092 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1092 1093 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1093 1094 False, False, False)
1094 1095
1095 1096 # Call into the normal remove code, but we want the removal of the standin
1096 1097 # to be handled by the original addremove. Monkey patching here makes sure
1097 1098 # we don't remove the standin in the largefiles code, preventing a very
1098 1099 # confused state later.
1099 1100 if s.deleted:
1100 1101 m = [repo.wjoin(f) for f in s.deleted]
1101 1102 removelargefiles(repo.ui, repo, True, *m, **opts)
1102 1103 # Call into the normal add code, and any files that *should* be added as
1103 1104 # largefiles will be
1104 addlargefiles(repo.ui, repo, *pats, **opts)
1105 addlargefiles(repo.ui, repo, matcher, **opts)
1105 1106 # Now that we've handled largefiles, hand off to the original addremove
1106 1107 # function to take care of the rest. Make sure it doesn't do anything with
1107 # largefiles by installing a matcher that will ignore them.
1108 installnormalfilesmatchfn(repo[None].manifest())
1109 result = orig(repo, pats, opts, dry_run, similarity)
1110 restorematchfn()
1111 return result
1108 # largefiles by passing a matcher that will ignore them.
1109 matcher = composenormalfilematcher(matcher, repo[None].manifest())
1110 return orig(repo, matcher, opts, dry_run, similarity)
1112 1111
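The override above follows the new scmutil.addremove() API, which takes a matcher object instead of a list of patterns. A minimal, hedged sketch of driving that API directly (the repository path and the 'glob:*.txt' pattern are illustrative, and similarity is left at its default):

    from mercurial import hg, scmutil, ui as uimod

    ui = uimod.ui()
    repo = hg.repository(ui, '.')            # assumes an existing repository
    # Build a matcher from command-line style patterns, then hand it to
    # addremove; dry_run=True only reports what would be added or removed.
    m = scmutil.match(repo[None], ['glob:*.txt'], {})
    scmutil.addremove(repo, m, dry_run=True)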
1113 1112 # Calling purge with --all will cause the largefiles to be deleted.
1114 1113 # Override repo.status to prevent this from happening.
1115 1114 def overridepurge(orig, ui, repo, *dirs, **opts):
1116 1115 oldstatus = repo.status
1117 1116 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1118 1117 clean=False, unknown=False, listsubrepos=False):
1119 1118 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1120 1119 listsubrepos)
1121 1120 lfdirstate = lfutil.openlfdirstate(ui, repo)
1122 1121 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1123 1122 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1124 1123 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1125 1124 unknown, ignored, r.clean)
1126 1125 repo.status = overridestatus
1127 1126 orig(ui, repo, *dirs, **opts)
1128 1127 repo.status = oldstatus
1129 1128 def overriderollback(orig, ui, repo, **opts):
1130 1129 wlock = repo.wlock()
1131 1130 try:
1132 1131 before = repo.dirstate.parents()
1133 1132 orphans = set(f for f in repo.dirstate
1134 1133 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1135 1134 result = orig(ui, repo, **opts)
1136 1135 after = repo.dirstate.parents()
1137 1136 if before == after:
1138 1137 return result # no need to restore standins
1139 1138
1140 1139 pctx = repo['.']
1141 1140 for f in repo.dirstate:
1142 1141 if lfutil.isstandin(f):
1143 1142 orphans.discard(f)
1144 1143 if repo.dirstate[f] == 'r':
1145 1144 repo.wvfs.unlinkpath(f, ignoremissing=True)
1146 1145 elif f in pctx:
1147 1146 fctx = pctx[f]
1148 1147 repo.wwrite(f, fctx.data(), fctx.flags())
1149 1148 else:
1150 1149 # content of standin is not so important in 'a',
1151 1150 # 'm' or 'n' (coming from the 2nd parent) cases
1152 1151 lfutil.writestandin(repo, f, '', False)
1153 1152 for standin in orphans:
1154 1153 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1155 1154
1156 1155 lfdirstate = lfutil.openlfdirstate(ui, repo)
1157 1156 orphans = set(lfdirstate)
1158 1157 lfiles = lfutil.listlfiles(repo)
1159 1158 for file in lfiles:
1160 1159 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1161 1160 orphans.discard(file)
1162 1161 for lfile in orphans:
1163 1162 lfdirstate.drop(lfile)
1164 1163 lfdirstate.write()
1165 1164 finally:
1166 1165 wlock.release()
1167 1166 return result
1168 1167
1169 1168 def overridetransplant(orig, ui, repo, *revs, **opts):
1170 1169 resuming = opts.get('continue')
1171 1170 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1172 1171 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1173 1172 try:
1174 1173 result = orig(ui, repo, *revs, **opts)
1175 1174 finally:
1176 1175 repo._lfstatuswriters.pop()
1177 1176 repo._lfcommithooks.pop()
1178 1177 return result
1179 1178
1180 1179 def overridecat(orig, ui, repo, file1, *pats, **opts):
1181 1180 ctx = scmutil.revsingle(repo, opts.get('rev'))
1182 1181 err = 1
1183 1182 notbad = set()
1184 1183 m = scmutil.match(ctx, (file1,) + pats, opts)
1185 1184 origmatchfn = m.matchfn
1186 1185 def lfmatchfn(f):
1187 1186 if origmatchfn(f):
1188 1187 return True
1189 1188 lf = lfutil.splitstandin(f)
1190 1189 if lf is None:
1191 1190 return False
1192 1191 notbad.add(lf)
1193 1192 return origmatchfn(lf)
1194 1193 m.matchfn = lfmatchfn
1195 1194 origbadfn = m.bad
1196 1195 def lfbadfn(f, msg):
1197 1196 if not f in notbad:
1198 1197 origbadfn(f, msg)
1199 1198 m.bad = lfbadfn
1200 1199 for f in ctx.walk(m):
1201 1200 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1202 1201 pathname=f)
1203 1202 lf = lfutil.splitstandin(f)
1204 1203 if lf is None or origmatchfn(f):
1205 1204 # duplicating unreachable code from commands.cat
1206 1205 data = ctx[f].data()
1207 1206 if opts.get('decode'):
1208 1207 data = repo.wwritedata(f, data)
1209 1208 fp.write(data)
1210 1209 else:
1211 1210 hash = lfutil.readstandin(repo, lf, ctx.rev())
1212 1211 if not lfutil.inusercache(repo.ui, hash):
1213 1212 store = basestore._openstore(repo)
1214 1213 success, missing = store.get([(lf, hash)])
1215 1214 if len(success) != 1:
1216 1215 raise util.Abort(
1217 1216 _('largefile %s is not in cache and could not be '
1218 1217 'downloaded') % lf)
1219 1218 path = lfutil.usercachepath(repo.ui, hash)
1220 1219 fpin = open(path, "rb")
1221 1220 for chunk in util.filechunkiter(fpin, 128 * 1024):
1222 1221 fp.write(chunk)
1223 1222 fpin.close()
1224 1223 fp.close()
1225 1224 err = 0
1226 1225 return err
1227 1226
1228 1227 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1229 1228 *args, **kwargs):
1230 1229 wlock = repo.wlock()
1231 1230 try:
1232 1231 # branch | | |
1233 1232 # merge | force | partial | action
1234 1233 # -------+-------+---------+--------------
1235 1234 # x | x | x | linear-merge
1236 1235 # o | x | x | branch-merge
1237 1236 # x | o | x | overwrite (as clean update)
1238 1237 # o | o | x | force-branch-merge (*1)
1239 1238 # x | x | o | (*)
1240 1239 # o | x | o | (*)
1241 1240 # x | o | o | overwrite (as revert)
1242 1241 # o | o | o | (*)
1243 1242 #
1244 1243 # (*) don't care
1245 1244 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1246 1245
1247 1246 linearmerge = not branchmerge and not force and not partial
1248 1247
1249 1248 if linearmerge or (branchmerge and force and not partial):
1250 1249 # update standins for linear-merge or force-branch-merge,
1251 1250 # because largefiles in the working directory may be modified
1252 1251 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1253 1252 unsure, s = lfdirstate.status(match_.always(repo.root,
1254 1253 repo.getcwd()),
1255 1254 [], False, False, False)
1256 1255 for lfile in unsure + s.modified + s.added:
1257 1256 lfutil.updatestandin(repo, lfutil.standin(lfile))
1258 1257
1259 1258 if linearmerge:
1260 1259 # Only call updatelfiles on the standins that have changed
1261 1260 # to save time
1262 1261 oldstandins = lfutil.getstandinsstate(repo)
1263 1262
1264 1263 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1265 1264
1266 1265 filelist = None
1267 1266 if linearmerge:
1268 1267 newstandins = lfutil.getstandinsstate(repo)
1269 1268 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1270 1269
1271 1270 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1272 1271 normallookup=partial)
1273 1272
1274 1273 return result
1275 1274 finally:
1276 1275 wlock.release()
1277 1276
1278 1277 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1279 1278 result = orig(repo, files, *args, **kwargs)
1280 1279
1281 1280 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1282 1281 if filelist:
1283 1282 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1284 1283 printmessage=False, normallookup=True)
1285 1284
1286 1285 return result
@@ -1,2972 +1,2972
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile
11 11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 14 import changelog
15 15 import bookmarks
16 16 import encoding
17 17 import lock as lockmod
18 18
19 19 def parsealiases(cmd):
20 20 return cmd.lstrip("^").split("|")
21 21
22 22 def findpossible(cmd, table, strict=False):
23 23 """
24 24 Return cmd -> (aliases, command table entry)
25 25 for each matching command.
26 26 Return debug commands (or their aliases) only if no normal command matches.
27 27 """
28 28 choice = {}
29 29 debugchoice = {}
30 30
31 31 if cmd in table:
32 32 # short-circuit exact matches, "log" alias beats "^log|history"
33 33 keys = [cmd]
34 34 else:
35 35 keys = table.keys()
36 36
37 37 for e in keys:
38 38 aliases = parsealiases(e)
39 39 found = None
40 40 if cmd in aliases:
41 41 found = cmd
42 42 elif not strict:
43 43 for a in aliases:
44 44 if a.startswith(cmd):
45 45 found = a
46 46 break
47 47 if found is not None:
48 48 if aliases[0].startswith("debug") or found.startswith("debug"):
49 49 debugchoice[found] = (aliases, table[e])
50 50 else:
51 51 choice[found] = (aliases, table[e])
52 52
53 53 if not choice and debugchoice:
54 54 choice = debugchoice
55 55
56 56 return choice
57 57
58 58 def findcmd(cmd, table, strict=True):
59 59 """Return (aliases, command table entry) for command string."""
60 60 choice = findpossible(cmd, table, strict)
61 61
62 62 if cmd in choice:
63 63 return choice[cmd]
64 64
65 65 if len(choice) > 1:
66 66 clist = choice.keys()
67 67 clist.sort()
68 68 raise error.AmbiguousCommand(cmd, clist)
69 69
70 70 if choice:
71 71 return choice.values()[0]
72 72
73 73 raise error.UnknownCommand(cmd)
74 74
75 75 def findrepo(p):
76 76 while not os.path.isdir(os.path.join(p, ".hg")):
77 77 oldp, p = p, os.path.dirname(p)
78 78 if p == oldp:
79 79 return None
80 80
81 81 return p
82 82
83 83 def bailifchanged(repo):
84 84 if repo.dirstate.p2() != nullid:
85 85 raise util.Abort(_('outstanding uncommitted merge'))
86 86 modified, added, removed, deleted = repo.status()[:4]
87 87 if modified or added or removed or deleted:
88 88 raise util.Abort(_('uncommitted changes'))
89 89 ctx = repo[None]
90 90 for s in sorted(ctx.substate):
91 91 if ctx.sub(s).dirty():
92 92 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
93 93
94 94 def logmessage(ui, opts):
95 95 """ get the log message according to -m and -l option """
96 96 message = opts.get('message')
97 97 logfile = opts.get('logfile')
98 98
99 99 if message and logfile:
100 100 raise util.Abort(_('options --message and --logfile are mutually '
101 101 'exclusive'))
102 102 if not message and logfile:
103 103 try:
104 104 if logfile == '-':
105 105 message = ui.fin.read()
106 106 else:
107 107 message = '\n'.join(util.readfile(logfile).splitlines())
108 108 except IOError, inst:
109 109 raise util.Abort(_("can't read commit message '%s': %s") %
110 110 (logfile, inst.strerror))
111 111 return message
112 112
113 113 def mergeeditform(ctxorbool, baseform):
114 114 """build appropriate editform from ctxorbool and baseform
115 115
116 116 'ctxorbool' is one of a ctx to be committed, or a bool whether
117 117 merging is committed.
118 118
119 119 This returns editform 'baseform' with the '.merge' suffix if merging
120 120 is committed, or with the '.normal' suffix otherwise.
121 121 """
122 122 if isinstance(ctxorbool, bool):
123 123 if ctxorbool:
124 124 return baseform + ".merge"
125 125 elif 1 < len(ctxorbool.parents()):
126 126 return baseform + ".merge"
127 127
128 128 return baseform + ".normal"
129 129
130 130 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
131 131 editform='', **opts):
132 132 """get appropriate commit message editor according to '--edit' option
133 133
134 134 'finishdesc' is a function to be called with edited commit message
135 135 (= 'description' of the new changeset) just after editing, but
136 136 before checking empty-ness. It should return actual text to be
137 137 stored into history. This allows the description to be changed
138 138 before it is stored.
139 139
140 140 'extramsg' is an extra message to be shown in the editor instead of
141 141 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
142 142 are automatically added.
143 143
144 144 'editform' is a dot-separated list of names, to distinguish
145 145 the purpose of commit text editing.
146 146
147 147 'getcommiteditor' returns 'commitforceeditor' regardless of
148 148 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
149 149 they are specific for usage in MQ.
150 150 """
151 151 if edit or finishdesc or extramsg:
152 152 return lambda r, c, s: commitforceeditor(r, c, s,
153 153 finishdesc=finishdesc,
154 154 extramsg=extramsg,
155 155 editform=editform)
156 156 elif editform:
157 157 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
158 158 else:
159 159 return commiteditor
160 160
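A hedged sketch of the caller side of getcommiteditor(): pick the editor from the command options and pass it to repo.commit(), mirroring the pattern used by tryimportone() further down (the editform value and option names are illustrative):

    from mercurial import cmdutil

    def commitwithopts(ui, repo, opts):
        # 'edit' in opts selects commitforceeditor; otherwise the plain
        # commiteditor is returned and the message is used as-is.
        editor = cmdutil.getcommiteditor(editform='commit.normal', **opts)
        return repo.commit(opts.get('message', ''), opts.get('user'),
                           opts.get('date'), editor=editor)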
161 161 def loglimit(opts):
162 162 """get the log limit according to option -l/--limit"""
163 163 limit = opts.get('limit')
164 164 if limit:
165 165 try:
166 166 limit = int(limit)
167 167 except ValueError:
168 168 raise util.Abort(_('limit must be a positive integer'))
169 169 if limit <= 0:
170 170 raise util.Abort(_('limit must be positive'))
171 171 else:
172 172 limit = None
173 173 return limit
174 174
175 175 def makefilename(repo, pat, node, desc=None,
176 176 total=None, seqno=None, revwidth=None, pathname=None):
177 177 node_expander = {
178 178 'H': lambda: hex(node),
179 179 'R': lambda: str(repo.changelog.rev(node)),
180 180 'h': lambda: short(node),
181 181 'm': lambda: re.sub('[^\w]', '_', str(desc))
182 182 }
183 183 expander = {
184 184 '%': lambda: '%',
185 185 'b': lambda: os.path.basename(repo.root),
186 186 }
187 187
188 188 try:
189 189 if node:
190 190 expander.update(node_expander)
191 191 if node:
192 192 expander['r'] = (lambda:
193 193 str(repo.changelog.rev(node)).zfill(revwidth or 0))
194 194 if total is not None:
195 195 expander['N'] = lambda: str(total)
196 196 if seqno is not None:
197 197 expander['n'] = lambda: str(seqno)
198 198 if total is not None and seqno is not None:
199 199 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
200 200 if pathname is not None:
201 201 expander['s'] = lambda: os.path.basename(pathname)
202 202 expander['d'] = lambda: os.path.dirname(pathname) or '.'
203 203 expander['p'] = lambda: pathname
204 204
205 205 newname = []
206 206 patlen = len(pat)
207 207 i = 0
208 208 while i < patlen:
209 209 c = pat[i]
210 210 if c == '%':
211 211 i += 1
212 212 c = pat[i]
213 213 c = expander[c]()
214 214 newname.append(c)
215 215 i += 1
216 216 return ''.join(newname)
217 217 except KeyError, inst:
218 218 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
219 219 inst.args[0])
220 220
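makefilename() is what expands the '%' escapes listed above. A small hedged example, assuming 'repo' is an open localrepo object as in the earlier sketch (the template string is illustrative):

    from mercurial import cmdutil

    ctx = repo['tip']
    # With node, seqno and total supplied, %h, %n and %N are all available.
    name = cmdutil.makefilename(repo, 'hg-%n-of-%N-%h.patch', ctx.node(),
                                seqno=1, total=3)
    # e.g. 'hg-1-of-3-1234567890ab.patch'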
221 221 def makefileobj(repo, pat, node=None, desc=None, total=None,
222 222 seqno=None, revwidth=None, mode='wb', modemap=None,
223 223 pathname=None):
224 224
225 225 writable = mode not in ('r', 'rb')
226 226
227 227 if not pat or pat == '-':
228 228 fp = writable and repo.ui.fout or repo.ui.fin
229 229 if util.safehasattr(fp, 'fileno'):
230 230 return os.fdopen(os.dup(fp.fileno()), mode)
231 231 else:
232 232 # if this fp can't be duped properly, return
233 233 # a dummy object that can be closed
234 234 class wrappedfileobj(object):
235 235 noop = lambda x: None
236 236 def __init__(self, f):
237 237 self.f = f
238 238 def __getattr__(self, attr):
239 239 if attr == 'close':
240 240 return self.noop
241 241 else:
242 242 return getattr(self.f, attr)
243 243
244 244 return wrappedfileobj(fp)
245 245 if util.safehasattr(pat, 'write') and writable:
246 246 return pat
247 247 if util.safehasattr(pat, 'read') and 'r' in mode:
248 248 return pat
249 249 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
250 250 if modemap is not None:
251 251 mode = modemap.get(fn, mode)
252 252 if mode == 'wb':
253 253 modemap[fn] = 'ab'
254 254 return open(fn, mode)
255 255
256 256 def openrevlog(repo, cmd, file_, opts):
257 257 """opens the changelog, manifest, a filelog or a given revlog"""
258 258 cl = opts['changelog']
259 259 mf = opts['manifest']
260 260 msg = None
261 261 if cl and mf:
262 262 msg = _('cannot specify --changelog and --manifest at the same time')
263 263 elif cl or mf:
264 264 if file_:
265 265 msg = _('cannot specify filename with --changelog or --manifest')
266 266 elif not repo:
267 267 msg = _('cannot specify --changelog or --manifest '
268 268 'without a repository')
269 269 if msg:
270 270 raise util.Abort(msg)
271 271
272 272 r = None
273 273 if repo:
274 274 if cl:
275 275 r = repo.unfiltered().changelog
276 276 elif mf:
277 277 r = repo.manifest
278 278 elif file_:
279 279 filelog = repo.file(file_)
280 280 if len(filelog):
281 281 r = filelog
282 282 if not r:
283 283 if not file_:
284 284 raise error.CommandError(cmd, _('invalid arguments'))
285 285 if not os.path.isfile(file_):
286 286 raise util.Abort(_("revlog '%s' not found") % file_)
287 287 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
288 288 file_[:-2] + ".i")
289 289 return r
290 290
291 291 def copy(ui, repo, pats, opts, rename=False):
292 292 # called with the repo lock held
293 293 #
294 294 # hgsep => pathname that uses "/" to separate directories
295 295 # ossep => pathname that uses os.sep to separate directories
296 296 cwd = repo.getcwd()
297 297 targets = {}
298 298 after = opts.get("after")
299 299 dryrun = opts.get("dry_run")
300 300 wctx = repo[None]
301 301
302 302 def walkpat(pat):
303 303 srcs = []
304 304 badstates = after and '?' or '?r'
305 305 m = scmutil.match(repo[None], [pat], opts, globbed=True)
306 306 for abs in repo.walk(m):
307 307 state = repo.dirstate[abs]
308 308 rel = m.rel(abs)
309 309 exact = m.exact(abs)
310 310 if state in badstates:
311 311 if exact and state == '?':
312 312 ui.warn(_('%s: not copying - file is not managed\n') % rel)
313 313 if exact and state == 'r':
314 314 ui.warn(_('%s: not copying - file has been marked for'
315 315 ' remove\n') % rel)
316 316 continue
317 317 # abs: hgsep
318 318 # rel: ossep
319 319 srcs.append((abs, rel, exact))
320 320 return srcs
321 321
322 322 # abssrc: hgsep
323 323 # relsrc: ossep
324 324 # otarget: ossep
325 325 def copyfile(abssrc, relsrc, otarget, exact):
326 326 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
327 327 if '/' in abstarget:
328 328 # We cannot normalize abstarget itself, this would prevent
329 329 # case only renames, like a => A.
330 330 abspath, absname = abstarget.rsplit('/', 1)
331 331 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
332 332 reltarget = repo.pathto(abstarget, cwd)
333 333 target = repo.wjoin(abstarget)
334 334 src = repo.wjoin(abssrc)
335 335 state = repo.dirstate[abstarget]
336 336
337 337 scmutil.checkportable(ui, abstarget)
338 338
339 339 # check for collisions
340 340 prevsrc = targets.get(abstarget)
341 341 if prevsrc is not None:
342 342 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
343 343 (reltarget, repo.pathto(abssrc, cwd),
344 344 repo.pathto(prevsrc, cwd)))
345 345 return
346 346
347 347 # check for overwrites
348 348 exists = os.path.lexists(target)
349 349 samefile = False
350 350 if exists and abssrc != abstarget:
351 351 if (repo.dirstate.normalize(abssrc) ==
352 352 repo.dirstate.normalize(abstarget)):
353 353 if not rename:
354 354 ui.warn(_("%s: can't copy - same file\n") % reltarget)
355 355 return
356 356 exists = False
357 357 samefile = True
358 358
359 359 if not after and exists or after and state in 'mn':
360 360 if not opts['force']:
361 361 ui.warn(_('%s: not overwriting - file exists\n') %
362 362 reltarget)
363 363 return
364 364
365 365 if after:
366 366 if not exists:
367 367 if rename:
368 368 ui.warn(_('%s: not recording move - %s does not exist\n') %
369 369 (relsrc, reltarget))
370 370 else:
371 371 ui.warn(_('%s: not recording copy - %s does not exist\n') %
372 372 (relsrc, reltarget))
373 373 return
374 374 elif not dryrun:
375 375 try:
376 376 if exists:
377 377 os.unlink(target)
378 378 targetdir = os.path.dirname(target) or '.'
379 379 if not os.path.isdir(targetdir):
380 380 os.makedirs(targetdir)
381 381 if samefile:
382 382 tmp = target + "~hgrename"
383 383 os.rename(src, tmp)
384 384 os.rename(tmp, target)
385 385 else:
386 386 util.copyfile(src, target)
387 387 srcexists = True
388 388 except IOError, inst:
389 389 if inst.errno == errno.ENOENT:
390 390 ui.warn(_('%s: deleted in working copy\n') % relsrc)
391 391 srcexists = False
392 392 else:
393 393 ui.warn(_('%s: cannot copy - %s\n') %
394 394 (relsrc, inst.strerror))
395 395 return True # report a failure
396 396
397 397 if ui.verbose or not exact:
398 398 if rename:
399 399 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
400 400 else:
401 401 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
402 402
403 403 targets[abstarget] = abssrc
404 404
405 405 # fix up dirstate
406 406 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
407 407 dryrun=dryrun, cwd=cwd)
408 408 if rename and not dryrun:
409 409 if not after and srcexists and not samefile:
410 410 util.unlinkpath(repo.wjoin(abssrc))
411 411 wctx.forget([abssrc])
412 412
413 413 # pat: ossep
414 414 # dest ossep
415 415 # srcs: list of (hgsep, hgsep, ossep, bool)
416 416 # return: function that takes hgsep and returns ossep
417 417 def targetpathfn(pat, dest, srcs):
418 418 if os.path.isdir(pat):
419 419 abspfx = pathutil.canonpath(repo.root, cwd, pat)
420 420 abspfx = util.localpath(abspfx)
421 421 if destdirexists:
422 422 striplen = len(os.path.split(abspfx)[0])
423 423 else:
424 424 striplen = len(abspfx)
425 425 if striplen:
426 426 striplen += len(os.sep)
427 427 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
428 428 elif destdirexists:
429 429 res = lambda p: os.path.join(dest,
430 430 os.path.basename(util.localpath(p)))
431 431 else:
432 432 res = lambda p: dest
433 433 return res
434 434
435 435 # pat: ossep
436 436 # dest ossep
437 437 # srcs: list of (hgsep, hgsep, ossep, bool)
438 438 # return: function that takes hgsep and returns ossep
439 439 def targetpathafterfn(pat, dest, srcs):
440 440 if matchmod.patkind(pat):
441 441 # a mercurial pattern
442 442 res = lambda p: os.path.join(dest,
443 443 os.path.basename(util.localpath(p)))
444 444 else:
445 445 abspfx = pathutil.canonpath(repo.root, cwd, pat)
446 446 if len(abspfx) < len(srcs[0][0]):
447 447 # A directory. Either the target path contains the last
448 448 # component of the source path or it does not.
449 449 def evalpath(striplen):
450 450 score = 0
451 451 for s in srcs:
452 452 t = os.path.join(dest, util.localpath(s[0])[striplen:])
453 453 if os.path.lexists(t):
454 454 score += 1
455 455 return score
456 456
457 457 abspfx = util.localpath(abspfx)
458 458 striplen = len(abspfx)
459 459 if striplen:
460 460 striplen += len(os.sep)
461 461 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
462 462 score = evalpath(striplen)
463 463 striplen1 = len(os.path.split(abspfx)[0])
464 464 if striplen1:
465 465 striplen1 += len(os.sep)
466 466 if evalpath(striplen1) > score:
467 467 striplen = striplen1
468 468 res = lambda p: os.path.join(dest,
469 469 util.localpath(p)[striplen:])
470 470 else:
471 471 # a file
472 472 if destdirexists:
473 473 res = lambda p: os.path.join(dest,
474 474 os.path.basename(util.localpath(p)))
475 475 else:
476 476 res = lambda p: dest
477 477 return res
478 478
479 479
480 480 pats = scmutil.expandpats(pats)
481 481 if not pats:
482 482 raise util.Abort(_('no source or destination specified'))
483 483 if len(pats) == 1:
484 484 raise util.Abort(_('no destination specified'))
485 485 dest = pats.pop()
486 486 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
487 487 if not destdirexists:
488 488 if len(pats) > 1 or matchmod.patkind(pats[0]):
489 489 raise util.Abort(_('with multiple sources, destination must be an '
490 490 'existing directory'))
491 491 if util.endswithsep(dest):
492 492 raise util.Abort(_('destination %s is not a directory') % dest)
493 493
494 494 tfn = targetpathfn
495 495 if after:
496 496 tfn = targetpathafterfn
497 497 copylist = []
498 498 for pat in pats:
499 499 srcs = walkpat(pat)
500 500 if not srcs:
501 501 continue
502 502 copylist.append((tfn(pat, dest, srcs), srcs))
503 503 if not copylist:
504 504 raise util.Abort(_('no files to copy'))
505 505
506 506 errors = 0
507 507 for targetpath, srcs in copylist:
508 508 for abssrc, relsrc, exact in srcs:
509 509 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
510 510 errors += 1
511 511
512 512 if errors:
513 513 ui.warn(_('(consider using --after)\n'))
514 514
515 515 return errors != 0
516 516
517 517 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
518 518 runargs=None, appendpid=False):
519 519 '''Run a command as a service.'''
520 520
521 521 def writepid(pid):
522 522 if opts['pid_file']:
523 523 mode = appendpid and 'a' or 'w'
524 524 fp = open(opts['pid_file'], mode)
525 525 fp.write(str(pid) + '\n')
526 526 fp.close()
527 527
528 528 if opts['daemon'] and not opts['daemon_pipefds']:
529 529 # Signal child process startup with file removal
530 530 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
531 531 os.close(lockfd)
532 532 try:
533 533 if not runargs:
534 534 runargs = util.hgcmd() + sys.argv[1:]
535 535 runargs.append('--daemon-pipefds=%s' % lockpath)
536 536 # Don't pass --cwd to the child process, because we've already
537 537 # changed directory.
538 538 for i in xrange(1, len(runargs)):
539 539 if runargs[i].startswith('--cwd='):
540 540 del runargs[i]
541 541 break
542 542 elif runargs[i].startswith('--cwd'):
543 543 del runargs[i:i + 2]
544 544 break
545 545 def condfn():
546 546 return not os.path.exists(lockpath)
547 547 pid = util.rundetached(runargs, condfn)
548 548 if pid < 0:
549 549 raise util.Abort(_('child process failed to start'))
550 550 writepid(pid)
551 551 finally:
552 552 try:
553 553 os.unlink(lockpath)
554 554 except OSError, e:
555 555 if e.errno != errno.ENOENT:
556 556 raise
557 557 if parentfn:
558 558 return parentfn(pid)
559 559 else:
560 560 return
561 561
562 562 if initfn:
563 563 initfn()
564 564
565 565 if not opts['daemon']:
566 566 writepid(os.getpid())
567 567
568 568 if opts['daemon_pipefds']:
569 569 lockpath = opts['daemon_pipefds']
570 570 try:
571 571 os.setsid()
572 572 except AttributeError:
573 573 pass
574 574 os.unlink(lockpath)
575 575 util.hidewindow()
576 576 sys.stdout.flush()
577 577 sys.stderr.flush()
578 578
579 579 nullfd = os.open(os.devnull, os.O_RDWR)
580 580 logfilefd = nullfd
581 581 if logfile:
582 582 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
583 583 os.dup2(nullfd, 0)
584 584 os.dup2(logfilefd, 1)
585 585 os.dup2(logfilefd, 2)
586 586 if nullfd not in (0, 1, 2):
587 587 os.close(nullfd)
588 588 if logfile and logfilefd not in (0, 1, 2):
589 589 os.close(logfilefd)
590 590
591 591 if runfn:
592 592 return runfn()
593 593
594 594 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
595 595 """Utility function used by commands.import to import a single patch
596 596
597 597 This function is explicitly defined here to help the evolve extension to
598 598 wrap this part of the import logic.
599 599
600 600 The API is currently a bit ugly because it is a simple code translation from
601 601 the import command. Feel free to make it better.
602 602
603 603 :hunk: a patch (as a binary string)
604 604 :parents: nodes that will be parent of the created commit
605 605 :opts: the full dict of options passed to the import command
606 606 :msgs: list to save commit message to.
607 607 (used in case we need to save it when failing)
608 608 :updatefunc: a function that updates a repo to a given node
609 609 updatefunc(<repo>, <node>)
610 610 """
611 611 tmpname, message, user, date, branch, nodeid, p1, p2 = \
612 612 patch.extract(ui, hunk)
613 613
614 614 update = not opts.get('bypass')
615 615 strip = opts["strip"]
616 616 sim = float(opts.get('similarity') or 0)
617 617 if not tmpname:
618 618 return (None, None, False)
619 619 msg = _('applied to working directory')
620 620
621 621 rejects = False
622 622
623 623 try:
624 624 cmdline_message = logmessage(ui, opts)
625 625 if cmdline_message:
626 626 # pickup the cmdline msg
627 627 message = cmdline_message
628 628 elif message:
629 629 # pickup the patch msg
630 630 message = message.strip()
631 631 else:
632 632 # launch the editor
633 633 message = None
634 634 ui.debug('message:\n%s\n' % message)
635 635
636 636 if len(parents) == 1:
637 637 parents.append(repo[nullid])
638 638 if opts.get('exact'):
639 639 if not nodeid or not p1:
640 640 raise util.Abort(_('not a Mercurial patch'))
641 641 p1 = repo[p1]
642 642 p2 = repo[p2 or nullid]
643 643 elif p2:
644 644 try:
645 645 p1 = repo[p1]
646 646 p2 = repo[p2]
647 647 # Without any options, consider p2 only if the
648 648 # patch is being applied on top of the recorded
649 649 # first parent.
650 650 if p1 != parents[0]:
651 651 p1 = parents[0]
652 652 p2 = repo[nullid]
653 653 except error.RepoError:
654 654 p1, p2 = parents
655 655 if p2.node() == nullid:
656 656 ui.warn(_("warning: import the patch as a normal revision\n"
657 657 "(use --exact to import the patch as a merge)\n"))
658 658 else:
659 659 p1, p2 = parents
660 660
661 661 n = None
662 662 if update:
663 663 repo.dirstate.beginparentchange()
664 664 if p1 != parents[0]:
665 665 updatefunc(repo, p1.node())
666 666 if p2 != parents[1]:
667 667 repo.setparents(p1.node(), p2.node())
668 668
669 669 if opts.get('exact') or opts.get('import_branch'):
670 670 repo.dirstate.setbranch(branch or 'default')
671 671
672 672 partial = opts.get('partial', False)
673 673 files = set()
674 674 try:
675 675 patch.patch(ui, repo, tmpname, strip=strip, files=files,
676 676 eolmode=None, similarity=sim / 100.0)
677 677 except patch.PatchError, e:
678 678 if not partial:
679 679 raise util.Abort(str(e))
680 680 if partial:
681 681 rejects = True
682 682
683 683 files = list(files)
684 684 if opts.get('no_commit'):
685 685 if message:
686 686 msgs.append(message)
687 687 else:
688 688 if opts.get('exact') or p2:
689 689 # If you got here, you either use --force and know what
690 690 # you are doing or used --exact or a merge patch while
691 691 # being updated to its first parent.
692 692 m = None
693 693 else:
694 694 m = scmutil.matchfiles(repo, files or [])
695 695 editform = mergeeditform(repo[None], 'import.normal')
696 696 if opts.get('exact'):
697 697 editor = None
698 698 else:
699 699 editor = getcommiteditor(editform=editform, **opts)
700 700 n = repo.commit(message, opts.get('user') or user,
701 701 opts.get('date') or date, match=m,
702 702 editor=editor, force=partial)
703 703 repo.dirstate.endparentchange()
704 704 else:
705 705 if opts.get('exact') or opts.get('import_branch'):
706 706 branch = branch or 'default'
707 707 else:
708 708 branch = p1.branch()
709 709 store = patch.filestore()
710 710 try:
711 711 files = set()
712 712 try:
713 713 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
714 714 files, eolmode=None)
715 715 except patch.PatchError, e:
716 716 raise util.Abort(str(e))
717 717 if opts.get('exact'):
718 718 editor = None
719 719 else:
720 720 editor = getcommiteditor(editform='import.bypass')
721 721 memctx = context.makememctx(repo, (p1.node(), p2.node()),
722 722 message,
723 723 opts.get('user') or user,
724 724 opts.get('date') or date,
725 725 branch, files, store,
726 726 editor=editor)
727 727 n = memctx.commit()
728 728 finally:
729 729 store.close()
730 730 if opts.get('exact') and opts.get('no_commit'):
731 731 # --exact with --no-commit is still useful in that it does merge
732 732 # and branch bits
733 733 ui.warn(_("warning: can't check exact import with --no-commit\n"))
734 734 elif opts.get('exact') and hex(n) != nodeid:
735 735 raise util.Abort(_('patch is damaged or loses information'))
736 736 if n:
737 737 # i18n: refers to a short changeset id
738 738 msg = _('created %s') % short(n)
739 739 return (msg, n, rejects)
740 740 finally:
741 741 os.unlink(tmpname)
742 742
743 743 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
744 744 opts=None):
745 745 '''export changesets as hg patches.'''
746 746
747 747 total = len(revs)
748 748 revwidth = max([len(str(rev)) for rev in revs])
749 749 filemode = {}
750 750
751 751 def single(rev, seqno, fp):
752 752 ctx = repo[rev]
753 753 node = ctx.node()
754 754 parents = [p.node() for p in ctx.parents() if p]
755 755 branch = ctx.branch()
756 756 if switch_parent:
757 757 parents.reverse()
758 758 prev = (parents and parents[0]) or nullid
759 759
760 760 shouldclose = False
761 761 if not fp and len(template) > 0:
762 762 desc_lines = ctx.description().rstrip().split('\n')
763 763 desc = desc_lines[0] #Commit always has a first line.
764 764 fp = makefileobj(repo, template, node, desc=desc, total=total,
765 765 seqno=seqno, revwidth=revwidth, mode='wb',
766 766 modemap=filemode)
767 767 if fp != template:
768 768 shouldclose = True
769 769 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
770 770 repo.ui.note("%s\n" % fp.name)
771 771
772 772 if not fp:
773 773 write = repo.ui.write
774 774 else:
775 775 def write(s, **kw):
776 776 fp.write(s)
777 777
778 778
779 779 write("# HG changeset patch\n")
780 780 write("# User %s\n" % ctx.user())
781 781 write("# Date %d %d\n" % ctx.date())
782 782 write("# %s\n" % util.datestr(ctx.date()))
783 783 if branch and branch != 'default':
784 784 write("# Branch %s\n" % branch)
785 785 write("# Node ID %s\n" % hex(node))
786 786 write("# Parent %s\n" % hex(prev))
787 787 if len(parents) > 1:
788 788 write("# Parent %s\n" % hex(parents[1]))
789 789 write(ctx.description().rstrip())
790 790 write("\n\n")
791 791
792 792 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
793 793 write(chunk, label=label)
794 794
795 795 if shouldclose:
796 796 fp.close()
797 797
798 798 for seqno, rev in enumerate(revs):
799 799 single(rev, seqno + 1, fp)
800 800
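A hedged usage sketch for export(): write the selected revisions to files named from the template, with diff options read from the configuration (the revision selection is illustrative):

    from mercurial import cmdutil, patch

    # Export the tip revision as an hg patch named from the template.
    cmdutil.export(repo, [repo['tip'].rev()],
                   template='hg-%h.patch',
                   opts=patch.diffopts(repo.ui))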
801 801 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
802 802 changes=None, stat=False, fp=None, prefix='',
803 803 listsubrepos=False):
804 804 '''show diff or diffstat.'''
805 805 if fp is None:
806 806 write = ui.write
807 807 else:
808 808 def write(s, **kw):
809 809 fp.write(s)
810 810
811 811 if stat:
812 812 diffopts = diffopts.copy(context=0)
813 813 width = 80
814 814 if not ui.plain():
815 815 width = ui.termwidth()
816 816 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
817 817 prefix=prefix)
818 818 for chunk, label in patch.diffstatui(util.iterlines(chunks),
819 819 width=width,
820 820 git=diffopts.git):
821 821 write(chunk, label=label)
822 822 else:
823 823 for chunk, label in patch.diffui(repo, node1, node2, match,
824 824 changes, diffopts, prefix=prefix):
825 825 write(chunk, label=label)
826 826
827 827 if listsubrepos:
828 828 ctx1 = repo[node1]
829 829 ctx2 = repo[node2]
830 830 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
831 831 tempnode2 = node2
832 832 try:
833 833 if node2 is not None:
834 834 tempnode2 = ctx2.substate[subpath][1]
835 835 except KeyError:
836 836 # A subrepo that existed in node1 was deleted between node1 and
837 837 # node2 (inclusive). Thus, ctx2's substate won't contain that
838 838 # subpath. The best we can do is to ignore it.
839 839 tempnode2 = None
840 840 submatch = matchmod.narrowmatcher(subpath, match)
841 841 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
842 842 stat=stat, fp=fp, prefix=prefix)
843 843
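A hedged sketch of calling diffordiffstat() directly, comparing the '.' revision against its first parent and printing a diffstat (the matcher and diff options are the simplest possible choices):

    from mercurial import cmdutil, patch, scmutil

    node1 = repo['.'].p1().node()
    node2 = repo['.'].node()
    m = scmutil.matchall(repo)
    cmdutil.diffordiffstat(repo.ui, repo, patch.diffopts(repo.ui),
                           node1, node2, m, stat=True)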
844 844 class changeset_printer(object):
845 845 '''show changeset information when templating is not requested.'''
846 846
847 847 def __init__(self, ui, repo, matchfn, diffopts, buffered):
848 848 self.ui = ui
849 849 self.repo = repo
850 850 self.buffered = buffered
851 851 self.matchfn = matchfn
852 852 self.diffopts = diffopts
853 853 self.header = {}
854 854 self.hunk = {}
855 855 self.lastheader = None
856 856 self.footer = None
857 857
858 858 def flush(self, rev):
859 859 if rev in self.header:
860 860 h = self.header[rev]
861 861 if h != self.lastheader:
862 862 self.lastheader = h
863 863 self.ui.write(h)
864 864 del self.header[rev]
865 865 if rev in self.hunk:
866 866 self.ui.write(self.hunk[rev])
867 867 del self.hunk[rev]
868 868 return 1
869 869 return 0
870 870
871 871 def close(self):
872 872 if self.footer:
873 873 self.ui.write(self.footer)
874 874
875 875 def show(self, ctx, copies=None, matchfn=None, **props):
876 876 if self.buffered:
877 877 self.ui.pushbuffer()
878 878 self._show(ctx, copies, matchfn, props)
879 879 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
880 880 else:
881 881 self._show(ctx, copies, matchfn, props)
882 882
883 883 def _show(self, ctx, copies, matchfn, props):
884 884 '''show a single changeset or file revision'''
885 885 changenode = ctx.node()
886 886 rev = ctx.rev()
887 887
888 888 if self.ui.quiet:
889 889 self.ui.write("%d:%s\n" % (rev, short(changenode)),
890 890 label='log.node')
891 891 return
892 892
893 893 log = self.repo.changelog
894 894 date = util.datestr(ctx.date())
895 895
896 896 hexfunc = self.ui.debugflag and hex or short
897 897
898 898 parents = [(p, hexfunc(log.node(p)))
899 899 for p in self._meaningful_parentrevs(log, rev)]
900 900
901 901 # i18n: column positioning for "hg log"
902 902 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
903 903 label='log.changeset changeset.%s' % ctx.phasestr())
904 904
905 905 branch = ctx.branch()
906 906 # don't show the default branch name
907 907 if branch != 'default':
908 908 # i18n: column positioning for "hg log"
909 909 self.ui.write(_("branch: %s\n") % branch,
910 910 label='log.branch')
911 911 for bookmark in self.repo.nodebookmarks(changenode):
912 912 # i18n: column positioning for "hg log"
913 913 self.ui.write(_("bookmark: %s\n") % bookmark,
914 914 label='log.bookmark')
915 915 for tag in self.repo.nodetags(changenode):
916 916 # i18n: column positioning for "hg log"
917 917 self.ui.write(_("tag: %s\n") % tag,
918 918 label='log.tag')
919 919 if self.ui.debugflag:
920 920 # i18n: column positioning for "hg log"
921 921 self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
922 922 label='log.phase')
923 923 for parent in parents:
924 924 label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
925 925 # i18n: column positioning for "hg log"
926 926 self.ui.write(_("parent: %d:%s\n") % parent,
927 927 label=label)
928 928
929 929 if self.ui.debugflag:
930 930 mnode = ctx.manifestnode()
931 931 # i18n: column positioning for "hg log"
932 932 self.ui.write(_("manifest: %d:%s\n") %
933 933 (self.repo.manifest.rev(mnode), hex(mnode)),
934 934 label='ui.debug log.manifest')
935 935 # i18n: column positioning for "hg log"
936 936 self.ui.write(_("user: %s\n") % ctx.user(),
937 937 label='log.user')
938 938 # i18n: column positioning for "hg log"
939 939 self.ui.write(_("date: %s\n") % date,
940 940 label='log.date')
941 941
942 942 if self.ui.debugflag:
943 943 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
944 944 for key, value in zip([# i18n: column positioning for "hg log"
945 945 _("files:"),
946 946 # i18n: column positioning for "hg log"
947 947 _("files+:"),
948 948 # i18n: column positioning for "hg log"
949 949 _("files-:")], files):
950 950 if value:
951 951 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
952 952 label='ui.debug log.files')
953 953 elif ctx.files() and self.ui.verbose:
954 954 # i18n: column positioning for "hg log"
955 955 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
956 956 label='ui.note log.files')
957 957 if copies and self.ui.verbose:
958 958 copies = ['%s (%s)' % c for c in copies]
959 959 # i18n: column positioning for "hg log"
960 960 self.ui.write(_("copies: %s\n") % ' '.join(copies),
961 961 label='ui.note log.copies')
962 962
963 963 extra = ctx.extra()
964 964 if extra and self.ui.debugflag:
965 965 for key, value in sorted(extra.items()):
966 966 # i18n: column positioning for "hg log"
967 967 self.ui.write(_("extra: %s=%s\n")
968 968 % (key, value.encode('string_escape')),
969 969 label='ui.debug log.extra')
970 970
971 971 description = ctx.description().strip()
972 972 if description:
973 973 if self.ui.verbose:
974 974 self.ui.write(_("description:\n"),
975 975 label='ui.note log.description')
976 976 self.ui.write(description,
977 977 label='ui.note log.description')
978 978 self.ui.write("\n\n")
979 979 else:
980 980 # i18n: column positioning for "hg log"
981 981 self.ui.write(_("summary: %s\n") %
982 982 description.splitlines()[0],
983 983 label='log.summary')
984 984 self.ui.write("\n")
985 985
986 986 self.showpatch(changenode, matchfn)
987 987
988 988 def showpatch(self, node, matchfn):
989 989 if not matchfn:
990 990 matchfn = self.matchfn
991 991 if matchfn:
992 992 stat = self.diffopts.get('stat')
993 993 diff = self.diffopts.get('patch')
994 994 diffopts = patch.diffopts(self.ui, self.diffopts)
995 995 prev = self.repo.changelog.parents(node)[0]
996 996 if stat:
997 997 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
998 998 match=matchfn, stat=True)
999 999 if diff:
1000 1000 if stat:
1001 1001 self.ui.write("\n")
1002 1002 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1003 1003 match=matchfn, stat=False)
1004 1004 self.ui.write("\n")
1005 1005
1006 1006 def _meaningful_parentrevs(self, log, rev):
1007 1007 """Return list of meaningful (or all if debug) parentrevs for rev.
1008 1008
1009 1009 For merges (two non-nullrev revisions) both parents are meaningful.
1010 1010 Otherwise the first parent revision is considered meaningful if it
1011 1011 is not the preceding revision.
1012 1012 """
1013 1013 parents = log.parentrevs(rev)
1014 1014 if not self.ui.debugflag and parents[1] == nullrev:
1015 1015 if parents[0] >= rev - 1:
1016 1016 parents = []
1017 1017 else:
1018 1018 parents = [parents[0]]
1019 1019 return parents
1020 1020
1021 1021 class jsonchangeset(changeset_printer):
1022 1022 '''format changeset information.'''
1023 1023
1024 1024 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1025 1025 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1026 1026 self.cache = {}
1027 1027 self._first = True
1028 1028
1029 1029 def close(self):
1030 1030 if not self._first:
1031 1031 self.ui.write("\n]\n")
1032 1032 else:
1033 1033 self.ui.write("[]\n")
1034 1034
1035 1035 def _show(self, ctx, copies, matchfn, props):
1036 1036 '''show a single changeset or file revision'''
1037 1037 hexnode = hex(ctx.node())
1038 1038 rev = ctx.rev()
1039 1039 j = encoding.jsonescape
1040 1040
1041 1041 if self._first:
1042 1042 self.ui.write("[\n {")
1043 1043 self._first = False
1044 1044 else:
1045 1045 self.ui.write(",\n {")
1046 1046
1047 1047 if self.ui.quiet:
1048 1048 self.ui.write('\n "rev": %d' % rev)
1049 1049 self.ui.write(',\n "node": "%s"' % hexnode)
1050 1050 self.ui.write('\n }')
1051 1051 return
1052 1052
1053 1053 self.ui.write('\n "rev": %d' % rev)
1054 1054 self.ui.write(',\n "node": "%s"' % hexnode)
1055 1055 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1056 1056 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1057 1057 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1058 1058 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1059 1059 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1060 1060
1061 1061 self.ui.write(',\n "bookmarks": [%s]' %
1062 1062 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1063 1063 self.ui.write(',\n "tags": [%s]' %
1064 1064 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1065 1065 self.ui.write(',\n "parents": [%s]' %
1066 1066 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1067 1067
1068 1068 if self.ui.debugflag:
1069 1069 self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))
1070 1070
1071 1071 self.ui.write(',\n "extra": {%s}' %
1072 1072 ", ".join('"%s": "%s"' % (j(k), j(v))
1073 1073 for k, v in ctx.extra().items()))
1074 1074
1075 1075 files = ctx.status(ctx.p1())
1076 1076 self.ui.write(',\n "modified": [%s]' %
1077 1077 ", ".join('"%s"' % j(f) for f in files[0]))
1078 1078 self.ui.write(',\n "added": [%s]' %
1079 1079 ", ".join('"%s"' % j(f) for f in files[1]))
1080 1080 self.ui.write(',\n "removed": [%s]' %
1081 1081 ", ".join('"%s"' % j(f) for f in files[2]))
1082 1082
1083 1083 elif self.ui.verbose:
1084 1084 self.ui.write(',\n "files": [%s]' %
1085 1085 ", ".join('"%s"' % j(f) for f in ctx.files()))
1086 1086
1087 1087 if copies:
1088 1088 self.ui.write(',\n "copies": {%s}' %
1089 1089 ", ".join('"%s": %s' % (j(k), j(copies[k]))
1090 1090 for k in copies))
1091 1091
1092 1092 matchfn = self.matchfn
1093 1093 if matchfn:
1094 1094 stat = self.diffopts.get('stat')
1095 1095 diff = self.diffopts.get('patch')
1096 1096 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1097 1097 node, prev = ctx.node(), ctx.p1().node()
1098 1098 if stat:
1099 1099 self.ui.pushbuffer()
1100 1100 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1101 1101 match=matchfn, stat=True)
1102 1102 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1103 1103 if diff:
1104 1104 self.ui.pushbuffer()
1105 1105 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1106 1106 match=matchfn, stat=False)
1107 1107 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1108 1108
1109 1109 self.ui.write("\n }")
1110 1110
1111 1111 class changeset_templater(changeset_printer):
1112 1112 '''format changeset information.'''
1113 1113
1114 1114 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1115 1115 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1116 1116 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1117 1117 defaulttempl = {
1118 1118 'parent': '{rev}:{node|formatnode} ',
1119 1119 'manifest': '{rev}:{node|formatnode}',
1120 1120 'file_copy': '{name} ({source})',
1121 1121 'extra': '{key}={value|stringescape}'
1122 1122 }
1123 1123 # filecopy is preserved for compatibility reasons
1124 1124 defaulttempl['filecopy'] = defaulttempl['file_copy']
1125 1125 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1126 1126 cache=defaulttempl)
1127 1127 if tmpl:
1128 1128 self.t.cache['changeset'] = tmpl
1129 1129
1130 1130 self.cache = {}
1131 1131
1132 1132 def _meaningful_parentrevs(self, ctx):
1133 1133 """Return list of meaningful (or all if debug) parentrevs for rev.
1134 1134 """
1135 1135 parents = ctx.parents()
1136 1136 if len(parents) > 1:
1137 1137 return parents
1138 1138 if self.ui.debugflag:
1139 1139 return [parents[0], self.repo['null']]
1140 1140 if parents[0].rev() >= ctx.rev() - 1:
1141 1141 return []
1142 1142 return parents
1143 1143
1144 1144 def _show(self, ctx, copies, matchfn, props):
1145 1145 '''show a single changeset or file revision'''
1146 1146
1147 1147 showlist = templatekw.showlist
1148 1148
1149 1149 # showparents() behaviour depends on ui trace level which
1150 1150 # causes unexpected behaviours at templating level and makes
1151 1151 # it harder to extract it in a standalone function. Its
1152 1152 # behaviour cannot be changed so leave it here for now.
1153 1153 def showparents(**args):
1154 1154 ctx = args['ctx']
1155 1155 parents = [[('rev', p.rev()),
1156 1156 ('node', p.hex()),
1157 1157 ('phase', p.phasestr())]
1158 1158 for p in self._meaningful_parentrevs(ctx)]
1159 1159 return showlist('parent', parents, **args)
1160 1160
1161 1161 props = props.copy()
1162 1162 props.update(templatekw.keywords)
1163 1163 props['parents'] = showparents
1164 1164 props['templ'] = self.t
1165 1165 props['ctx'] = ctx
1166 1166 props['repo'] = self.repo
1167 1167 props['revcache'] = {'copies': copies}
1168 1168 props['cache'] = self.cache
1169 1169
1170 1170 # find correct templates for current mode
1171 1171
1172 1172 tmplmodes = [
1173 1173 (True, None),
1174 1174 (self.ui.verbose, 'verbose'),
1175 1175 (self.ui.quiet, 'quiet'),
1176 1176 (self.ui.debugflag, 'debug'),
1177 1177 ]
1178 1178
1179 1179 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1180 1180 for mode, postfix in tmplmodes:
1181 1181 for type in types:
1182 1182 cur = postfix and ('%s_%s' % (type, postfix)) or type
1183 1183 if mode and cur in self.t:
1184 1184 types[type] = cur
1185 1185
1186 1186 try:
1187 1187
1188 1188 # write header
1189 1189 if types['header']:
1190 1190 h = templater.stringify(self.t(types['header'], **props))
1191 1191 if self.buffered:
1192 1192 self.header[ctx.rev()] = h
1193 1193 else:
1194 1194 if self.lastheader != h:
1195 1195 self.lastheader = h
1196 1196 self.ui.write(h)
1197 1197
1198 1198 # write changeset metadata, then patch if requested
1199 1199 key = types['changeset']
1200 1200 self.ui.write(templater.stringify(self.t(key, **props)))
1201 1201 self.showpatch(ctx.node(), matchfn)
1202 1202
1203 1203 if types['footer']:
1204 1204 if not self.footer:
1205 1205 self.footer = templater.stringify(self.t(types['footer'],
1206 1206 **props))
1207 1207
1208 1208 except KeyError, inst:
1209 1209 msg = _("%s: no key named '%s'")
1210 1210 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1211 1211 except SyntaxError, inst:
1212 1212 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1213 1213
1214 1214 def gettemplate(ui, tmpl, style):
1215 1215 """
1216 1216 Find the template matching the given template spec or style.
1217 1217 """
1218 1218
1219 1219 # ui settings
1220 1220 if not tmpl and not style: # templates are stronger than style
1221 1221 tmpl = ui.config('ui', 'logtemplate')
1222 1222 if tmpl:
1223 1223 try:
1224 1224 tmpl = templater.parsestring(tmpl)
1225 1225 except SyntaxError:
1226 1226 tmpl = templater.parsestring(tmpl, quoted=False)
1227 1227 return tmpl, None
1228 1228 else:
1229 1229 style = util.expandpath(ui.config('ui', 'style', ''))
1230 1230
1231 1231 if not tmpl and style:
1232 1232 mapfile = style
1233 1233 if not os.path.split(mapfile)[0]:
1234 1234 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1235 1235 or templater.templatepath(mapfile))
1236 1236 if mapname:
1237 1237 mapfile = mapname
1238 1238 return None, mapfile
1239 1239
1240 1240 if not tmpl:
1241 1241 return None, None
1242 1242
1243 1243 # looks like a literal template?
1244 1244 if '{' in tmpl:
1245 1245 return tmpl, None
1246 1246
1247 1247 # perhaps a stock style?
1248 1248 if not os.path.split(tmpl)[0]:
1249 1249 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1250 1250 or templater.templatepath(tmpl))
1251 1251 if mapname and os.path.isfile(mapname):
1252 1252 return None, mapname
1253 1253
1254 1254 # perhaps it's a reference to [templates]
1255 1255 t = ui.config('templates', tmpl)
1256 1256 if t:
1257 1257 try:
1258 1258 tmpl = templater.parsestring(t)
1259 1259 except SyntaxError:
1260 1260 tmpl = templater.parsestring(t, quoted=False)
1261 1261 return tmpl, None
1262 1262
1263 1263 if tmpl == 'list':
1264 1264 ui.write(_("available styles: %s\n") % templater.stylelist())
1265 1265 raise util.Abort(_("specify a template"))
1266 1266
1267 1267 # perhaps it's a path to a map or a template
1268 1268 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1269 1269 # is it a mapfile for a style?
1270 1270 if os.path.basename(tmpl).startswith("map-"):
1271 1271 return None, os.path.realpath(tmpl)
1272 1272 tmpl = open(tmpl).read()
1273 1273 return tmpl, None
1274 1274
1275 1275 # constant string?
1276 1276 return tmpl, None
1277 1277
1278 1278 def show_changeset(ui, repo, opts, buffered=False):
1279 1279 """show one changeset using template or regular display.
1280 1280
1281 1281 Display format will be the first non-empty hit of:
1282 1282 1. option 'template'
1283 1283 2. option 'style'
1284 1284 3. [ui] setting 'logtemplate'
1285 1285 4. [ui] setting 'style'
1286 1286 If all of these values are either unset or the empty string,
1287 1287 regular display via changeset_printer() is done.
1288 1288 """
1289 1289 # options
1290 1290 matchfn = None
1291 1291 if opts.get('patch') or opts.get('stat'):
1292 1292 matchfn = scmutil.matchall(repo)
1293 1293
1294 1294 if opts.get('template') == 'json':
1295 1295 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1296 1296
1297 1297 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1298 1298
1299 1299 if not tmpl and not mapfile:
1300 1300 return changeset_printer(ui, repo, matchfn, opts, buffered)
1301 1301
1302 1302 try:
1303 1303 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1304 1304 buffered)
1305 1305 except SyntaxError, inst:
1306 1306 raise util.Abort(inst.args[0])
1307 1307 return t
1308 1308
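A hedged sketch of the caller side: show_changeset() builds a displayer from the options, after which changesets are fed to it one by one (the ui/repo objects are assumed to exist as in the earlier sketches; the inline template is illustrative):

    from mercurial import cmdutil

    displayer = cmdutil.show_changeset(
        ui, repo, {'template': '{rev}:{node|short} {desc|firstline}\n'})
    displayer.show(repo['tip'])
    displayer.close()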
1309 1309 def showmarker(ui, marker):
1310 1310 """utility function to display an obsolescence marker in a readable way
1311 1311
1312 1312 To be used by debug functions."""
1313 1313 ui.write(hex(marker.precnode()))
1314 1314 for repl in marker.succnodes():
1315 1315 ui.write(' ')
1316 1316 ui.write(hex(repl))
1317 1317 ui.write(' %X ' % marker.flags())
1318 1318 parents = marker.parentnodes()
1319 1319 if parents is not None:
1320 1320 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1321 1321 ui.write('(%s) ' % util.datestr(marker.date()))
1322 1322 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1323 1323 sorted(marker.metadata().items())
1324 1324 if t[0] != 'date')))
1325 1325 ui.write('\n')
1326 1326
1327 1327 def finddate(ui, repo, date):
1328 1328 """Find the tipmost changeset that matches the given date spec"""
1329 1329
1330 1330 df = util.matchdate(date)
1331 1331 m = scmutil.matchall(repo)
1332 1332 results = {}
1333 1333
1334 1334 def prep(ctx, fns):
1335 1335 d = ctx.date()
1336 1336 if df(d[0]):
1337 1337 results[ctx.rev()] = d
1338 1338
1339 1339 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1340 1340 rev = ctx.rev()
1341 1341 if rev in results:
1342 1342 ui.status(_("found revision %s from %s\n") %
1343 1343 (rev, util.datestr(results[rev])))
1344 1344 return str(rev)
1345 1345
1346 1346 raise util.Abort(_("revision matching date not found"))
1347 1347
1348 1348 def increasingwindows(windowsize=8, sizelimit=512):
1349 1349 while True:
1350 1350 yield windowsize
1351 1351 if windowsize < sizelimit:
1352 1352 windowsize *= 2
1353 1353
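increasingwindows() simply doubles the window until it reaches the size limit and then keeps yielding that limit, e.g.:

    import itertools
    from mercurial import cmdutil

    # First eight window sizes: 8, 16, 32, 64, 128, 256, 512, 512
    print list(itertools.islice(cmdutil.increasingwindows(), 8))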
1354 1354 class FileWalkError(Exception):
1355 1355 pass
1356 1356
1357 1357 def walkfilerevs(repo, match, follow, revs, fncache):
1358 1358 '''Walks the file history for the matched files.
1359 1359
1360 1360 Returns the changeset revs that are involved in the file history.
1361 1361
1362 1362 Throws FileWalkError if the file history can't be walked using
1363 1363 filelogs alone.
1364 1364 '''
1365 1365 wanted = set()
1366 1366 copies = []
1367 1367 minrev, maxrev = min(revs), max(revs)
1368 1368 def filerevgen(filelog, last):
1369 1369 """
1370 1370 Only files, no patterns. Check the history of each file.
1371 1371
1372 1372 Examines filelog entries within minrev, maxrev linkrev range
1373 1373 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1374 1374 tuples in backwards order
1375 1375 """
1376 1376 cl_count = len(repo)
1377 1377 revs = []
1378 1378 for j in xrange(0, last + 1):
1379 1379 linkrev = filelog.linkrev(j)
1380 1380 if linkrev < minrev:
1381 1381 continue
1382 1382 # only yield revs for which we have the changelog; missing
1383 1383 # entries can happen while doing "hg log" during a pull or commit
1384 1384 if linkrev >= cl_count:
1385 1385 break
1386 1386
1387 1387 parentlinkrevs = []
1388 1388 for p in filelog.parentrevs(j):
1389 1389 if p != nullrev:
1390 1390 parentlinkrevs.append(filelog.linkrev(p))
1391 1391 n = filelog.node(j)
1392 1392 revs.append((linkrev, parentlinkrevs,
1393 1393 follow and filelog.renamed(n)))
1394 1394
1395 1395 return reversed(revs)
1396 1396 def iterfiles():
1397 1397 pctx = repo['.']
1398 1398 for filename in match.files():
1399 1399 if follow:
1400 1400 if filename not in pctx:
1401 1401 raise util.Abort(_('cannot follow file not in parent '
1402 1402 'revision: "%s"') % filename)
1403 1403 yield filename, pctx[filename].filenode()
1404 1404 else:
1405 1405 yield filename, None
1406 1406 for filename_node in copies:
1407 1407 yield filename_node
1408 1408
1409 1409 for file_, node in iterfiles():
1410 1410 filelog = repo.file(file_)
1411 1411 if not len(filelog):
1412 1412 if node is None:
1413 1413 # A zero count may be a directory or deleted file, so
1414 1414 # try to find matching entries on the slow path.
1415 1415 if follow:
1416 1416 raise util.Abort(
1417 1417 _('cannot follow nonexistent file: "%s"') % file_)
1418 1418 raise FileWalkError("Cannot walk via filelog")
1419 1419 else:
1420 1420 continue
1421 1421
1422 1422 if node is None:
1423 1423 last = len(filelog) - 1
1424 1424 else:
1425 1425 last = filelog.rev(node)
1426 1426
1427 1427
1428 1428 # keep track of all ancestors of the file
1429 1429 ancestors = set([filelog.linkrev(last)])
1430 1430
1431 1431 # iterate from latest to oldest revision
1432 1432 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1433 1433 if not follow:
1434 1434 if rev > maxrev:
1435 1435 continue
1436 1436 else:
1437 1437 # Note that last might not be the first interesting
1438 1438 # rev to us:
1439 1439 # if the file has been changed after maxrev, we'll
1440 1440 # have linkrev(last) > maxrev, and we still need
1441 1441 # to explore the file graph
1442 1442 if rev not in ancestors:
1443 1443 continue
1444 1444 # XXX insert 1327 fix here
1445 1445 if flparentlinkrevs:
1446 1446 ancestors.update(flparentlinkrevs)
1447 1447
1448 1448 fncache.setdefault(rev, []).append(file_)
1449 1449 wanted.add(rev)
1450 1450 if copied:
1451 1451 copies.append(copied)
1452 1452
1453 1453 return wanted
1454 1454
1455 1455 def walkchangerevs(repo, match, opts, prepare):
1456 1456 '''Iterate over files and the revs in which they changed.
1457 1457
1458 1458 Callers most commonly need to iterate backwards over the history
1459 1459 in which they are interested. Doing so has awful (quadratic-looking)
1460 1460 performance, so we use iterators in a "windowed" way.
1461 1461
1462 1462 We walk a window of revisions in the desired order. Within the
1463 1463 window, we first walk forwards to gather data, then in the desired
1464 1464 order (usually backwards) to display it.
1465 1465
1466 1466 This function returns an iterator yielding contexts. Before
1467 1467 yielding each context, the iterator will first call the prepare
1468 1468 function on each context in the window in forward order.'''
1469 1469
1470 1470 follow = opts.get('follow') or opts.get('follow_first')
1471 1471
1472 1472 if opts.get('rev'):
1473 1473 revs = scmutil.revrange(repo, opts.get('rev'))
1474 1474 elif follow:
1475 1475 revs = repo.revs('reverse(:.)')
1476 1476 else:
1477 1477 revs = revset.spanset(repo)
1478 1478 revs.reverse()
1479 1479 if not revs:
1480 1480 return []
1481 1481 wanted = set()
1482 1482 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1483 1483 fncache = {}
1484 1484 change = repo.changectx
1485 1485
1486 1486 # First step is to fill wanted, the set of revisions that we want to yield.
1487 1487 # When it does not induce extra cost, we also fill fncache for revisions in
1488 1488 # wanted: a cache of filenames that were changed (ctx.files()) and that
1489 1489 # match the file filtering conditions.
1490 1490
1491 1491 if not slowpath and not match.files():
1492 1492 # No files, no patterns. Display all revs.
1493 1493 wanted = revs
1494 1494
1495 1495 if not slowpath and match.files():
1496 1496 # We only have to read through the filelog to find wanted revisions
1497 1497
1498 1498 try:
1499 1499 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1500 1500 except FileWalkError:
1501 1501 slowpath = True
1502 1502
1503 1503 # We decided to fall back to the slowpath because at least one
1504 1504 # of the paths was not a file. Check to see if at least one of them
1505 1505 # existed in history, otherwise simply return
1506 1506 for path in match.files():
1507 1507 if path == '.' or path in repo.store:
1508 1508 break
1509 1509 else:
1510 1510 return []
1511 1511
1512 1512 if slowpath:
1513 1513 # We have to read the changelog to match filenames against
1514 1514 # changed files
1515 1515
1516 1516 if follow:
1517 1517 raise util.Abort(_('can only follow copies/renames for explicit '
1518 1518 'filenames'))
1519 1519
1520 1520 # The slow path checks files modified in every changeset.
1521 1521 # This is really slow on large repos, so compute the set lazily.
1522 1522 class lazywantedset(object):
1523 1523 def __init__(self):
1524 1524 self.set = set()
1525 1525 self.revs = set(revs)
1526 1526
1527 1527 # No need to worry about locality here because it will be accessed
1528 1528 # in the same order as the increasing window below.
1529 1529 def __contains__(self, value):
1530 1530 if value in self.set:
1531 1531 return True
1532 1532 elif value not in self.revs:
1533 1533 return False
1534 1534 else:
1535 1535 self.revs.discard(value)
1536 1536 ctx = change(value)
1537 1537 matches = filter(match, ctx.files())
1538 1538 if matches:
1539 1539 fncache[value] = matches
1540 1540 self.set.add(value)
1541 1541 return True
1542 1542 return False
1543 1543
1544 1544 def discard(self, value):
1545 1545 self.revs.discard(value)
1546 1546 self.set.discard(value)
1547 1547
1548 1548 wanted = lazywantedset()
1549 1549
1550 1550 class followfilter(object):
1551 1551 def __init__(self, onlyfirst=False):
1552 1552 self.startrev = nullrev
1553 1553 self.roots = set()
1554 1554 self.onlyfirst = onlyfirst
1555 1555
1556 1556 def match(self, rev):
1557 1557 def realparents(rev):
1558 1558 if self.onlyfirst:
1559 1559 return repo.changelog.parentrevs(rev)[0:1]
1560 1560 else:
1561 1561 return filter(lambda x: x != nullrev,
1562 1562 repo.changelog.parentrevs(rev))
1563 1563
1564 1564 if self.startrev == nullrev:
1565 1565 self.startrev = rev
1566 1566 return True
1567 1567
1568 1568 if rev > self.startrev:
1569 1569 # forward: all descendants
1570 1570 if not self.roots:
1571 1571 self.roots.add(self.startrev)
1572 1572 for parent in realparents(rev):
1573 1573 if parent in self.roots:
1574 1574 self.roots.add(rev)
1575 1575 return True
1576 1576 else:
1577 1577 # backwards: all parents
1578 1578 if not self.roots:
1579 1579 self.roots.update(realparents(self.startrev))
1580 1580 if rev in self.roots:
1581 1581 self.roots.remove(rev)
1582 1582 self.roots.update(realparents(rev))
1583 1583 return True
1584 1584
1585 1585 return False
1586 1586
1587 1587 # it might be worthwhile to do this in the iterator if the rev range
1588 1588 # is descending and the prune args are all within that range
1589 1589 for rev in opts.get('prune', ()):
1590 1590 rev = repo[rev].rev()
1591 1591 ff = followfilter()
1592 1592 stop = min(revs[0], revs[-1])
1593 1593 for x in xrange(rev, stop - 1, -1):
1594 1594 if ff.match(x):
1595 1595 wanted = wanted - [x]
1596 1596
1597 1597 # Now that wanted is correctly initialized, we can iterate over the
1598 1598 # revision range, yielding only revisions in wanted.
1599 1599 def iterate():
1600 1600 if follow and not match.files():
1601 1601 ff = followfilter(onlyfirst=opts.get('follow_first'))
1602 1602 def want(rev):
1603 1603 return ff.match(rev) and rev in wanted
1604 1604 else:
1605 1605 def want(rev):
1606 1606 return rev in wanted
1607 1607
1608 1608 it = iter(revs)
1609 1609 stopiteration = False
1610 1610 for windowsize in increasingwindows():
1611 1611 nrevs = []
1612 1612 for i in xrange(windowsize):
1613 1613 try:
1614 1614 rev = it.next()
1615 1615 if want(rev):
1616 1616 nrevs.append(rev)
1617 1617 except StopIteration:
1618 1618 stopiteration = True
1619 1619 break
1620 1620 for rev in sorted(nrevs):
1621 1621 fns = fncache.get(rev)
1622 1622 ctx = change(rev)
1623 1623 if not fns:
1624 1624 def fns_generator():
1625 1625 for f in ctx.files():
1626 1626 if match(f):
1627 1627 yield f
1628 1628 fns = fns_generator()
1629 1629 prepare(ctx, fns)
1630 1630 for rev in nrevs:
1631 1631 yield change(rev)
1632 1632
1633 1633 if stopiteration:
1634 1634 break
1635 1635
1636 1636 return iterate()
1637 1637
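# Editor's sketch of the calling convention for walkchangerevs(); finddate()
# above is the in-tree example. The matcher, opts and prepare callback here
# are illustrative only.
def _walkchangerevsexample(ui, repo):
    m = scmutil.matchall(repo)
    def prep(ctx, fns):
        # called in forward order for every context in the current window,
        # before the contexts themselves are yielded (usually backwards)
        pass
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        ui.write('%d\n' % ctx.rev())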
1638 1638 def _makefollowlogfilematcher(repo, files, followfirst):
1639 1639 # When displaying a revision with --patch --follow FILE, we have
1640 1640 # to know which file of the revision must be diffed. With
1641 1641 # --follow, we want the names of the ancestors of FILE in the
1642 1642 # revision, stored in "fcache". "fcache" is populated by
1643 1643 # reproducing the graph traversal already done by --follow revset
1644 1644 # and relating linkrevs to file names (which is not "correct" but
1645 1645 # good enough).
1646 1646 fcache = {}
1647 1647 fcacheready = [False]
1648 1648 pctx = repo['.']
1649 1649
1650 1650 def populate():
1651 1651 for fn in files:
1652 1652 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1653 1653 for c in i:
1654 1654 fcache.setdefault(c.linkrev(), set()).add(c.path())
1655 1655
1656 1656 def filematcher(rev):
1657 1657 if not fcacheready[0]:
1658 1658 # Lazy initialization
1659 1659 fcacheready[0] = True
1660 1660 populate()
1661 1661 return scmutil.matchfiles(repo, fcache.get(rev, []))
1662 1662
1663 1663 return filematcher
1664 1664
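# Editor's sketch: the factory above is queried once per displayed revision
# when rendering --patch/--stat output under --follow. The file name 'foo.c'
# is purely hypothetical and must be tracked in the working directory parent
# for the lazy population step to succeed.
def _followmatcherexample(repo):
    filematcher = _makefollowlogfilematcher(repo, ['foo.c'], False)
    return filematcher(repo['.'].rev())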
1665 1665 def _makenofollowlogfilematcher(repo, pats, opts):
1666 1666 '''hook for extensions to override the filematcher for non-follow cases'''
1667 1667 return None
1668 1668
1669 1669 def _makelogrevset(repo, pats, opts, revs):
1670 1670 """Return (expr, filematcher) where expr is a revset string built
1671 1671 from log options and file patterns or None. If --stat or --patch
1672 1672 are not passed filematcher is None. Otherwise it is a callable
1673 1673 taking a revision number and returning a match objects filtering
1674 1674 the files to be detailed when displaying the revision.
1675 1675 """
1676 1676 opt2revset = {
1677 1677 'no_merges': ('not merge()', None),
1678 1678 'only_merges': ('merge()', None),
1679 1679 '_ancestors': ('ancestors(%(val)s)', None),
1680 1680 '_fancestors': ('_firstancestors(%(val)s)', None),
1681 1681 '_descendants': ('descendants(%(val)s)', None),
1682 1682 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1683 1683 '_matchfiles': ('_matchfiles(%(val)s)', None),
1684 1684 'date': ('date(%(val)r)', None),
1685 1685 'branch': ('branch(%(val)r)', ' or '),
1686 1686 '_patslog': ('filelog(%(val)r)', ' or '),
1687 1687 '_patsfollow': ('follow(%(val)r)', ' or '),
1688 1688 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1689 1689 'keyword': ('keyword(%(val)r)', ' or '),
1690 1690 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1691 1691 'user': ('user(%(val)r)', ' or '),
1692 1692 }
1693 1693
1694 1694 opts = dict(opts)
1695 1695 # follow or not follow?
1696 1696 follow = opts.get('follow') or opts.get('follow_first')
1697 1697 followfirst = opts.get('follow_first') and 1 or 0
1698 1698 # --follow with FILE behaviour depends on revs...
1699 1699 it = iter(revs)
1700 1700 startrev = it.next()
1701 1701 try:
1702 1702 followdescendants = startrev < it.next()
1703 1703 except StopIteration:
1704 1704 followdescendants = False
1705 1705
1706 1706 # branch and only_branch are really aliases and must be handled at
1707 1707 # the same time
1708 1708 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1709 1709 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1710 1710 # pats/include/exclude are passed to match.match() directly in
1711 1711 # _matchfiles() revset but walkchangerevs() builds its matcher with
1712 1712 # scmutil.match(). The difference is input pats are globbed on
1713 1713 # platforms without shell expansion (windows).
1714 1714 pctx = repo[None]
1715 1715 match, pats = scmutil.matchandpats(pctx, pats, opts)
1716 1716 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1717 1717 if not slowpath:
1718 1718 for f in match.files():
1719 1719 if follow and f not in pctx:
1720 1720 # If the file exists, it may be a directory, so let it
1721 1721 # take the slow path.
1722 1722 if os.path.exists(repo.wjoin(f)):
1723 1723 slowpath = True
1724 1724 continue
1725 1725 else:
1726 1726 raise util.Abort(_('cannot follow file not in parent '
1727 1727 'revision: "%s"') % f)
1728 1728 filelog = repo.file(f)
1729 1729 if not filelog:
1730 1730 # A zero count may be a directory or deleted file, so
1731 1731 # try to find matching entries on the slow path.
1732 1732 if follow:
1733 1733 raise util.Abort(
1734 1734 _('cannot follow nonexistent file: "%s"') % f)
1735 1735 slowpath = True
1736 1736
1737 1737 # We decided to fall back to the slowpath because at least one
1738 1738 # of the paths was not a file. Check to see if at least one of them
1739 1739 # existed in history - in that case, we'll continue down the
1740 1740 # slowpath; otherwise, we can turn off the slowpath
1741 1741 if slowpath:
1742 1742 for path in match.files():
1743 1743 if path == '.' or path in repo.store:
1744 1744 break
1745 1745 else:
1746 1746 slowpath = False
1747 1747
1748 1748 fpats = ('_patsfollow', '_patsfollowfirst')
1749 1749 fnopats = (('_ancestors', '_fancestors'),
1750 1750 ('_descendants', '_fdescendants'))
1751 1751 if slowpath:
1752 1752 # See walkchangerevs() slow path.
1753 1753 #
1754 1754 # pats/include/exclude cannot be represented as separate
1755 1755 # revset expressions as their filtering logic applies at file
1756 1756 # level. For instance "-I a -X b" matches a revision touching
1757 1757 # "a" and "b" while "file(a) and not file(b)" does
1758 1758 # not. Besides, filesets are evaluated against the working
1759 1759 # directory.
1760 1760 matchargs = ['r:', 'd:relpath']
1761 1761 for p in pats:
1762 1762 matchargs.append('p:' + p)
1763 1763 for p in opts.get('include', []):
1764 1764 matchargs.append('i:' + p)
1765 1765 for p in opts.get('exclude', []):
1766 1766 matchargs.append('x:' + p)
1767 1767 matchargs = ','.join(('%r' % p) for p in matchargs)
1768 1768 opts['_matchfiles'] = matchargs
1769 1769 if follow:
1770 1770 opts[fnopats[0][followfirst]] = '.'
1771 1771 else:
1772 1772 if follow:
1773 1773 if pats:
1774 1774 # follow() revset interprets its file argument as a
1775 1775 # manifest entry, so use match.files(), not pats.
1776 1776 opts[fpats[followfirst]] = list(match.files())
1777 1777 else:
1778 1778 opts[fnopats[followdescendants][followfirst]] = str(startrev)
1779 1779 else:
1780 1780 opts['_patslog'] = list(pats)
1781 1781
1782 1782 filematcher = None
1783 1783 if opts.get('patch') or opts.get('stat'):
1784 1784 # When following files, track renames via a special matcher.
1785 1785 # If we're forced to take the slowpath it means we're following
1786 1786 # at least one pattern/directory, so don't bother with rename tracking.
1787 1787 if follow and not match.always() and not slowpath:
1788 1788 # _makefollowlogfilematcher expects its files argument to be
1789 1789 # relative to the repo root, so use match.files(), not pats.
1790 1790 filematcher = _makefollowlogfilematcher(repo, match.files(),
1791 1791 followfirst)
1792 1792 else:
1793 1793 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
1794 1794 if filematcher is None:
1795 1795 filematcher = lambda rev: match
1796 1796
1797 1797 expr = []
1798 1798 for op, val in sorted(opts.iteritems()):
1799 1799 if not val:
1800 1800 continue
1801 1801 if op not in opt2revset:
1802 1802 continue
1803 1803 revop, andor = opt2revset[op]
1804 1804 if '%(val)' not in revop:
1805 1805 expr.append(revop)
1806 1806 else:
1807 1807 if not isinstance(val, list):
1808 1808 e = revop % {'val': val}
1809 1809 else:
1810 1810 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
1811 1811 expr.append(e)
1812 1812
1813 1813 if expr:
1814 1814 expr = '(' + ' and '.join(expr) + ')'
1815 1815 else:
1816 1816 expr = None
1817 1817 return expr, filematcher
1818 1818
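# Editor's worked example (illustrative; assumes a non-empty repository so
# that "revs" has at least one element).
def _makelogrevsetexample(repo):
    opts = {'keyword': ['bug'], 'user': ['alice', 'bob'], 'no_merges': True}
    revs = repo.revs('all()')
    expr, filematcher = _makelogrevset(repo, [], opts, revs)
    # multi-valued options are joined with their own separator and the
    # pieces are combined with ' and ', so expr is roughly:
    #   ((keyword('bug')) and not merge() and (user('alice') or user('bob')))
    # filematcher is None because neither --patch nor --stat is set
    return expr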
1819 1819 def getgraphlogrevs(repo, pats, opts):
1820 1820 """Return (revs, expr, filematcher) where revs is an iterable of
1821 1821 revision numbers, expr is a revset string built from log options
1822 1822 and file patterns or None, and used to filter 'revs'. If --stat or
1823 1823 --patch are not passed filematcher is None. Otherwise it is a
1824 1824 callable taking a revision number and returning a match object
1825 1825 filtering the files to be detailed when displaying the revision.
1826 1826 """
1827 1827 if not len(repo):
1828 1828 return [], None, None
1829 1829 limit = loglimit(opts)
1830 1830 # Default --rev value depends on --follow but --follow behaviour
1831 1831 # depends on revisions resolved from --rev...
1832 1832 follow = opts.get('follow') or opts.get('follow_first')
1833 1833 possiblyunsorted = False # whether revs might need sorting
1834 1834 if opts.get('rev'):
1835 1835 revs = scmutil.revrange(repo, opts['rev'])
1836 1836 # Don't sort here because _makelogrevset might depend on the
1837 1837 # order of revs
1838 1838 possiblyunsorted = True
1839 1839 else:
1840 1840 if follow and len(repo) > 0:
1841 1841 revs = repo.revs('reverse(:.)')
1842 1842 else:
1843 1843 revs = revset.spanset(repo)
1844 1844 revs.reverse()
1845 1845 if not revs:
1846 1846 return revset.baseset(), None, None
1847 1847 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1848 1848 if possiblyunsorted:
1849 1849 revs.sort(reverse=True)
1850 1850 if expr:
1851 1851 # Revset matchers often operate faster on revisions in changelog
1852 1852 # order, because most filters deal with the changelog.
1853 1853 revs.reverse()
1854 1854 matcher = revset.match(repo.ui, expr)
1855 1855 # Revset matches can reorder revisions. "A or B" typically returns
1856 1856 # the revision matching A and then the revision matching B. Sort
1857 1857 # again to fix that.
1858 1858 revs = matcher(repo, revs)
1859 1859 revs.sort(reverse=True)
1860 1860 if limit is not None:
1861 1861 limitedrevs = []
1862 1862 for idx, rev in enumerate(revs):
1863 1863 if idx >= limit:
1864 1864 break
1865 1865 limitedrevs.append(rev)
1866 1866 revs = revset.baseset(limitedrevs)
1867 1867
1868 1868 return revs, expr, filematcher
1869 1869
1870 1870 def getlogrevs(repo, pats, opts):
1871 1871 """Return (revs, expr, filematcher) where revs is an iterable of
1872 1872 revision numbers, expr is a revset string built from log options
1873 1873 and file patterns or None, and used to filter 'revs'. If --stat or
1874 1874 --patch are not passed filematcher is None. Otherwise it is a
1875 1875 callable taking a revision number and returning a match object
1876 1876 filtering the files to be detailed when displaying the revision.
1877 1877 """
1878 1878 limit = loglimit(opts)
1879 1879 # Default --rev value depends on --follow but --follow behaviour
1880 1880 # depends on revisions resolved from --rev...
1881 1881 follow = opts.get('follow') or opts.get('follow_first')
1882 1882 if opts.get('rev'):
1883 1883 revs = scmutil.revrange(repo, opts['rev'])
1884 1884 elif follow:
1885 1885 revs = repo.revs('reverse(:.)')
1886 1886 else:
1887 1887 revs = revset.spanset(repo)
1888 1888 revs.reverse()
1889 1889 if not revs:
1890 1890 return revset.baseset([]), None, None
1891 1891 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1892 1892 if expr:
1893 1893 # Revset matchers often operate faster on revisions in changelog
1894 1894 # order, because most filters deal with the changelog.
1895 1895 if not opts.get('rev'):
1896 1896 revs.reverse()
1897 1897 matcher = revset.match(repo.ui, expr)
1898 1898 # Revset matches can reorder revisions. "A or B" typically returns
1899 1899 # the revision matching A and then the revision matching B. Sort
1900 1900 # again to fix that.
1901 1901 revs = matcher(repo, revs)
1902 1902 if not opts.get('rev'):
1903 1903 revs.sort(reverse=True)
1904 1904 if limit is not None:
1905 1905 count = 0
1906 1906 limitedrevs = []
1907 1907 it = iter(revs)
1908 1908 while count < limit:
1909 1909 try:
1910 1910 limitedrevs.append(it.next())
1911 1911 except StopIteration:
1912 1912 break
1913 1913 count += 1
1914 1914 revs = revset.baseset(limitedrevs)
1915 1915
1916 1916 return revs, expr, filematcher
1917 1917
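# Editor's sketch: the log command resolves its arguments through
# getlogrevs(); a call roughly equivalent to "hg log -r tip --patch README"
# looks like this (the file name is illustrative).
def _getlogrevsexample(ui, repo):
    opts = {'rev': ['tip'], 'patch': True}
    revs, expr, filematcher = getlogrevs(repo, ['README'], opts)
    for rev in revs:
        ui.write('%d\n' % rev)
        # filematcher(rev) yields the per-revision match object used to
        # restrict --patch/--stat output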
1918 1918 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
1919 1919 filematcher=None):
1920 1920 seen, state = [], graphmod.asciistate()
1921 1921 for rev, type, ctx, parents in dag:
1922 1922 char = 'o'
1923 1923 if ctx.node() in showparents:
1924 1924 char = '@'
1925 1925 elif ctx.obsolete():
1926 1926 char = 'x'
1927 1927 copies = None
1928 1928 if getrenamed and ctx.rev():
1929 1929 copies = []
1930 1930 for fn in ctx.files():
1931 1931 rename = getrenamed(fn, ctx.rev())
1932 1932 if rename:
1933 1933 copies.append((fn, rename[0]))
1934 1934 revmatchfn = None
1935 1935 if filematcher is not None:
1936 1936 revmatchfn = filematcher(ctx.rev())
1937 1937 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
1938 1938 lines = displayer.hunk.pop(rev).split('\n')
1939 1939 if not lines[-1]:
1940 1940 del lines[-1]
1941 1941 displayer.flush(rev)
1942 1942 edges = edgefn(type, char, lines, seen, rev, parents)
1943 1943 for type, char, lines, coldata in edges:
1944 1944 graphmod.ascii(ui, state, type, char, lines, coldata)
1945 1945 displayer.close()
1946 1946
1947 1947 def graphlog(ui, repo, *pats, **opts):
1948 1948 # Parameters are identical to log command ones
1949 1949 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
1950 1950 revdag = graphmod.dagwalker(repo, revs)
1951 1951
1952 1952 getrenamed = None
1953 1953 if opts.get('copies'):
1954 1954 endrev = None
1955 1955 if opts.get('rev'):
1956 1956 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
1957 1957 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
1958 1958 displayer = show_changeset(ui, repo, opts, buffered=True)
1959 1959 showparents = [ctx.node() for ctx in repo[None].parents()]
1960 1960 displaygraph(ui, revdag, displayer, showparents,
1961 1961 graphmod.asciiedges, getrenamed, filematcher)
1962 1962
1963 1963 def checkunsupportedgraphflags(pats, opts):
1964 1964 for op in ["newest_first"]:
1965 1965 if op in opts and opts[op]:
1966 1966 raise util.Abort(_("-G/--graph option is incompatible with --%s")
1967 1967 % op.replace("_", "-"))
1968 1968
1969 1969 def graphrevs(repo, nodes, opts):
1970 1970 limit = loglimit(opts)
1971 1971 nodes.reverse()
1972 1972 if limit is not None:
1973 1973 nodes = nodes[:limit]
1974 1974 return graphmod.nodes(repo, nodes)
1975 1975
1976 1976 def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
1977 1977 join = lambda f: os.path.join(prefix, f)
1978 1978 bad = []
1979 1979 oldbad = match.bad
1980 1980 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1981 1981 names = []
1982 1982 wctx = repo[None]
1983 1983 cca = None
1984 1984 abort, warn = scmutil.checkportabilityalert(ui)
1985 1985 if abort or warn:
1986 1986 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1987 1987 for f in wctx.walk(match):
1988 1988 exact = match.exact(f)
1989 1989 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1990 1990 if cca:
1991 1991 cca(f)
1992 1992 names.append(f)
1993 1993 if ui.verbose or not exact:
1994 1994 ui.status(_('adding %s\n') % match.rel(join(f)))
1995 1995
1996 1996 for subpath in sorted(wctx.substate):
1997 1997 sub = wctx.sub(subpath)
1998 1998 try:
1999 1999 submatch = matchmod.narrowmatcher(subpath, match)
2000 2000 if listsubrepos:
2001 2001 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2002 2002 False))
2003 2003 else:
2004 2004 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2005 2005 True))
2006 2006 except error.LookupError:
2007 2007 ui.status(_("skipping missing subrepository: %s\n")
2008 2008 % join(subpath))
2009 2009
2010 2010 if not dryrun:
2011 2011 rejected = wctx.add(names, prefix)
2012 2012 bad.extend(f for f in rejected if f in match.files())
2013 2013 return bad
2014 2014
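# Editor's sketch: add() above is the plumbing behind 'hg add'; callers build
# a matcher from the command line and get back the list of rejected files.
# dryrun=True keeps this sketch side-effect free.
def _addexample(ui, repo, pats, opts):
    m = scmutil.match(repo[None], pats, opts)
    return add(ui, repo, m, dryrun=True, listsubrepos=False, prefix='',
               explicitonly=False)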
2015 2015 def forget(ui, repo, match, prefix, explicitonly):
2016 2016 join = lambda f: os.path.join(prefix, f)
2017 2017 bad = []
2018 2018 oldbad = match.bad
2019 2019 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2020 2020 wctx = repo[None]
2021 2021 forgot = []
2022 2022 s = repo.status(match=match, clean=True)
2023 2023 forget = sorted(s[0] + s[1] + s[3] + s[6])
2024 2024 if explicitonly:
2025 2025 forget = [f for f in forget if match.exact(f)]
2026 2026
2027 2027 for subpath in sorted(wctx.substate):
2028 2028 sub = wctx.sub(subpath)
2029 2029 try:
2030 2030 submatch = matchmod.narrowmatcher(subpath, match)
2031 2031 subbad, subforgot = sub.forget(ui, submatch, prefix)
2032 2032 bad.extend([subpath + '/' + f for f in subbad])
2033 2033 forgot.extend([subpath + '/' + f for f in subforgot])
2034 2034 except error.LookupError:
2035 2035 ui.status(_("skipping missing subrepository: %s\n")
2036 2036 % join(subpath))
2037 2037
2038 2038 if not explicitonly:
2039 2039 for f in match.files():
2040 2040 if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
2041 2041 if f not in forgot:
2042 2042 if os.path.exists(match.rel(join(f))):
2043 2043 ui.warn(_('not removing %s: '
2044 2044 'file is already untracked\n')
2045 2045 % match.rel(join(f)))
2046 2046 bad.append(f)
2047 2047
2048 2048 for f in forget:
2049 2049 if ui.verbose or not match.exact(f):
2050 2050 ui.status(_('removing %s\n') % match.rel(join(f)))
2051 2051
2052 2052 rejected = wctx.forget(forget, prefix)
2053 2053 bad.extend(f for f in rejected if f in match.files())
2054 2054 forgot.extend(forget)
2055 2055 return bad, forgot
2056 2056
2057 2057 def remove(ui, repo, m, prefix, after, force, subrepos):
2058 2058 join = lambda f: os.path.join(prefix, f)
2059 2059 ret = 0
2060 2060 s = repo.status(match=m, clean=True)
2061 2061 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2062 2062
2063 2063 wctx = repo[None]
2064 2064
2065 2065 for subpath in sorted(wctx.substate):
2066 2066 def matchessubrepo(matcher, subpath):
2067 2067 if matcher.exact(subpath):
2068 2068 return True
2069 2069 for f in matcher.files():
2070 2070 if f.startswith(subpath):
2071 2071 return True
2072 2072 return False
2073 2073
2074 2074 if subrepos or matchessubrepo(m, subpath):
2075 2075 sub = wctx.sub(subpath)
2076 2076 try:
2077 2077 submatch = matchmod.narrowmatcher(subpath, m)
2078 2078 if sub.removefiles(ui, submatch, prefix, after, force,
2079 2079 subrepos):
2080 2080 ret = 1
2081 2081 except error.LookupError:
2082 2082 ui.status(_("skipping missing subrepository: %s\n")
2083 2083 % join(subpath))
2084 2084
2085 2085 # warn about failure to delete explicit files/dirs
2086 2086 for f in m.files():
2087 2087 def insubrepo():
2088 2088 for subpath in wctx.substate:
2089 2089 if f.startswith(subpath):
2090 2090 return True
2091 2091 return False
2092 2092
2093 2093 if f in repo.dirstate or f in wctx.dirs() or f == '.' or insubrepo():
2094 2094 continue
2095 2095
2096 2096 if os.path.exists(m.rel(join(f))):
2097 2097 if os.path.isdir(m.rel(join(f))):
2098 2098 ui.warn(_('not removing %s: no tracked files\n')
2099 2099 % m.rel(join(f)))
2100 2100 else:
2101 2101 ui.warn(_('not removing %s: file is untracked\n')
2102 2102 % m.rel(join(f)))
2103 2103 # missing files will generate a warning elsewhere
2104 2104 ret = 1
2105 2105
2106 2106 if force:
2107 2107 list = modified + deleted + clean + added
2108 2108 elif after:
2109 2109 list = deleted
2110 2110 for f in modified + added + clean:
2111 2111 ui.warn(_('not removing %s: file still exists\n') % m.rel(join(f)))
2112 2112 ret = 1
2113 2113 else:
2114 2114 list = deleted + clean
2115 2115 for f in modified:
2116 2116 ui.warn(_('not removing %s: file is modified (use -f'
2117 2117 ' to force removal)\n') % m.rel(join(f)))
2118 2118 ret = 1
2119 2119 for f in added:
2120 2120 ui.warn(_('not removing %s: file has been marked for add'
2121 2121 ' (use forget to undo)\n') % m.rel(join(f)))
2122 2122 ret = 1
2123 2123
2124 2124 for f in sorted(list):
2125 2125 if ui.verbose or not m.exact(f):
2126 2126 ui.status(_('removing %s\n') % m.rel(join(f)))
2127 2127
2128 2128 wlock = repo.wlock()
2129 2129 try:
2130 2130 if not after:
2131 2131 for f in list:
2132 2132 if f in added:
2133 2133 continue # we never unlink added files on remove
2134 2134 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2135 2135 repo[None].forget(list)
2136 2136 finally:
2137 2137 wlock.release()
2138 2138
2139 2139 return ret
2140 2140
2141 2141 def cat(ui, repo, ctx, matcher, prefix, **opts):
2142 2142 err = 1
2143 2143
2144 2144 def write(path):
2145 2145 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2146 2146 pathname=os.path.join(prefix, path))
2147 2147 data = ctx[path].data()
2148 2148 if opts.get('decode'):
2149 2149 data = repo.wwritedata(path, data)
2150 2150 fp.write(data)
2151 2151 fp.close()
2152 2152
2153 2153 # Automation often uses hg cat on single files, so special case it
2154 2154 # for performance to avoid the cost of parsing the manifest.
2155 2155 if len(matcher.files()) == 1 and not matcher.anypats():
2156 2156 file = matcher.files()[0]
2157 2157 mf = repo.manifest
2158 2158 mfnode = ctx._changeset[0]
2159 2159 if mf.find(mfnode, file)[0]:
2160 2160 write(file)
2161 2161 return 0
2162 2162
2163 2163 # Don't warn about "missing" files that are really in subrepos
2164 2164 bad = matcher.bad
2165 2165
2166 2166 def badfn(path, msg):
2167 2167 for subpath in ctx.substate:
2168 2168 if path.startswith(subpath):
2169 2169 return
2170 2170 bad(path, msg)
2171 2171
2172 2172 matcher.bad = badfn
2173 2173
2174 2174 for abs in ctx.walk(matcher):
2175 2175 write(abs)
2176 2176 err = 0
2177 2177
2178 2178 matcher.bad = bad
2179 2179
2180 2180 for subpath in sorted(ctx.substate):
2181 2181 sub = ctx.sub(subpath)
2182 2182 try:
2183 2183 submatch = matchmod.narrowmatcher(subpath, matcher)
2184 2184
2185 2185 if not sub.cat(ui, submatch, os.path.join(prefix, sub._path),
2186 2186 **opts):
2187 2187 err = 0
2188 2188 except error.RepoLookupError:
2189 2189 ui.status(_("skipping missing subrepository: %s\n")
2190 2190 % os.path.join(prefix, subpath))
2191 2191
2192 2192 return err
2193 2193
2194 2194 def commit(ui, repo, commitfunc, pats, opts):
2195 2195 '''commit the specified files or all outstanding changes'''
2196 2196 date = opts.get('date')
2197 2197 if date:
2198 2198 opts['date'] = util.parsedate(date)
2199 2199 message = logmessage(ui, opts)
2200 matcher = scmutil.match(repo[None], pats, opts)
2200 2201
2201 2202 # extract addremove carefully -- this function can be called from a command
2202 2203 # that doesn't support addremove
2203 2204 if opts.get('addremove'):
2204 scmutil.addremove(repo, pats, opts)
2205
2206 return commitfunc(ui, repo, message,
2207 scmutil.match(repo[None], pats, opts), opts)
2205 scmutil.addremove(repo, matcher, opts)
2206
2207 return commitfunc(ui, repo, message, matcher, opts)
2208 2208
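# Editor's note on the hunk above: commit() now builds the matcher once and
# hands it both to scmutil.addremove() and to the commit function. A
# caller-side sketch of the new convention:
def _addremoveexample(repo, pats, opts):
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, opts)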
2209 2209 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2210 2210 # amend will reuse the existing user if not specified, but the obsolete
2211 2211 # marker creation requires that the current user's name is specified.
2212 2212 if obsolete._enabled:
2213 2213 ui.username() # raise exception if username not set
2214 2214
2215 2215 ui.note(_('amending changeset %s\n') % old)
2216 2216 base = old.p1()
2217 2217
2218 2218 wlock = lock = newid = None
2219 2219 try:
2220 2220 wlock = repo.wlock()
2221 2221 lock = repo.lock()
2222 2222 tr = repo.transaction('amend')
2223 2223 try:
2224 2224 # See if we got a message from -m or -l, if not, open the editor
2225 2225 # with the message of the changeset to amend
2226 2226 message = logmessage(ui, opts)
2227 2227 # ensure logfile does not conflict with later enforcement of the
2228 2228 # message. potential logfile content has been processed by
2229 2229 # `logmessage` anyway.
2230 2230 opts.pop('logfile')
2231 2231 # First, do a regular commit to record all changes in the working
2232 2232 # directory (if there are any)
2233 2233 ui.callhooks = False
2234 2234 currentbookmark = repo._bookmarkcurrent
2235 2235 try:
2236 2236 repo._bookmarkcurrent = None
2237 2237 opts['message'] = 'temporary amend commit for %s' % old
2238 2238 node = commit(ui, repo, commitfunc, pats, opts)
2239 2239 finally:
2240 2240 repo._bookmarkcurrent = currentbookmark
2241 2241 ui.callhooks = True
2242 2242 ctx = repo[node]
2243 2243
2244 2244 # Participating changesets:
2245 2245 #
2246 2246 # node/ctx o - new (intermediate) commit that contains changes
2247 2247 # | from working dir to go into amending commit
2248 2248 # | (or a workingctx if there were no changes)
2249 2249 # |
2250 2250 # old o - changeset to amend
2251 2251 # |
2252 2252 # base o - parent of amending changeset
2253 2253
2254 2254 # Update extra dict from amended commit (e.g. to preserve graft
2255 2255 # source)
2256 2256 extra.update(old.extra())
2257 2257
2258 2258 # Also update it from the intermediate commit or from the wctx
2259 2259 extra.update(ctx.extra())
2260 2260
2261 2261 if len(old.parents()) > 1:
2262 2262 # ctx.files() isn't reliable for merges, so fall back to the
2263 2263 # slower repo.status() method
2264 2264 files = set([fn for st in repo.status(base, old)[:3]
2265 2265 for fn in st])
2266 2266 else:
2267 2267 files = set(old.files())
2268 2268
2269 2269 # Second, we use either the commit we just did, or if there were no
2270 2270 # changes the parent of the working directory as the version of the
2271 2271 # files in the final amend commit
2272 2272 if node:
2273 2273 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2274 2274
2275 2275 user = ctx.user()
2276 2276 date = ctx.date()
2277 2277 # Recompute copies (avoid recording a -> b -> a)
2278 2278 copied = copies.pathcopies(base, ctx)
2279 2279
2280 2280 # Prune files which were reverted by the updates: if old
2281 2281 # introduced file X and our intermediate commit, node,
2282 2282 # renamed that file, then those two files are the same and
2283 2283 # we can discard X from our list of files. Likewise if X
2284 2284 # was deleted, it's no longer relevant
2285 2285 files.update(ctx.files())
2286 2286
2287 2287 def samefile(f):
2288 2288 if f in ctx.manifest():
2289 2289 a = ctx.filectx(f)
2290 2290 if f in base.manifest():
2291 2291 b = base.filectx(f)
2292 2292 return (not a.cmp(b)
2293 2293 and a.flags() == b.flags())
2294 2294 else:
2295 2295 return False
2296 2296 else:
2297 2297 return f not in base.manifest()
2298 2298 files = [f for f in files if not samefile(f)]
2299 2299
2300 2300 def filectxfn(repo, ctx_, path):
2301 2301 try:
2302 2302 fctx = ctx[path]
2303 2303 flags = fctx.flags()
2304 2304 mctx = context.memfilectx(repo,
2305 2305 fctx.path(), fctx.data(),
2306 2306 islink='l' in flags,
2307 2307 isexec='x' in flags,
2308 2308 copied=copied.get(path))
2309 2309 return mctx
2310 2310 except KeyError:
2311 2311 return None
2312 2312 else:
2313 2313 ui.note(_('copying changeset %s to %s\n') % (old, base))
2314 2314
2315 2315 # Use version of files as in the old cset
2316 2316 def filectxfn(repo, ctx_, path):
2317 2317 try:
2318 2318 return old.filectx(path)
2319 2319 except KeyError:
2320 2320 return None
2321 2321
2322 2322 user = opts.get('user') or old.user()
2323 2323 date = opts.get('date') or old.date()
2324 2324 editform = mergeeditform(old, 'commit.amend')
2325 2325 editor = getcommiteditor(editform=editform, **opts)
2326 2326 if not message:
2327 2327 editor = getcommiteditor(edit=True, editform=editform)
2328 2328 message = old.description()
2329 2329
2330 2330 pureextra = extra.copy()
2331 2331 extra['amend_source'] = old.hex()
2332 2332
2333 2333 new = context.memctx(repo,
2334 2334 parents=[base.node(), old.p2().node()],
2335 2335 text=message,
2336 2336 files=files,
2337 2337 filectxfn=filectxfn,
2338 2338 user=user,
2339 2339 date=date,
2340 2340 extra=extra,
2341 2341 editor=editor)
2342 2342
2343 2343 newdesc = changelog.stripdesc(new.description())
2344 2344 if ((not node)
2345 2345 and newdesc == old.description()
2346 2346 and user == old.user()
2347 2347 and date == old.date()
2348 2348 and pureextra == old.extra()):
2349 2349 # nothing changed. continuing here would create a new node
2350 2350 # anyway because of the amend_source noise.
2351 2351 #
2352 2352 # This is not what we expect from amend.
2353 2353 return old.node()
2354 2354
2355 2355 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2356 2356 try:
2357 2357 if opts.get('secret'):
2358 2358 commitphase = 'secret'
2359 2359 else:
2360 2360 commitphase = old.phase()
2361 2361 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2362 2362 newid = repo.commitctx(new)
2363 2363 finally:
2364 2364 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2365 2365 if newid != old.node():
2366 2366 # Reroute the working copy parent to the new changeset
2367 2367 repo.setparents(newid, nullid)
2368 2368
2369 2369 # Move bookmarks from old parent to amend commit
2370 2370 bms = repo.nodebookmarks(old.node())
2371 2371 if bms:
2372 2372 marks = repo._bookmarks
2373 2373 for bm in bms:
2374 2374 marks[bm] = newid
2375 2375 marks.write()
2376 2376 # commit the whole amend process
2377 2377 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2378 2378 if createmarkers and newid != old.node():
2379 2379 # mark the new changeset as successor of the rewritten one
2380 2380 new = repo[newid]
2381 2381 obs = [(old, (new,))]
2382 2382 if node:
2383 2383 obs.append((ctx, ()))
2384 2384
2385 2385 obsolete.createmarkers(repo, obs)
2386 2386 tr.close()
2387 2387 finally:
2388 2388 tr.release()
2389 2389 if not createmarkers and newid != old.node():
2390 2390 # Strip the intermediate commit (if there was one) and the amended
2391 2391 # commit
2392 2392 if node:
2393 2393 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2394 2394 ui.note(_('stripping amended changeset %s\n') % old)
2395 2395 repair.strip(ui, repo, old.node(), topic='amend-backup')
2396 2396 finally:
2397 2397 if newid is None:
2398 2398 repo.dirstate.invalidate()
2399 2399 lockmod.release(lock, wlock)
2400 2400 return newid
2401 2401
2402 2402 def commiteditor(repo, ctx, subs, editform=''):
2403 2403 if ctx.description():
2404 2404 return ctx.description()
2405 2405 return commitforceeditor(repo, ctx, subs, editform=editform)
2406 2406
2407 2407 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2408 2408 editform=''):
2409 2409 if not extramsg:
2410 2410 extramsg = _("Leave message empty to abort commit.")
2411 2411
2412 2412 forms = [e for e in editform.split('.') if e]
2413 2413 forms.insert(0, 'changeset')
2414 2414 while forms:
2415 2415 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2416 2416 if tmpl:
2417 2417 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2418 2418 break
2419 2419 forms.pop()
2420 2420 else:
2421 2421 committext = buildcommittext(repo, ctx, subs, extramsg)
2422 2422
2423 2423 # run editor in the repository root
2424 2424 olddir = os.getcwd()
2425 2425 os.chdir(repo.root)
2426 2426 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2427 2427 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2428 2428 os.chdir(olddir)
2429 2429
2430 2430 if finishdesc:
2431 2431 text = finishdesc(text)
2432 2432 if not text.strip():
2433 2433 raise util.Abort(_("empty commit message"))
2434 2434
2435 2435 return text
2436 2436
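# Editor's illustration: for editform='commit.amend' the loop above consults
# the [committemplate] section from the most specific key to the least
# specific one -- 'changeset.commit.amend', then 'changeset.commit', then
# 'changeset' -- before falling back to buildcommittext().
def _committemplateexample(repo, ctx, subs):
    return commitforceeditor(repo, ctx, subs, editform='commit.amend')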
2437 2437 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2438 2438 ui = repo.ui
2439 2439 tmpl, mapfile = gettemplate(ui, tmpl, None)
2440 2440
2441 2441 try:
2442 2442 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2443 2443 except SyntaxError, inst:
2444 2444 raise util.Abort(inst.args[0])
2445 2445
2446 2446 for k, v in repo.ui.configitems('committemplate'):
2447 2447 if k != 'changeset':
2448 2448 t.t.cache[k] = v
2449 2449
2450 2450 if not extramsg:
2451 2451 extramsg = '' # ensure that extramsg is string
2452 2452
2453 2453 ui.pushbuffer()
2454 2454 t.show(ctx, extramsg=extramsg)
2455 2455 return ui.popbuffer()
2456 2456
2457 2457 def buildcommittext(repo, ctx, subs, extramsg):
2458 2458 edittext = []
2459 2459 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2460 2460 if ctx.description():
2461 2461 edittext.append(ctx.description())
2462 2462 edittext.append("")
2463 2463 edittext.append("") # Empty line between message and comments.
2464 2464 edittext.append(_("HG: Enter commit message."
2465 2465 " Lines beginning with 'HG:' are removed."))
2466 2466 edittext.append("HG: %s" % extramsg)
2467 2467 edittext.append("HG: --")
2468 2468 edittext.append(_("HG: user: %s") % ctx.user())
2469 2469 if ctx.p2():
2470 2470 edittext.append(_("HG: branch merge"))
2471 2471 if ctx.branch():
2472 2472 edittext.append(_("HG: branch '%s'") % ctx.branch())
2473 2473 if bookmarks.iscurrent(repo):
2474 2474 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2475 2475 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2476 2476 edittext.extend([_("HG: added %s") % f for f in added])
2477 2477 edittext.extend([_("HG: changed %s") % f for f in modified])
2478 2478 edittext.extend([_("HG: removed %s") % f for f in removed])
2479 2479 if not added and not modified and not removed:
2480 2480 edittext.append(_("HG: no files changed"))
2481 2481 edittext.append("")
2482 2482
2483 2483 return "\n".join(edittext)
2484 2484
2485 2485 def commitstatus(repo, node, branch, bheads=None, opts={}):
2486 2486 ctx = repo[node]
2487 2487 parents = ctx.parents()
2488 2488
2489 2489 if (not opts.get('amend') and bheads and node not in bheads and not
2490 2490 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2491 2491 repo.ui.status(_('created new head\n'))
2492 2492 # The message is not printed for initial roots. For the other
2493 2493 # changesets, it is printed in the following situations:
2494 2494 #
2495 2495 # Par column: for the 2 parents with ...
2496 2496 # N: null or no parent
2497 2497 # B: parent is on another named branch
2498 2498 # C: parent is a regular non head changeset
2499 2499 # H: parent was a branch head of the current branch
2500 2500 # Msg column: whether we print "created new head" message
2501 2501 # In the following, it is assumed that there already exists some
2502 2502 # initial branch heads of the current branch, otherwise nothing is
2503 2503 # printed anyway.
2504 2504 #
2505 2505 # Par Msg Comment
2506 2506 # N N y additional topo root
2507 2507 #
2508 2508 # B N y additional branch root
2509 2509 # C N y additional topo head
2510 2510 # H N n usual case
2511 2511 #
2512 2512 # B B y weird additional branch root
2513 2513 # C B y branch merge
2514 2514 # H B n merge with named branch
2515 2515 #
2516 2516 # C C y additional head from merge
2517 2517 # C H n merge with a head
2518 2518 #
2519 2519 # H H n head merge: head count decreases
2520 2520
2521 2521 if not opts.get('close_branch'):
2522 2522 for r in parents:
2523 2523 if r.closesbranch() and r.branch() == branch:
2524 2524 repo.ui.status(_('reopening closed branch head %d\n') % r)
2525 2525
2526 2526 if repo.ui.debugflag:
2527 2527 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2528 2528 elif repo.ui.verbose:
2529 2529 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2530 2530
2531 2531 def revert(ui, repo, ctx, parents, *pats, **opts):
2532 2532 parent, p2 = parents
2533 2533 node = ctx.node()
2534 2534
2535 2535 mf = ctx.manifest()
2536 2536 if node == p2:
2537 2537 parent = p2
2538 2538 if node == parent:
2539 2539 pmf = mf
2540 2540 else:
2541 2541 pmf = None
2542 2542
2543 2543 # need all matching names in dirstate and manifest of target rev,
2544 2544 # so have to walk both. do not print errors if files exist in one
2545 2545 # but not the other.
2546 2546
2547 2547 # `names` is a mapping for all elements in working copy and target revision
2548 2548 # The mapping is in the form:
2549 2549 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2550 2550 names = {}
2551 2551
2552 2552 wlock = repo.wlock()
2553 2553 try:
2554 2554 ## filling of the `names` mapping
2555 2555 # walk dirstate to fill `names`
2556 2556
2557 2557 m = scmutil.match(repo[None], pats, opts)
2558 2558 if not m.always() or node != parent:
2559 2559 m.bad = lambda x, y: False
2560 2560 for abs in repo.walk(m):
2561 2561 names[abs] = m.rel(abs), m.exact(abs)
2562 2562
2563 2563 # walk target manifest to fill `names`
2564 2564
2565 2565 def badfn(path, msg):
2566 2566 if path in names:
2567 2567 return
2568 2568 if path in ctx.substate:
2569 2569 return
2570 2570 path_ = path + '/'
2571 2571 for f in names:
2572 2572 if f.startswith(path_):
2573 2573 return
2574 2574 ui.warn("%s: %s\n" % (m.rel(path), msg))
2575 2575
2576 2576 m = scmutil.match(ctx, pats, opts)
2577 2577 m.bad = badfn
2578 2578 for abs in ctx.walk(m):
2579 2579 if abs not in names:
2580 2580 names[abs] = m.rel(abs), m.exact(abs)
2581 2581
2582 2582 # Find status of all file in `names`.
2583 2583 m = scmutil.matchfiles(repo, names)
2584 2584
2585 2585 changes = repo.status(node1=node, match=m,
2586 2586 unknown=True, ignored=True, clean=True)
2587 2587 else:
2588 2588 changes = repo.status(match=m)
2589 2589 for kind in changes:
2590 2590 for abs in kind:
2591 2591 names[abs] = m.rel(abs), m.exact(abs)
2592 2592
2593 2593 m = scmutil.matchfiles(repo, names)
2594 2594
2595 2595 modified = set(changes.modified)
2596 2596 added = set(changes.added)
2597 2597 removed = set(changes.removed)
2598 2598 _deleted = set(changes.deleted)
2599 2599 unknown = set(changes.unknown)
2600 2600 unknown.update(changes.ignored)
2601 2601 clean = set(changes.clean)
2602 2602 modadded = set()
2603 2603
2604 2604 # split between files known in target manifest and the others
2605 2605 smf = set(mf)
2606 2606
2607 2607 # determine the exact nature of the deleted changesets
2608 2608 deladded = _deleted - smf
2609 2609 deleted = _deleted - deladded
2610 2610
2611 2611 # We need to account for the state of the file in the dirstate.
2612 2612 #
2613 2613 # This matters even when we revert against something other than the
2614 2614 # parent: it slightly alters the behavior of revert (doing a backup or
2615 2615 # not, delete or just forget, etc).
2616 2616 if parent == node:
2617 2617 dsmodified = modified
2618 2618 dsadded = added
2619 2619 dsremoved = removed
2620 2620 # store all local modifications, useful later for rename detection
2621 2621 localchanges = dsmodified | dsadded
2622 2622 modified, added, removed = set(), set(), set()
2623 2623 else:
2624 2624 changes = repo.status(node1=parent, match=m)
2625 2625 dsmodified = set(changes.modified)
2626 2626 dsadded = set(changes.added)
2627 2627 dsremoved = set(changes.removed)
2628 2628 # store all local modifications, useful later for rename detection
2629 2629 localchanges = dsmodified | dsadded
2630 2630
2631 2631 # only take into account for removes between wc and target
2632 2632 clean |= dsremoved - removed
2633 2633 dsremoved &= removed
2634 2634 # distinguish between dirstate removes and the others
2635 2635 removed -= dsremoved
2636 2636
2637 2637 modadded = added & dsmodified
2638 2638 added -= modadded
2639 2639
2640 2640 # tell newly modified apart.
2641 2641 dsmodified &= modified
2642 2642 dsmodified |= modified & dsadded # dirstate added may need backup
2643 2643 modified -= dsmodified
2644 2644
2645 2645 # We need to wait for some post-processing to update this set
2646 2646 # before making the distinction. The dirstate will be used for
2647 2647 # that purpose.
2648 2648 dsadded = added
2649 2649
2650 2650 # in case of merge, files that are actually added can be reported as
2651 2651 # modified, we need to post process the result
2652 2652 if p2 != nullid:
2653 2653 if pmf is None:
2654 2654 # only need parent manifest in the merge case,
2655 2655 # so do not read by default
2656 2656 pmf = repo[parent].manifest()
2657 2657 mergeadd = dsmodified - set(pmf)
2658 2658 dsadded |= mergeadd
2659 2659 dsmodified -= mergeadd
2660 2660
2661 2661 # if f is a rename, update `names` to also revert the source
2662 2662 cwd = repo.getcwd()
2663 2663 for f in localchanges:
2664 2664 src = repo.dirstate.copied(f)
2665 2665 # XXX should we check for rename down to target node?
2666 2666 if src and src not in names and repo.dirstate[src] == 'r':
2667 2667 dsremoved.add(src)
2668 2668 names[src] = (repo.pathto(src, cwd), True)
2669 2669
2670 2670 # distinguish between file to forget and the other
2671 2671 added = set()
2672 2672 for abs in dsadded:
2673 2673 if repo.dirstate[abs] != 'a':
2674 2674 added.add(abs)
2675 2675 dsadded -= added
2676 2676
2677 2677 for abs in deladded:
2678 2678 if repo.dirstate[abs] == 'a':
2679 2679 dsadded.add(abs)
2680 2680 deladded -= dsadded
2681 2681
2682 2682 # For files marked as removed, we check if an unknown file is present at
2683 2683 # the same path. If such a file exists it may need to be backed up.
2684 2684 # Making the distinction at this stage helps have simpler backup
2685 2685 # logic.
2686 2686 removunk = set()
2687 2687 for abs in removed:
2688 2688 target = repo.wjoin(abs)
2689 2689 if os.path.lexists(target):
2690 2690 removunk.add(abs)
2691 2691 removed -= removunk
2692 2692
2693 2693 dsremovunk = set()
2694 2694 for abs in dsremoved:
2695 2695 target = repo.wjoin(abs)
2696 2696 if os.path.lexists(target):
2697 2697 dsremovunk.add(abs)
2698 2698 dsremoved -= dsremovunk
2699 2699
2700 2700 # action to be actually performed by revert
2701 2701 # (<list of files>, <message>) tuple
2702 2702 actions = {'revert': ([], _('reverting %s\n')),
2703 2703 'add': ([], _('adding %s\n')),
2704 2704 'remove': ([], _('removing %s\n')),
2705 2705 'drop': ([], _('removing %s\n')),
2706 2706 'forget': ([], _('forgetting %s\n')),
2707 2707 'undelete': ([], _('undeleting %s\n')),
2708 2708 'noop': (None, _('no changes needed to %s\n')),
2709 2709 'unknown': (None, _('file not managed: %s\n')),
2710 2710 }
2711 2711
2712 2712 # "constant" that convey the backup strategy.
2713 2713 # All set to `discard` if `no-backup` is set to avoid checking
2714 2714 # no_backup lower in the code.
2715 2715 # These values are ordered for comparison purposes
2716 2716 backup = 2 # unconditionally do backup
2717 2717 check = 1 # check if the existing file differs from target
2718 2718 discard = 0 # never do backup
2719 2719 if opts.get('no_backup'):
2720 2720 backup = check = discard
2721 2721
2722 2722 backupanddel = actions['remove']
2723 2723 if not opts.get('no_backup'):
2724 2724 backupanddel = actions['drop']
2725 2725
2726 2726 disptable = (
2727 2727 # dispatch table:
2728 2728 # file state
2729 2729 # action
2730 2730 # make backup
2731 2731
2732 2732 ## Sets that will result in file changes on disk
2733 2733 # Modified compared to target, no local change
2734 2734 (modified, actions['revert'], discard),
2735 2735 # Modified compared to target, but local file is deleted
2736 2736 (deleted, actions['revert'], discard),
2737 2737 # Modified compared to target, local change
2738 2738 (dsmodified, actions['revert'], backup),
2739 2739 # Added since target
2740 2740 (added, actions['remove'], discard),
2741 2741 # Added in working directory
2742 2742 (dsadded, actions['forget'], discard),
2743 2743 # Added since target, have local modification
2744 2744 (modadded, backupanddel, backup),
2745 2745 # Added since target but file is missing in working directory
2746 2746 (deladded, actions['drop'], discard),
2747 2747 # Removed since target, before working copy parent
2748 2748 (removed, actions['add'], discard),
2749 2749 # Same as `removed` but an unknown file exists at the same path
2750 2750 (removunk, actions['add'], check),
2751 2751 # Removed since target, marked as such in working copy parent
2752 2752 (dsremoved, actions['undelete'], discard),
2753 2753 # Same as `dsremoved` but an unknown file exists at the same path
2754 2754 (dsremovunk, actions['undelete'], check),
2755 2755 ## the following sets do not result in any file changes
2756 2756 # File with no modification
2757 2757 (clean, actions['noop'], discard),
2758 2758 # Existing file, not tracked anywhere
2759 2759 (unknown, actions['unknown'], discard),
2760 2760 )
2761 2761
2762 2762 needdata = ('revert', 'add', 'undelete')
2763 2763 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
2764 2764
2765 2765 wctx = repo[None]
2766 2766 for abs, (rel, exact) in sorted(names.items()):
2767 2767 # target file to be touched on disk (relative to cwd)
2768 2768 target = repo.wjoin(abs)
2769 2769 # search the entry in the dispatch table.
2770 2770 # if the file is in any of these sets, it was touched in the working
2771 2771 # directory parent and we are sure it needs to be reverted.
2772 2772 for table, (xlist, msg), dobackup in disptable:
2773 2773 if abs not in table:
2774 2774 continue
2775 2775 if xlist is not None:
2776 2776 xlist.append(abs)
2777 2777 if dobackup and (backup <= dobackup
2778 2778 or wctx[abs].cmp(ctx[abs])):
2779 2779 bakname = "%s.orig" % rel
2780 2780 ui.note(_('saving current version of %s as %s\n') %
2781 2781 (rel, bakname))
2782 2782 if not opts.get('dry_run'):
2783 2783 util.rename(target, bakname)
2784 2784 if ui.verbose or not exact:
2785 2785 if not isinstance(msg, basestring):
2786 2786 msg = msg(abs)
2787 2787 ui.status(msg % rel)
2788 2788 elif exact:
2789 2789 ui.warn(msg % rel)
2790 2790 break
2791 2791
2792 2792
2793 2793 if not opts.get('dry_run'):
2794 2794 _performrevert(repo, parents, ctx, actions)
2795 2795
2796 2796 # get the list of subrepos that must be reverted
2797 2797 subrepomatch = scmutil.match(ctx, pats, opts)
2798 2798 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
2799 2799
2800 2800 if targetsubs:
2801 2801 # Revert the subrepos on the revert list
2802 2802 for sub in targetsubs:
2803 2803 ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
2804 2804 finally:
2805 2805 wlock.release()
2806 2806
2807 2807 def _revertprefetch(repo, ctx, *files):
2808 2808 """Let extension changing the storage layer prefetch content"""
2809 2809 pass
2810 2810
2811 2811 def _performrevert(repo, parents, ctx, actions):
2812 2812 """function that actually perform all the actions computed for revert
2813 2813
2814 2814 This is an independent function to let extensions plug in and react to
2815 2815 the imminent revert.
2816 2816
2817 2817 Make sure you have the working directory locked when calling this function.
2818 2818 """
2819 2819 parent, p2 = parents
2820 2820 node = ctx.node()
2821 2821 def checkout(f):
2822 2822 fc = ctx[f]
2823 2823 repo.wwrite(f, fc.data(), fc.flags())
2824 2824
2825 2825 audit_path = pathutil.pathauditor(repo.root)
2826 2826 for f in actions['forget'][0]:
2827 2827 repo.dirstate.drop(f)
2828 2828 for f in actions['remove'][0]:
2829 2829 audit_path(f)
2830 2830 util.unlinkpath(repo.wjoin(f))
2831 2831 repo.dirstate.remove(f)
2832 2832 for f in actions['drop'][0]:
2833 2833 audit_path(f)
2834 2834 repo.dirstate.remove(f)
2835 2835
2836 2836 normal = None
2837 2837 if node == parent:
2838 2838 # We're reverting to our parent. If possible, we'd like status
2839 2839 # to report the file as clean. We have to use normallookup for
2840 2840 # merges to avoid losing information about merged/dirty files.
2841 2841 if p2 != nullid:
2842 2842 normal = repo.dirstate.normallookup
2843 2843 else:
2844 2844 normal = repo.dirstate.normal
2845 2845 for f in actions['revert'][0]:
2846 2846 checkout(f)
2847 2847 if normal:
2848 2848 normal(f)
2849 2849
2850 2850 for f in actions['add'][0]:
2851 2851 checkout(f)
2852 2852 repo.dirstate.add(f)
2853 2853
2854 2854 normal = repo.dirstate.normallookup
2855 2855 if node == parent and p2 == nullid:
2856 2856 normal = repo.dirstate.normal
2857 2857 for f in actions['undelete'][0]:
2858 2858 checkout(f)
2859 2859 normal(f)
2860 2860
2861 2861 copied = copies.pathcopies(repo[parent], ctx)
2862 2862
2863 2863 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2864 2864 if f in copied:
2865 2865 repo.dirstate.copy(copied[f], f)
2866 2866
2867 2867 def command(table):
2868 2868 """Returns a function object to be used as a decorator for making commands.
2869 2869
2870 2870 This function receives a command table as its argument. The table should
2871 2871 be a dict.
2872 2872
2873 2873 The returned function can be used as a decorator for adding commands
2874 2874 to that command table. This function accepts multiple arguments to define
2875 2875 a command.
2876 2876
2877 2877 The first argument is the command name.
2878 2878
2879 2879 The options argument is an iterable of tuples defining command arguments.
2880 2880 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
2881 2881
2882 2882 The synopsis argument defines a short, one line summary of how to use the
2883 2883 command. This shows up in the help output.
2884 2884
2885 2885 The norepo argument defines whether the command does not require a
2886 2886 local repository. Most commands operate against a repository, thus the
2887 2887 default is False.
2888 2888
2889 2889 The optionalrepo argument defines whether the command optionally requires
2890 2890 a local repository.
2891 2891
2892 2892 The inferrepo argument defines whether to try to find a repository from the
2893 2893 command line arguments. If True, arguments will be examined for potential
2894 2894 repository locations. See ``findrepo()``. If a repository is found, it
2895 2895 will be used.
2896 2896 """
2897 2897 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
2898 2898 inferrepo=False):
2899 2899 def decorator(func):
2900 2900 if synopsis:
2901 2901 table[name] = func, list(options), synopsis
2902 2902 else:
2903 2903 table[name] = func, list(options)
2904 2904
2905 2905 if norepo:
2906 2906 # Avoid import cycle.
2907 2907 import commands
2908 2908 commands.norepo += ' %s' % ' '.join(parsealiases(name))
2909 2909
2910 2910 if optionalrepo:
2911 2911 import commands
2912 2912 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
2913 2913
2914 2914 if inferrepo:
2915 2915 import commands
2916 2916 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
2917 2917
2918 2918 return func
2919 2919 return decorator
2920 2920
2921 2921 return cmd
2922 2922
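As a usage illustration of the decorator factory above (the 'hello' command, its option, and the table name are invented for this sketch):

# Illustrative only: how a module would register a command through
# command(). The option tuple follows the mercurial.fancyopts format.
hellotable = {}
hellocommand = command(hellotable)

@hellocommand('hello', [('g', 'greeting', 'hello', 'greeting text')],
              'hg hello [-g TEXT]')
def hello(ui, repo, **opts):
    ui.write('%s from %s\n' % (opts['greeting'], repo.root))

# hellotable now maps 'hello' to (hello, [option tuple], 'hg hello [-g TEXT]')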
2923 2923 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2924 2924 # commands.outgoing. "missing" is "missing" of the result of
2925 2925 # "findcommonoutgoing()"
2926 2926 outgoinghooks = util.hooks()
2927 2927
2928 2928 # a list of (ui, repo) functions called by commands.summary
2929 2929 summaryhooks = util.hooks()
2930 2930
2931 2931 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2932 2932 #
2933 2933 # functions should return tuple of booleans below, if 'changes' is None:
2934 2934 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2935 2935 #
2936 2936 # otherwise, 'changes' is a tuple of tuples below:
2937 2937 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2938 2938 # - (desturl, destbranch, destpeer, outgoing)
2939 2939 summaryremotehooks = util.hooks()
2940 2940
2941 2941 # A list of state files kept by multistep operations like graft.
2942 2942 # Since graft cannot be aborted, it is considered 'clearable' by update.
2943 2943 # note: bisect is intentionally excluded
2944 2944 # (state file, clearable, allowcommit, error, hint)
2945 2945 unfinishedstates = [
2946 2946 ('graftstate', True, False, _('graft in progress'),
2947 2947 _("use 'hg graft --continue' or 'hg update' to abort")),
2948 2948 ('updatestate', True, False, _('last update was interrupted'),
2949 2949 _("use 'hg update' to get a consistent checkout"))
2950 2950 ]
2951 2951
2952 2952 def checkunfinished(repo, commit=False):
2953 2953 '''Look for an unfinished multistep operation, like graft, and abort
2954 2954 if found. It's probably good to check this right before
2955 2955 bailifchanged().
2956 2956 '''
2957 2957 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2958 2958 if commit and allowcommit:
2959 2959 continue
2960 2960 if repo.vfs.exists(f):
2961 2961 raise util.Abort(msg, hint=hint)
2962 2962
2963 2963 def clearunfinished(repo):
2964 2964 '''Check for unfinished operations (as above), and clear the ones
2965 2965 that are clearable.
2966 2966 '''
2967 2967 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2968 2968 if not clearable and repo.vfs.exists(f):
2969 2969 raise util.Abort(msg, hint=hint)
2970 2970 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2971 2971 if clearable and repo.vfs.exists(f):
2972 2972 util.unlink(repo.join(f))
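Extensions can append to unfinishedstates so that checkunfinished() and clearunfinished() handle their own state files as well; a rough sketch with invented names (the shelve extension registers its state file in much the same way):

# Illustrative extension code: register an additional state file.
cmdutil.unfinishedstates.append(
    ('myextstate', False, False, _('myext operation already in progress'),
     _("use 'hg myext --continue' or 'hg myext --abort'")))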
@@ -1,6263 +1,6264
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys, socket
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb, commandserver
17 17 import extensions
18 18 from hgweb import server as hgweb_server
19 19 import merge as mergemod
20 20 import minirst, revset, fileset
21 21 import dagparser, context, simplemerge, graphmod, copies
22 22 import random
23 23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 24 import phases, obsolete, exchange
25 25 import ui as uimod
26 26
27 27 table = {}
28 28
29 29 command = cmdutil.command(table)
30 30
31 31 # Space delimited list of commands that don't require local repositories.
32 32 # This should be populated by passing norepo=True into the @command decorator.
33 33 norepo = ''
34 34 # Space delimited list of commands that optionally require local repositories.
35 35 # This should be populated by passing optionalrepo=True into the @command
36 36 # decorator.
37 37 optionalrepo = ''
38 38 # Space delimited list of commands that will examine arguments looking for
39 39 # a repository. This should be populated by passing inferrepo=True into the
40 40 # @command decorator.
41 41 inferrepo = ''
42 42
43 43 # common command options
44 44
45 45 globalopts = [
46 46 ('R', 'repository', '',
47 47 _('repository root directory or name of overlay bundle file'),
48 48 _('REPO')),
49 49 ('', 'cwd', '',
50 50 _('change working directory'), _('DIR')),
51 51 ('y', 'noninteractive', None,
52 52 _('do not prompt, automatically pick the first choice for all prompts')),
53 53 ('q', 'quiet', None, _('suppress output')),
54 54 ('v', 'verbose', None, _('enable additional output')),
55 55 ('', 'config', [],
56 56 _('set/override config option (use \'section.name=value\')'),
57 57 _('CONFIG')),
58 58 ('', 'debug', None, _('enable debugging output')),
59 59 ('', 'debugger', None, _('start debugger')),
60 60 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
61 61 _('ENCODE')),
62 62 ('', 'encodingmode', encoding.encodingmode,
63 63 _('set the charset encoding mode'), _('MODE')),
64 64 ('', 'traceback', None, _('always print a traceback on exception')),
65 65 ('', 'time', None, _('time how long the command takes')),
66 66 ('', 'profile', None, _('print command execution profile')),
67 67 ('', 'version', None, _('output version information and exit')),
68 68 ('h', 'help', None, _('display help and exit')),
69 69 ('', 'hidden', False, _('consider hidden changesets')),
70 70 ]
71 71
72 72 dryrunopts = [('n', 'dry-run', None,
73 73 _('do not perform actions, just print output'))]
74 74
75 75 remoteopts = [
76 76 ('e', 'ssh', '',
77 77 _('specify ssh command to use'), _('CMD')),
78 78 ('', 'remotecmd', '',
79 79 _('specify hg command to run on the remote side'), _('CMD')),
80 80 ('', 'insecure', None,
81 81 _('do not verify server certificate (ignoring web.cacerts config)')),
82 82 ]
83 83
84 84 walkopts = [
85 85 ('I', 'include', [],
86 86 _('include names matching the given patterns'), _('PATTERN')),
87 87 ('X', 'exclude', [],
88 88 _('exclude names matching the given patterns'), _('PATTERN')),
89 89 ]
90 90
91 91 commitopts = [
92 92 ('m', 'message', '',
93 93 _('use text as commit message'), _('TEXT')),
94 94 ('l', 'logfile', '',
95 95 _('read commit message from file'), _('FILE')),
96 96 ]
97 97
98 98 commitopts2 = [
99 99 ('d', 'date', '',
100 100 _('record the specified date as commit date'), _('DATE')),
101 101 ('u', 'user', '',
102 102 _('record the specified user as committer'), _('USER')),
103 103 ]
104 104
105 105 # hidden for now
106 106 formatteropts = [
107 107 ('T', 'template', '',
108 108 _('display with template (DEPRECATED)'), _('TEMPLATE')),
109 109 ]
110 110
111 111 templateopts = [
112 112 ('', 'style', '',
113 113 _('display using template map file (DEPRECATED)'), _('STYLE')),
114 114 ('T', 'template', '',
115 115 _('display with template'), _('TEMPLATE')),
116 116 ]
117 117
118 118 logopts = [
119 119 ('p', 'patch', None, _('show patch')),
120 120 ('g', 'git', None, _('use git extended diff format')),
121 121 ('l', 'limit', '',
122 122 _('limit number of changes displayed'), _('NUM')),
123 123 ('M', 'no-merges', None, _('do not show merges')),
124 124 ('', 'stat', None, _('output diffstat-style summary of changes')),
125 125 ('G', 'graph', None, _("show the revision DAG")),
126 126 ] + templateopts
127 127
128 128 diffopts = [
129 129 ('a', 'text', None, _('treat all files as text')),
130 130 ('g', 'git', None, _('use git extended diff format')),
131 131 ('', 'nodates', None, _('omit dates from diff headers'))
132 132 ]
133 133
134 134 diffwsopts = [
135 135 ('w', 'ignore-all-space', None,
136 136 _('ignore white space when comparing lines')),
137 137 ('b', 'ignore-space-change', None,
138 138 _('ignore changes in the amount of white space')),
139 139 ('B', 'ignore-blank-lines', None,
140 140 _('ignore changes whose lines are all blank')),
141 141 ]
142 142
143 143 diffopts2 = [
144 144 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
145 145 ('p', 'show-function', None, _('show which function each change is in')),
146 146 ('', 'reverse', None, _('produce a diff that undoes the changes')),
147 147 ] + diffwsopts + [
148 148 ('U', 'unified', '',
149 149 _('number of lines of context to show'), _('NUM')),
150 150 ('', 'stat', None, _('output diffstat-style summary of changes')),
151 151 ]
152 152
153 153 mergetoolopts = [
154 154 ('t', 'tool', '', _('specify merge tool')),
155 155 ]
156 156
157 157 similarityopts = [
158 158 ('s', 'similarity', '',
159 159 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
160 160 ]
161 161
162 162 subrepoopts = [
163 163 ('S', 'subrepos', None,
164 164 _('recurse into subrepositories'))
165 165 ]
166 166
167 167 # Commands start here, listed alphabetically
168 168
169 169 @command('^add',
170 170 walkopts + subrepoopts + dryrunopts,
171 171 _('[OPTION]... [FILE]...'),
172 172 inferrepo=True)
173 173 def add(ui, repo, *pats, **opts):
174 174 """add the specified files on the next commit
175 175
176 176 Schedule files to be version controlled and added to the
177 177 repository.
178 178
179 179 The files will be added to the repository at the next commit. To
180 180 undo an add before that, see :hg:`forget`.
181 181
182 182 If no names are given, add all files to the repository.
183 183
184 184 .. container:: verbose
185 185
186 186 An example showing how new (unknown) files are added
187 187 automatically by :hg:`add`::
188 188
189 189 $ ls
190 190 foo.c
191 191 $ hg status
192 192 ? foo.c
193 193 $ hg add
194 194 adding foo.c
195 195 $ hg status
196 196 A foo.c
197 197
198 198 Returns 0 if all files are successfully added.
199 199 """
200 200
201 201 m = scmutil.match(repo[None], pats, opts)
202 202 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
203 203 opts.get('subrepos'), prefix="", explicitonly=False)
204 204 return rejected and 1 or 0
205 205
206 206 @command('addremove',
207 207 similarityopts + walkopts + dryrunopts,
208 208 _('[OPTION]... [FILE]...'),
209 209 inferrepo=True)
210 210 def addremove(ui, repo, *pats, **opts):
211 211 """add all new files, delete all missing files
212 212
213 213 Add all new files and remove all missing files from the
214 214 repository.
215 215
216 216 New files are ignored if they match any of the patterns in
217 217 ``.hgignore``. As with add, these changes take effect at the next
218 218 commit.
219 219
220 220 Use the -s/--similarity option to detect renamed files. This
221 221 option takes a percentage between 0 (disabled) and 100 (files must
222 222 be identical) as its parameter. With a parameter greater than 0,
223 223 this compares every removed file with every added file and records
224 224 those similar enough as renames. Detecting renamed files this way
225 225 can be expensive. After using this option, :hg:`status -C` can be
226 226 used to check which files were identified as moved or renamed. If
227 227 not specified, -s/--similarity defaults to 100 and only renames of
228 228 identical files are detected.
229 229
230 230 Returns 0 if all files are successfully added.
231 231 """
232 232 try:
233 233 sim = float(opts.get('similarity') or 100)
234 234 except ValueError:
235 235 raise util.Abort(_('similarity must be a number'))
236 236 if sim < 0 or sim > 100:
237 237 raise util.Abort(_('similarity must be between 0 and 100'))
238 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
238 matcher = scmutil.match(repo[None], pats, opts)
239 return scmutil.addremove(repo, matcher, opts, similarity=sim / 100.0)
239 240
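The hunk above is the core of this changeset for commands.py: addremove() now builds a matcher from the patterns and hands that to scmutil.addremove() instead of the raw pattern list. A minimal sketch of a caller under the new convention (pattern and similarity values are invented):

# Illustrative caller after this change: construct the matcher up front,
# then pass it to scmutil.addremove().
matcher = scmutil.match(repo[None], ['glob:*.py'], opts)
scmutil.addremove(repo, matcher, opts, similarity=0.75)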
240 241 @command('^annotate|blame',
241 242 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
242 243 ('', 'follow', None,
243 244 _('follow copies/renames and list the filename (DEPRECATED)')),
244 245 ('', 'no-follow', None, _("don't follow copies and renames")),
245 246 ('a', 'text', None, _('treat all files as text')),
246 247 ('u', 'user', None, _('list the author (long with -v)')),
247 248 ('f', 'file', None, _('list the filename')),
248 249 ('d', 'date', None, _('list the date (short with -q)')),
249 250 ('n', 'number', None, _('list the revision number (default)')),
250 251 ('c', 'changeset', None, _('list the changeset')),
251 252 ('l', 'line-number', None, _('show line number at the first appearance'))
252 253 ] + diffwsopts + walkopts + formatteropts,
253 254 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
254 255 inferrepo=True)
255 256 def annotate(ui, repo, *pats, **opts):
256 257 """show changeset information by line for each file
257 258
258 259 List changes in files, showing the revision id responsible for
259 260 each line.
260 261
261 262 This command is useful for discovering when a change was made and
262 263 by whom.
263 264
264 265 Without the -a/--text option, annotate will avoid processing files
265 266 it detects as binary. With -a, annotate will annotate the file
266 267 anyway, although the results will probably be neither useful
267 268 nor desirable.
268 269
269 270 Returns 0 on success.
270 271 """
271 272 if not pats:
272 273 raise util.Abort(_('at least one filename or pattern is required'))
273 274
274 275 if opts.get('follow'):
275 276 # --follow is deprecated and now just an alias for -f/--file
276 277 # to mimic the behavior of Mercurial before version 1.5
277 278 opts['file'] = True
278 279
279 280 fm = ui.formatter('annotate', opts)
280 281 datefunc = ui.quiet and util.shortdate or util.datestr
281 282 hexfn = fm.hexfunc
282 283
283 284 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
284 285 ('number', ' ', lambda x: x[0].rev(), str),
285 286 ('changeset', ' ', lambda x: hexfn(x[0].node()), str),
286 287 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
287 288 ('file', ' ', lambda x: x[0].path(), str),
288 289 ('line_number', ':', lambda x: x[1], str),
289 290 ]
290 291 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
291 292
292 293 if (not opts.get('user') and not opts.get('changeset')
293 294 and not opts.get('date') and not opts.get('file')):
294 295 opts['number'] = True
295 296
296 297 linenumber = opts.get('line_number') is not None
297 298 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
298 299 raise util.Abort(_('at least one of -n/-c is required for -l'))
299 300
300 301 if fm:
301 302 def makefunc(get, fmt):
302 303 return get
303 304 else:
304 305 def makefunc(get, fmt):
305 306 return lambda x: fmt(get(x))
306 307 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
307 308 if opts.get(op)]
308 309 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
309 310 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
310 311 if opts.get(op))
311 312
312 313 def bad(x, y):
313 314 raise util.Abort("%s: %s" % (x, y))
314 315
315 316 ctx = scmutil.revsingle(repo, opts.get('rev'))
316 317 m = scmutil.match(ctx, pats, opts)
317 318 m.bad = bad
318 319 follow = not opts.get('no_follow')
319 320 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
320 321 whitespace=True)
321 322 for abs in ctx.walk(m):
322 323 fctx = ctx[abs]
323 324 if not opts.get('text') and util.binary(fctx.data()):
324 325 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
325 326 continue
326 327
327 328 lines = fctx.annotate(follow=follow, linenumber=linenumber,
328 329 diffopts=diffopts)
329 330 formats = []
330 331 pieces = []
331 332
332 333 for f, sep in funcmap:
333 334 l = [f(n) for n, dummy in lines]
334 335 if l:
335 336 if fm:
336 337 formats.append(['%s' for x in l])
337 338 else:
338 339 sizes = [encoding.colwidth(x) for x in l]
339 340 ml = max(sizes)
340 341 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
341 342 pieces.append(l)
342 343
343 344 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
344 345 fm.startitem()
345 346 fm.write(fields, "".join(f), *p)
346 347 fm.write('line', ": %s", l[1])
347 348
348 349 if lines and not lines[-1][1].endswith('\n'):
349 350 fm.plain('\n')
350 351
351 352 fm.end()
352 353
353 354 @command('archive',
354 355 [('', 'no-decode', None, _('do not pass files through decoders')),
355 356 ('p', 'prefix', '', _('directory prefix for files in archive'),
356 357 _('PREFIX')),
357 358 ('r', 'rev', '', _('revision to distribute'), _('REV')),
358 359 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
359 360 ] + subrepoopts + walkopts,
360 361 _('[OPTION]... DEST'))
361 362 def archive(ui, repo, dest, **opts):
362 363 '''create an unversioned archive of a repository revision
363 364
364 365 By default, the revision used is the parent of the working
365 366 directory; use -r/--rev to specify a different revision.
366 367
367 368 The archive type is automatically detected based on file
368 369 extension (or override using -t/--type).
369 370
370 371 .. container:: verbose
371 372
372 373 Examples:
373 374
374 375 - create a zip file containing the 1.0 release::
375 376
376 377 hg archive -r 1.0 project-1.0.zip
377 378
378 379 - create a tarball excluding .hg files::
379 380
380 381 hg archive project.tar.gz -X ".hg*"
381 382
382 383 Valid types are:
383 384
384 385 :``files``: a directory full of files (default)
385 386 :``tar``: tar archive, uncompressed
386 387 :``tbz2``: tar archive, compressed using bzip2
387 388 :``tgz``: tar archive, compressed using gzip
388 389 :``uzip``: zip archive, uncompressed
389 390 :``zip``: zip archive, compressed using deflate
390 391
391 392 The exact name of the destination archive or directory is given
392 393 using a format string; see :hg:`help export` for details.
393 394
394 395 Each member added to an archive file has a directory prefix
395 396 prepended. Use -p/--prefix to specify a format string for the
396 397 prefix. The default is the basename of the archive, with suffixes
397 398 removed.
398 399
399 400 Returns 0 on success.
400 401 '''
401 402
402 403 ctx = scmutil.revsingle(repo, opts.get('rev'))
403 404 if not ctx:
404 405 raise util.Abort(_('no working directory: please specify a revision'))
405 406 node = ctx.node()
406 407 dest = cmdutil.makefilename(repo, dest, node)
407 408 if os.path.realpath(dest) == repo.root:
408 409 raise util.Abort(_('repository root cannot be destination'))
409 410
410 411 kind = opts.get('type') or archival.guesskind(dest) or 'files'
411 412 prefix = opts.get('prefix')
412 413
413 414 if dest == '-':
414 415 if kind == 'files':
415 416 raise util.Abort(_('cannot archive plain files to stdout'))
416 417 dest = cmdutil.makefileobj(repo, dest)
417 418 if not prefix:
418 419 prefix = os.path.basename(repo.root) + '-%h'
419 420
420 421 prefix = cmdutil.makefilename(repo, prefix, node)
421 422 matchfn = scmutil.match(ctx, [], opts)
422 423 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
423 424 matchfn, prefix, subrepos=opts.get('subrepos'))
424 425
425 426 @command('backout',
426 427 [('', 'merge', None, _('merge with old dirstate parent after backout')),
427 428 ('', 'parent', '',
428 429 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
429 430 ('r', 'rev', '', _('revision to backout'), _('REV')),
430 431 ('e', 'edit', False, _('invoke editor on commit messages')),
431 432 ] + mergetoolopts + walkopts + commitopts + commitopts2,
432 433 _('[OPTION]... [-r] REV'))
433 434 def backout(ui, repo, node=None, rev=None, **opts):
434 435 '''reverse effect of earlier changeset
435 436
436 437 Prepare a new changeset with the effect of REV undone in the
437 438 current working directory.
438 439
439 440 If REV is the parent of the working directory, then this new changeset
440 441 is committed automatically. Otherwise, hg needs to merge the
441 442 changes and the merged result is left uncommitted.
442 443
443 444 .. note::
444 445
445 446 backout cannot be used to fix either an unwanted or
446 447 incorrect merge.
447 448
448 449 .. container:: verbose
449 450
450 451 By default, the pending changeset will have one parent,
451 452 maintaining a linear history. With --merge, the pending
452 453 changeset will instead have two parents: the old parent of the
453 454 working directory and a new child of REV that simply undoes REV.
454 455
455 456 Before version 1.7, the behavior without --merge was equivalent
456 457 to specifying --merge followed by :hg:`update --clean .` to
457 458 cancel the merge and leave the child of REV as a head to be
458 459 merged separately.
459 460
460 461 See :hg:`help dates` for a list of formats valid for -d/--date.
461 462
462 463 Returns 0 on success, 1 if nothing to backout or there are unresolved
463 464 files.
464 465 '''
465 466 if rev and node:
466 467 raise util.Abort(_("please specify just one revision"))
467 468
468 469 if not rev:
469 470 rev = node
470 471
471 472 if not rev:
472 473 raise util.Abort(_("please specify a revision to backout"))
473 474
474 475 date = opts.get('date')
475 476 if date:
476 477 opts['date'] = util.parsedate(date)
477 478
478 479 cmdutil.checkunfinished(repo)
479 480 cmdutil.bailifchanged(repo)
480 481 node = scmutil.revsingle(repo, rev).node()
481 482
482 483 op1, op2 = repo.dirstate.parents()
483 484 if not repo.changelog.isancestor(node, op1):
484 485 raise util.Abort(_('cannot backout change that is not an ancestor'))
485 486
486 487 p1, p2 = repo.changelog.parents(node)
487 488 if p1 == nullid:
488 489 raise util.Abort(_('cannot backout a change with no parents'))
489 490 if p2 != nullid:
490 491 if not opts.get('parent'):
491 492 raise util.Abort(_('cannot backout a merge changeset'))
492 493 p = repo.lookup(opts['parent'])
493 494 if p not in (p1, p2):
494 495 raise util.Abort(_('%s is not a parent of %s') %
495 496 (short(p), short(node)))
496 497 parent = p
497 498 else:
498 499 if opts.get('parent'):
499 500 raise util.Abort(_('cannot use --parent on non-merge changeset'))
500 501 parent = p1
501 502
502 503 # the backout should appear on the same branch
503 504 wlock = repo.wlock()
504 505 try:
505 506 branch = repo.dirstate.branch()
506 507 bheads = repo.branchheads(branch)
507 508 rctx = scmutil.revsingle(repo, hex(parent))
508 509 if not opts.get('merge') and op1 != node:
509 510 try:
510 511 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
511 512 'backout')
512 513 repo.dirstate.beginparentchange()
513 514 stats = mergemod.update(repo, parent, True, True, False,
514 515 node, False)
515 516 repo.setparents(op1, op2)
516 517 repo.dirstate.endparentchange()
517 518 hg._showstats(repo, stats)
518 519 if stats[3]:
519 520 repo.ui.status(_("use 'hg resolve' to retry unresolved "
520 521 "file merges\n"))
521 522 else:
522 523 msg = _("changeset %s backed out, "
523 524 "don't forget to commit.\n")
524 525 ui.status(msg % short(node))
525 526 return stats[3] > 0
526 527 finally:
527 528 ui.setconfig('ui', 'forcemerge', '', '')
528 529 else:
529 530 hg.clean(repo, node, show_stats=False)
530 531 repo.dirstate.setbranch(branch)
531 532 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
532 533
533 534
534 535 def commitfunc(ui, repo, message, match, opts):
535 536 editform = 'backout'
536 537 e = cmdutil.getcommiteditor(editform=editform, **opts)
537 538 if not message:
538 539 # we don't translate commit messages
539 540 message = "Backed out changeset %s" % short(node)
540 541 e = cmdutil.getcommiteditor(edit=True, editform=editform)
541 542 return repo.commit(message, opts.get('user'), opts.get('date'),
542 543 match, editor=e)
543 544 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
544 545 if not newnode:
545 546 ui.status(_("nothing changed\n"))
546 547 return 1
547 548 cmdutil.commitstatus(repo, newnode, branch, bheads)
548 549
549 550 def nice(node):
550 551 return '%d:%s' % (repo.changelog.rev(node), short(node))
551 552 ui.status(_('changeset %s backs out changeset %s\n') %
552 553 (nice(repo.changelog.tip()), nice(node)))
553 554 if opts.get('merge') and op1 != node:
554 555 hg.clean(repo, op1, show_stats=False)
555 556 ui.status(_('merging with changeset %s\n')
556 557 % nice(repo.changelog.tip()))
557 558 try:
558 559 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
559 560 'backout')
560 561 return hg.merge(repo, hex(repo.changelog.tip()))
561 562 finally:
562 563 ui.setconfig('ui', 'forcemerge', '', '')
563 564 finally:
564 565 wlock.release()
565 566 return 0
566 567
567 568 @command('bisect',
568 569 [('r', 'reset', False, _('reset bisect state')),
569 570 ('g', 'good', False, _('mark changeset good')),
570 571 ('b', 'bad', False, _('mark changeset bad')),
571 572 ('s', 'skip', False, _('skip testing changeset')),
572 573 ('e', 'extend', False, _('extend the bisect range')),
573 574 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
574 575 ('U', 'noupdate', False, _('do not update to target'))],
575 576 _("[-gbsr] [-U] [-c CMD] [REV]"))
576 577 def bisect(ui, repo, rev=None, extra=None, command=None,
577 578 reset=None, good=None, bad=None, skip=None, extend=None,
578 579 noupdate=None):
579 580 """subdivision search of changesets
580 581
581 582 This command helps to find changesets which introduce problems. To
582 583 use, mark the earliest changeset you know exhibits the problem as
583 584 bad, then mark the latest changeset which is free from the problem
584 585 as good. Bisect will update your working directory to a revision
585 586 for testing (unless the -U/--noupdate option is specified). Once
586 587 you have performed tests, mark the working directory as good or
587 588 bad, and bisect will either update to another candidate changeset
588 589 or announce that it has found the bad revision.
589 590
590 591 As a shortcut, you can also use the revision argument to mark a
591 592 revision as good or bad without checking it out first.
592 593
593 594 If you supply a command, it will be used for automatic bisection.
594 595 The environment variable HG_NODE will contain the ID of the
595 596 changeset being tested. The exit status of the command will be
596 597 used to mark revisions as good or bad: status 0 means good, 125
597 598 means to skip the revision, 127 (command not found) will abort the
598 599 bisection, and any other non-zero exit status means the revision
599 600 is bad.
600 601
601 602 .. container:: verbose
602 603
603 604 Some examples:
604 605
605 606 - start a bisection with known bad revision 34, and good revision 12::
606 607
607 608 hg bisect --bad 34
608 609 hg bisect --good 12
609 610
610 611 - advance the current bisection by marking current revision as good or
611 612 bad::
612 613
613 614 hg bisect --good
614 615 hg bisect --bad
615 616
616 617 - mark the current revision, or a known revision, to be skipped (e.g. if
617 618 that revision is not usable because of another issue)::
618 619
619 620 hg bisect --skip
620 621 hg bisect --skip 23
621 622
622 623 - skip all revisions that do not touch directories ``foo`` or ``bar``::
623 624
624 625 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
625 626
626 627 - forget the current bisection::
627 628
628 629 hg bisect --reset
629 630
630 631 - use 'make && make tests' to automatically find the first broken
631 632 revision::
632 633
633 634 hg bisect --reset
634 635 hg bisect --bad 34
635 636 hg bisect --good 12
636 637 hg bisect --command "make && make tests"
637 638
638 639 - see all changesets whose states are already known in the current
639 640 bisection::
640 641
641 642 hg log -r "bisect(pruned)"
642 643
643 644 - see the changeset currently being bisected (especially useful
644 645 if running with -U/--noupdate)::
645 646
646 647 hg log -r "bisect(current)"
647 648
648 649 - see all changesets that took part in the current bisection::
649 650
650 651 hg log -r "bisect(range)"
651 652
652 653 - you can even get a nice graph::
653 654
654 655 hg log --graph -r "bisect(range)"
655 656
656 657 See :hg:`help revsets` for more about the `bisect()` keyword.
657 658
658 659 Returns 0 on success.
659 660 """
660 661 def extendbisectrange(nodes, good):
661 662 # bisect is incomplete when it ends on a merge node and
662 663 # one of the parents was not checked.
663 664 parents = repo[nodes[0]].parents()
664 665 if len(parents) > 1:
665 666 side = good and state['bad'] or state['good']
666 667 num = len(set(i.node() for i in parents) & set(side))
667 668 if num == 1:
668 669 return parents[0].ancestor(parents[1])
669 670 return None
670 671
671 672 def print_result(nodes, good):
672 673 displayer = cmdutil.show_changeset(ui, repo, {})
673 674 if len(nodes) == 1:
674 675 # narrowed it down to a single revision
675 676 if good:
676 677 ui.write(_("The first good revision is:\n"))
677 678 else:
678 679 ui.write(_("The first bad revision is:\n"))
679 680 displayer.show(repo[nodes[0]])
680 681 extendnode = extendbisectrange(nodes, good)
681 682 if extendnode is not None:
682 683 ui.write(_('Not all ancestors of this changeset have been'
683 684 ' checked.\nUse bisect --extend to continue the '
684 685 'bisection from\nthe common ancestor, %s.\n')
685 686 % extendnode)
686 687 else:
687 688 # multiple possible revisions
688 689 if good:
689 690 ui.write(_("Due to skipped revisions, the first "
690 691 "good revision could be any of:\n"))
691 692 else:
692 693 ui.write(_("Due to skipped revisions, the first "
693 694 "bad revision could be any of:\n"))
694 695 for n in nodes:
695 696 displayer.show(repo[n])
696 697 displayer.close()
697 698
698 699 def check_state(state, interactive=True):
699 700 if not state['good'] or not state['bad']:
700 701 if (good or bad or skip or reset) and interactive:
701 702 return
702 703 if not state['good']:
703 704 raise util.Abort(_('cannot bisect (no known good revisions)'))
704 705 else:
705 706 raise util.Abort(_('cannot bisect (no known bad revisions)'))
706 707 return True
707 708
708 709 # backward compatibility
709 710 if rev in "good bad reset init".split():
710 711 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
711 712 cmd, rev, extra = rev, extra, None
712 713 if cmd == "good":
713 714 good = True
714 715 elif cmd == "bad":
715 716 bad = True
716 717 else:
717 718 reset = True
718 719 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
719 720 raise util.Abort(_('incompatible arguments'))
720 721
721 722 cmdutil.checkunfinished(repo)
722 723
723 724 if reset:
724 725 p = repo.join("bisect.state")
725 726 if os.path.exists(p):
726 727 os.unlink(p)
727 728 return
728 729
729 730 state = hbisect.load_state(repo)
730 731
731 732 if command:
732 733 changesets = 1
733 734 if noupdate:
734 735 try:
735 736 node = state['current'][0]
736 737 except LookupError:
737 738 raise util.Abort(_('current bisect revision is unknown - '
738 739 'start a new bisect to fix'))
739 740 else:
740 741 node, p2 = repo.dirstate.parents()
741 742 if p2 != nullid:
742 743 raise util.Abort(_('current bisect revision is a merge'))
743 744 try:
744 745 while changesets:
745 746 # update state
746 747 state['current'] = [node]
747 748 hbisect.save_state(repo, state)
748 749 status = ui.system(command, environ={'HG_NODE': hex(node)})
749 750 if status == 125:
750 751 transition = "skip"
751 752 elif status == 0:
752 753 transition = "good"
753 754 # status < 0 means process was killed
754 755 elif status == 127:
755 756 raise util.Abort(_("failed to execute %s") % command)
756 757 elif status < 0:
757 758 raise util.Abort(_("%s killed") % command)
758 759 else:
759 760 transition = "bad"
760 761 ctx = scmutil.revsingle(repo, rev, node)
761 762 rev = None # clear for future iterations
762 763 state[transition].append(ctx.node())
763 764 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
764 765 check_state(state, interactive=False)
765 766 # bisect
766 767 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
767 768 # update to next check
768 769 node = nodes[0]
769 770 if not noupdate:
770 771 cmdutil.bailifchanged(repo)
771 772 hg.clean(repo, node, show_stats=False)
772 773 finally:
773 774 state['current'] = [node]
774 775 hbisect.save_state(repo, state)
775 776 print_result(nodes, bgood)
776 777 return
777 778
778 779 # update state
779 780
780 781 if rev:
781 782 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
782 783 else:
783 784 nodes = [repo.lookup('.')]
784 785
785 786 if good or bad or skip:
786 787 if good:
787 788 state['good'] += nodes
788 789 elif bad:
789 790 state['bad'] += nodes
790 791 elif skip:
791 792 state['skip'] += nodes
792 793 hbisect.save_state(repo, state)
793 794
794 795 if not check_state(state):
795 796 return
796 797
797 798 # actually bisect
798 799 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
799 800 if extend:
800 801 if not changesets:
801 802 extendnode = extendbisectrange(nodes, good)
802 803 if extendnode is not None:
803 804 ui.write(_("Extending search to changeset %d:%s\n")
804 805 % (extendnode.rev(), extendnode))
805 806 state['current'] = [extendnode.node()]
806 807 hbisect.save_state(repo, state)
807 808 if noupdate:
808 809 return
809 810 cmdutil.bailifchanged(repo)
810 811 return hg.clean(repo, extendnode.node())
811 812 raise util.Abort(_("nothing to extend"))
812 813
813 814 if changesets == 0:
814 815 print_result(nodes, good)
815 816 else:
816 817 assert len(nodes) == 1 # only a single node can be tested next
817 818 node = nodes[0]
818 819 # compute the approximate number of remaining tests
819 820 tests, size = 0, 2
820 821 while size <= changesets:
821 822 tests, size = tests + 1, size * 2
822 823 rev = repo.changelog.rev(node)
823 824 ui.write(_("Testing changeset %d:%s "
824 825 "(%d changesets remaining, ~%d tests)\n")
825 826 % (rev, short(node), changesets, tests))
826 827 state['current'] = [node]
827 828 hbisect.save_state(repo, state)
828 829 if not noupdate:
829 830 cmdutil.bailifchanged(repo)
830 831 return hg.clean(repo, node)
831 832
832 833 @command('bookmarks|bookmark',
833 834 [('f', 'force', False, _('force')),
834 835 ('r', 'rev', '', _('revision'), _('REV')),
835 836 ('d', 'delete', False, _('delete a given bookmark')),
836 837 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
837 838 ('i', 'inactive', False, _('mark a bookmark inactive')),
838 839 ] + formatteropts,
839 840 _('hg bookmarks [OPTIONS]... [NAME]...'))
840 841 def bookmark(ui, repo, *names, **opts):
841 842 '''create a new bookmark or list existing bookmarks
842 843
843 844 Bookmarks are labels on changesets to help track lines of development.
844 845 Bookmarks are unversioned and can be moved, renamed and deleted.
845 846 Deleting or moving a bookmark has no effect on the associated changesets.
846 847
847 848 Creating or updating to a bookmark causes it to be marked as 'active'.
848 849 The active bookmark is indicated with a '*'.
849 850 When a commit is made, the active bookmark will advance to the new commit.
850 851 A plain :hg:`update` will also advance an active bookmark, if possible.
851 852 Updating away from a bookmark will cause it to be deactivated.
852 853
853 854 Bookmarks can be pushed and pulled between repositories (see
854 855 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
855 856 diverged, a new 'divergent bookmark' of the form 'name@path' will
856 857 be created. Using :hg:`merge` will resolve the divergence.
857 858
858 859 A bookmark named '@' has the special property that :hg:`clone` will
859 860 check it out by default if it exists.
860 861
861 862 .. container:: verbose
862 863
863 864 Examples:
864 865
865 866 - create an active bookmark for a new line of development::
866 867
867 868 hg book new-feature
868 869
869 870 - create an inactive bookmark as a place marker::
870 871
871 872 hg book -i reviewed
872 873
873 874 - create an inactive bookmark on another changeset::
874 875
875 876 hg book -r .^ tested
876 877
877 878 - move the '@' bookmark from another branch::
878 879
879 880 hg book -f @
880 881 '''
881 882 force = opts.get('force')
882 883 rev = opts.get('rev')
883 884 delete = opts.get('delete')
884 885 rename = opts.get('rename')
885 886 inactive = opts.get('inactive')
886 887
887 888 def checkformat(mark):
888 889 mark = mark.strip()
889 890 if not mark:
890 891 raise util.Abort(_("bookmark names cannot consist entirely of "
891 892 "whitespace"))
892 893 scmutil.checknewlabel(repo, mark, 'bookmark')
893 894 return mark
894 895
895 896 def checkconflict(repo, mark, cur, force=False, target=None):
896 897 if mark in marks and not force:
897 898 if target:
898 899 if marks[mark] == target and target == cur:
899 900 # re-activating a bookmark
900 901 return
901 902 anc = repo.changelog.ancestors([repo[target].rev()])
902 903 bmctx = repo[marks[mark]]
903 904 divs = [repo[b].node() for b in marks
904 905 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
905 906
906 907 # allow resolving a single divergent bookmark even if moving
907 908 # the bookmark across branches when a revision is specified
908 909 # that contains a divergent bookmark
909 910 if bmctx.rev() not in anc and target in divs:
910 911 bookmarks.deletedivergent(repo, [target], mark)
911 912 return
912 913
913 914 deletefrom = [b for b in divs
914 915 if repo[b].rev() in anc or b == target]
915 916 bookmarks.deletedivergent(repo, deletefrom, mark)
916 917 if bookmarks.validdest(repo, bmctx, repo[target]):
917 918 ui.status(_("moving bookmark '%s' forward from %s\n") %
918 919 (mark, short(bmctx.node())))
919 920 return
920 921 raise util.Abort(_("bookmark '%s' already exists "
921 922 "(use -f to force)") % mark)
922 923 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
923 924 and not force):
924 925 raise util.Abort(
925 926 _("a bookmark cannot have the name of an existing branch"))
926 927
927 928 if delete and rename:
928 929 raise util.Abort(_("--delete and --rename are incompatible"))
929 930 if delete and rev:
930 931 raise util.Abort(_("--rev is incompatible with --delete"))
931 932 if rename and rev:
932 933 raise util.Abort(_("--rev is incompatible with --rename"))
933 934 if not names and (delete or rev):
934 935 raise util.Abort(_("bookmark name required"))
935 936
936 937 if delete or rename or names or inactive:
937 938 wlock = repo.wlock()
938 939 try:
939 940 cur = repo.changectx('.').node()
940 941 marks = repo._bookmarks
941 942 if delete:
942 943 for mark in names:
943 944 if mark not in marks:
944 945 raise util.Abort(_("bookmark '%s' does not exist") %
945 946 mark)
946 947 if mark == repo._bookmarkcurrent:
947 948 bookmarks.unsetcurrent(repo)
948 949 del marks[mark]
949 950 marks.write()
950 951
951 952 elif rename:
952 953 if not names:
953 954 raise util.Abort(_("new bookmark name required"))
954 955 elif len(names) > 1:
955 956 raise util.Abort(_("only one new bookmark name allowed"))
956 957 mark = checkformat(names[0])
957 958 if rename not in marks:
958 959 raise util.Abort(_("bookmark '%s' does not exist") % rename)
959 960 checkconflict(repo, mark, cur, force)
960 961 marks[mark] = marks[rename]
961 962 if repo._bookmarkcurrent == rename and not inactive:
962 963 bookmarks.setcurrent(repo, mark)
963 964 del marks[rename]
964 965 marks.write()
965 966
966 967 elif names:
967 968 newact = None
968 969 for mark in names:
969 970 mark = checkformat(mark)
970 971 if newact is None:
971 972 newact = mark
972 973 if inactive and mark == repo._bookmarkcurrent:
973 974 bookmarks.unsetcurrent(repo)
974 975 return
975 976 tgt = cur
976 977 if rev:
977 978 tgt = scmutil.revsingle(repo, rev).node()
978 979 checkconflict(repo, mark, cur, force, tgt)
979 980 marks[mark] = tgt
980 981 if not inactive and cur == marks[newact] and not rev:
981 982 bookmarks.setcurrent(repo, newact)
982 983 elif cur != tgt and newact == repo._bookmarkcurrent:
983 984 bookmarks.unsetcurrent(repo)
984 985 marks.write()
985 986
986 987 elif inactive:
987 988 if len(marks) == 0:
988 989 ui.status(_("no bookmarks set\n"))
989 990 elif not repo._bookmarkcurrent:
990 991 ui.status(_("no active bookmark\n"))
991 992 else:
992 993 bookmarks.unsetcurrent(repo)
993 994 finally:
994 995 wlock.release()
995 996 else: # show bookmarks
996 997 fm = ui.formatter('bookmarks', opts)
997 998 hexfn = fm.hexfunc
998 999 marks = repo._bookmarks
999 1000 if len(marks) == 0 and not fm:
1000 1001 ui.status(_("no bookmarks set\n"))
1001 1002 for bmark, n in sorted(marks.iteritems()):
1002 1003 current = repo._bookmarkcurrent
1003 1004 if bmark == current:
1004 1005 prefix, label = '*', 'bookmarks.current'
1005 1006 else:
1006 1007 prefix, label = ' ', ''
1007 1008
1008 1009 fm.startitem()
1009 1010 if not ui.quiet:
1010 1011 fm.plain(' %s ' % prefix, label=label)
1011 1012 fm.write('bookmark', '%s', bmark, label=label)
1012 1013 pad = " " * (25 - encoding.colwidth(bmark))
1013 1014 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1014 1015 repo.changelog.rev(n), hexfn(n), label=label)
1015 1016 fm.data(active=(bmark == current))
1016 1017 fm.plain('\n')
1017 1018 fm.end()
1018 1019
1019 1020 @command('branch',
1020 1021 [('f', 'force', None,
1021 1022 _('set branch name even if it shadows an existing branch')),
1022 1023 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1023 1024 _('[-fC] [NAME]'))
1024 1025 def branch(ui, repo, label=None, **opts):
1025 1026 """set or show the current branch name
1026 1027
1027 1028 .. note::
1028 1029
1029 1030 Branch names are permanent and global. Use :hg:`bookmark` to create a
1030 1031 light-weight bookmark instead. See :hg:`help glossary` for more
1031 1032 information about named branches and bookmarks.
1032 1033
1033 1034 With no argument, show the current branch name. With one argument,
1034 1035 set the working directory branch name (the branch will not exist
1035 1036 in the repository until the next commit). Standard practice
1036 1037 recommends that primary development take place on the 'default'
1037 1038 branch.
1038 1039
1039 1040 Unless -f/--force is specified, branch will not let you set a
1040 1041 branch name that already exists, even if it's inactive.
1041 1042
1042 1043 Use -C/--clean to reset the working directory branch to that of
1043 1044 the parent of the working directory, negating a previous branch
1044 1045 change.
1045 1046
1046 1047 Use the command :hg:`update` to switch to an existing branch. Use
1047 1048 :hg:`commit --close-branch` to mark this branch as closed.
1048 1049
1049 1050 Returns 0 on success.
1050 1051 """
1051 1052 if label:
1052 1053 label = label.strip()
1053 1054
1054 1055 if not opts.get('clean') and not label:
1055 1056 ui.write("%s\n" % repo.dirstate.branch())
1056 1057 return
1057 1058
1058 1059 wlock = repo.wlock()
1059 1060 try:
1060 1061 if opts.get('clean'):
1061 1062 label = repo[None].p1().branch()
1062 1063 repo.dirstate.setbranch(label)
1063 1064 ui.status(_('reset working directory to branch %s\n') % label)
1064 1065 elif label:
1065 1066 if not opts.get('force') and label in repo.branchmap():
1066 1067 if label not in [p.branch() for p in repo.parents()]:
1067 1068 raise util.Abort(_('a branch of the same name already'
1068 1069 ' exists'),
1069 1070 # i18n: "it" refers to an existing branch
1070 1071 hint=_("use 'hg update' to switch to it"))
1071 1072 scmutil.checknewlabel(repo, label, 'branch')
1072 1073 repo.dirstate.setbranch(label)
1073 1074 ui.status(_('marked working directory as branch %s\n') % label)
1074 1075 ui.status(_('(branches are permanent and global, '
1075 1076 'did you want a bookmark?)\n'))
1076 1077 finally:
1077 1078 wlock.release()
1078 1079
1079 1080 @command('branches',
1080 1081 [('a', 'active', False, _('show only branches that have unmerged heads')),
1081 1082 ('c', 'closed', False, _('show normal and closed branches')),
1082 1083 ] + formatteropts,
1083 1084 _('[-ac]'))
1084 1085 def branches(ui, repo, active=False, closed=False, **opts):
1085 1086 """list repository named branches
1086 1087
1087 1088 List the repository's named branches, indicating which ones are
1088 1089 inactive. If -c/--closed is specified, also list branches which have
1089 1090 been marked closed (see :hg:`commit --close-branch`).
1090 1091
1091 1092 If -a/--active is specified, only show active branches. A branch
1092 1093 is considered active if it contains repository heads.
1093 1094
1094 1095 Use the command :hg:`update` to switch to an existing branch.
1095 1096
1096 1097 Returns 0.
1097 1098 """
1098 1099
1099 1100 fm = ui.formatter('branches', opts)
1100 1101 hexfunc = fm.hexfunc
1101 1102
1102 1103 allheads = set(repo.heads())
1103 1104 branches = []
1104 1105 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1105 1106 isactive = not isclosed and bool(set(heads) & allheads)
1106 1107 branches.append((tag, repo[tip], isactive, not isclosed))
1107 1108 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1108 1109 reverse=True)
1109 1110
1110 1111 for tag, ctx, isactive, isopen in branches:
1111 1112 if active and not isactive:
1112 1113 continue
1113 1114 if isactive:
1114 1115 label = 'branches.active'
1115 1116 notice = ''
1116 1117 elif not isopen:
1117 1118 if not closed:
1118 1119 continue
1119 1120 label = 'branches.closed'
1120 1121 notice = _(' (closed)')
1121 1122 else:
1122 1123 label = 'branches.inactive'
1123 1124 notice = _(' (inactive)')
1124 1125 current = (tag == repo.dirstate.branch())
1125 1126 if current:
1126 1127 label = 'branches.current'
1127 1128
1128 1129 fm.startitem()
1129 1130 fm.write('branch', '%s', tag, label=label)
1130 1131 rev = ctx.rev()
1131 1132 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1132 1133 fmt = ' ' * padsize + ' %d:%s'
1133 1134 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1134 1135 label='log.changeset changeset.%s' % ctx.phasestr())
1135 1136 fm.data(active=isactive, closed=not isopen, current=current)
1136 1137 if not ui.quiet:
1137 1138 fm.plain(notice)
1138 1139 fm.plain('\n')
1139 1140 fm.end()
1140 1141
1141 1142 @command('bundle',
1142 1143 [('f', 'force', None, _('run even when the destination is unrelated')),
1143 1144 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1144 1145 _('REV')),
1145 1146 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1146 1147 _('BRANCH')),
1147 1148 ('', 'base', [],
1148 1149 _('a base changeset assumed to be available at the destination'),
1149 1150 _('REV')),
1150 1151 ('a', 'all', None, _('bundle all changesets in the repository')),
1151 1152 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1152 1153 ] + remoteopts,
1153 1154 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1154 1155 def bundle(ui, repo, fname, dest=None, **opts):
1155 1156 """create a changegroup file
1156 1157
1157 1158 Generate a compressed changegroup file collecting changesets not
1158 1159 known to be in another repository.
1159 1160
1160 1161 If you omit the destination repository, then hg assumes the
1161 1162 destination will have all the nodes you specify with --base
1162 1163 parameters. To create a bundle containing all changesets, use
1163 1164 -a/--all (or --base null).
1164 1165
1165 1166 You can change compression method with the -t/--type option.
1166 1167 The available compression methods are: none, bzip2, and
1167 1168 gzip (by default, bundles are compressed using bzip2).
1168 1169
1169 1170 The bundle file can then be transferred using conventional means
1170 1171 and applied to another repository with the unbundle or pull
1171 1172 command. This is useful when direct push and pull are not
1172 1173 available or when exporting an entire repository is undesirable.
1173 1174
1174 1175 Applying bundles preserves all changeset contents including
1175 1176 permissions, copy/rename information, and revision history.
1176 1177
1177 1178 Returns 0 on success, 1 if no changes found.
1178 1179 """
1179 1180 revs = None
1180 1181 if 'rev' in opts:
1181 1182 revs = scmutil.revrange(repo, opts['rev'])
1182 1183
1183 1184 bundletype = opts.get('type', 'bzip2').lower()
1184 1185 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1185 1186 bundletype = btypes.get(bundletype)
1186 1187 if bundletype not in changegroup.bundletypes:
1187 1188 raise util.Abort(_('unknown bundle type specified with --type'))
1188 1189
1189 1190 if opts.get('all'):
1190 1191 base = ['null']
1191 1192 else:
1192 1193 base = scmutil.revrange(repo, opts.get('base'))
1193 1194 # TODO: get desired bundlecaps from command line.
1194 1195 bundlecaps = None
1195 1196 if base:
1196 1197 if dest:
1197 1198 raise util.Abort(_("--base is incompatible with specifying "
1198 1199 "a destination"))
1199 1200 common = [repo.lookup(rev) for rev in base]
1200 1201 heads = revs and map(repo.lookup, revs) or revs
1201 1202 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1202 1203 common=common, bundlecaps=bundlecaps)
1203 1204 outgoing = None
1204 1205 else:
1205 1206 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1206 1207 dest, branches = hg.parseurl(dest, opts.get('branch'))
1207 1208 other = hg.peer(repo, opts, dest)
1208 1209 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1209 1210 heads = revs and map(repo.lookup, revs) or revs
1210 1211 outgoing = discovery.findcommonoutgoing(repo, other,
1211 1212 onlyheads=heads,
1212 1213 force=opts.get('force'),
1213 1214 portable=True)
1214 1215 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1215 1216 bundlecaps)
1216 1217 if not cg:
1217 1218 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1218 1219 return 1
1219 1220
1220 1221 changegroup.writebundle(cg, fname, bundletype)
1221 1222
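The bundle docstring above explains --base and --all without showing invocations; a few illustrative command lines (file names and revisions invented):

# hg bundle --base null everything.hg        -> bundle every changeset
# hg bundle --base 1.0 -r tip since-1.0.hg   -> ancestors of tip not in 1.0's history
# hg bundle changes.hg ../other-clone        -> changesets missing from ../other-clone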
1222 1223 @command('cat',
1223 1224 [('o', 'output', '',
1224 1225 _('print output to file with formatted name'), _('FORMAT')),
1225 1226 ('r', 'rev', '', _('print the given revision'), _('REV')),
1226 1227 ('', 'decode', None, _('apply any matching decode filter')),
1227 1228 ] + walkopts,
1228 1229 _('[OPTION]... FILE...'),
1229 1230 inferrepo=True)
1230 1231 def cat(ui, repo, file1, *pats, **opts):
1231 1232 """output the current or given revision of files
1232 1233
1233 1234 Print the specified files as they were at the given revision. If
1234 1235 no revision is given, the parent of the working directory is used.
1235 1236
1236 1237 Output may be to a file, in which case the name of the file is
1237 1238 given using a format string. The formatting rules are as follows:
1238 1239
1239 1240 :``%%``: literal "%" character
1240 1241 :``%s``: basename of file being printed
1241 1242 :``%d``: dirname of file being printed, or '.' if in repository root
1242 1243 :``%p``: root-relative path name of file being printed
1243 1244 :``%H``: changeset hash (40 hexadecimal digits)
1244 1245 :``%R``: changeset revision number
1245 1246 :``%h``: short-form changeset hash (12 hexadecimal digits)
1246 1247 :``%r``: zero-padded changeset revision number
1247 1248 :``%b``: basename of the exporting repository
1248 1249
1249 1250 Returns 0 on success.
1250 1251 """
1251 1252 ctx = scmutil.revsingle(repo, opts.get('rev'))
1252 1253 m = scmutil.match(ctx, (file1,) + pats, opts)
1253 1254
1254 1255 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1255 1256
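To illustrate the -o/--output format rules listed in the cat docstring above (path, revision number, and hash are invented):

# For src/foo.c at revision 42 with short hash abcdef123456:
#   hg cat -r 42 -o '%d/%s.%R' src/foo.c   -> expands to src/foo.c.42
#   hg cat -r 42 -o '%h_%p'    src/foo.c   -> expands to abcdef123456_src/foo.c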
1256 1257 @command('^clone',
1257 1258 [('U', 'noupdate', None,
1258 1259 _('the clone will include an empty working copy (only a repository)')),
1259 1260 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1260 1261 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1261 1262 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1262 1263 ('', 'pull', None, _('use pull protocol to copy metadata')),
1263 1264 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1264 1265 ] + remoteopts,
1265 1266 _('[OPTION]... SOURCE [DEST]'),
1266 1267 norepo=True)
1267 1268 def clone(ui, source, dest=None, **opts):
1268 1269 """make a copy of an existing repository
1269 1270
1270 1271 Create a copy of an existing repository in a new directory.
1271 1272
1272 1273 If no destination directory name is specified, it defaults to the
1273 1274 basename of the source.
1274 1275
1275 1276 The location of the source is added to the new repository's
1276 1277 ``.hg/hgrc`` file, as the default to be used for future pulls.
1277 1278
1278 1279 Only local paths and ``ssh://`` URLs are supported as
1279 1280 destinations. For ``ssh://`` destinations, no working directory or
1280 1281 ``.hg/hgrc`` will be created on the remote side.
1281 1282
1282 1283 To pull only a subset of changesets, specify one or more revisions
1283 1284 identifiers with -r/--rev or branches with -b/--branch. The
1284 1285 resulting clone will contain only the specified changesets and
1285 1286 their ancestors. These options (or 'clone src#rev dest') imply
1286 1287 --pull, even for local source repositories. Note that specifying a
1287 1288 tag will include the tagged changeset but not the changeset
1288 1289 containing the tag.
1289 1290
1290 1291 If the source repository has a bookmark called '@' set, that
1291 1292 revision will be checked out in the new repository by default.
1292 1293
1293 1294 To check out a particular version, use -u/--update, or
1294 1295 -U/--noupdate to create a clone with no working directory.
1295 1296
1296 1297 .. container:: verbose
1297 1298
1298 1299 For efficiency, hardlinks are used for cloning whenever the
1299 1300 source and destination are on the same filesystem (note this
1300 1301 applies only to the repository data, not to the working
1301 1302 directory). Some filesystems, such as AFS, implement hardlinking
1302 1303 incorrectly, but do not report errors. In these cases, use the
1303 1304 --pull option to avoid hardlinking.
1304 1305
1305 1306 In some cases, you can clone repositories and the working
1306 1307 directory using full hardlinks with ::
1307 1308
1308 1309 $ cp -al REPO REPOCLONE
1309 1310
1310 1311 This is the fastest way to clone, but it is not always safe. The
1311 1312 operation is not atomic (making sure REPO is not modified during
1312 1313 the operation is up to you) and you have to make sure your
1313 1314 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1314 1315 so). Also, this is not compatible with certain extensions that
1315 1316 place their metadata under the .hg directory, such as mq.
1316 1317
1317 1318 Mercurial will update the working directory to the first applicable
1318 1319 revision from this list:
1319 1320
1320 1321 a) null if -U or the source repository has no changesets
1321 1322 b) if -u . and the source repository is local, the first parent of
1322 1323 the source repository's working directory
1323 1324 c) the changeset specified with -u (if a branch name, this means the
1324 1325 latest head of that branch)
1325 1326 d) the changeset specified with -r
1326 1327 e) the tipmost head specified with -b
1327 1328 f) the tipmost head specified with the url#branch source syntax
1328 1329 g) the revision marked with the '@' bookmark, if present
1329 1330 h) the tipmost head of the default branch
1330 1331 i) tip
1331 1332
1332 1333 Examples:
1333 1334
1334 1335 - clone a remote repository to a new directory named hg/::
1335 1336
1336 1337 hg clone http://selenic.com/hg
1337 1338
1338 1339 - create a lightweight local clone::
1339 1340
1340 1341 hg clone project/ project-feature/
1341 1342
1342 1343 - clone from an absolute path on an ssh server (note double-slash)::
1343 1344
1344 1345 hg clone ssh://user@server//home/projects/alpha/
1345 1346
1346 1347 - do a high-speed clone over a LAN while checking out a
1347 1348 specified version::
1348 1349
1349 1350 hg clone --uncompressed http://server/repo -u 1.5
1350 1351
1351 1352 - create a repository without changesets after a particular revision::
1352 1353
1353 1354 hg clone -r 04e544 experimental/ good/
1354 1355
1355 1356 - clone (and track) a particular named branch::
1356 1357
1357 1358 hg clone http://selenic.com/hg#stable
1358 1359
1359 1360 See :hg:`help urls` for details on specifying URLs.
1360 1361
1361 1362 Returns 0 on success.
1362 1363 """
1363 1364 if opts.get('noupdate') and opts.get('updaterev'):
1364 1365 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1365 1366
1366 1367 r = hg.clone(ui, opts, source, dest,
1367 1368 pull=opts.get('pull'),
1368 1369 stream=opts.get('uncompressed'),
1369 1370 rev=opts.get('rev'),
1370 1371 update=opts.get('updaterev') or not opts.get('noupdate'),
1371 1372 branch=opts.get('branch'))
1372 1373
1373 1374 return r is None
1374 1375
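# Minimal sketch (added for illustration, not part of the original file):
# driving the same hg.clone() call programmatically, mirroring the keyword
# arguments used above.  '_exampleclone' is a hypothetical helper name.
def _exampleclone(ui, source, dest=None):
    # roughly equivalent to `hg clone --pull SOURCE [DEST]`
    return hg.clone(ui, {}, source, dest,
                    pull=True,
                    stream=False,
                    rev=None,
                    update=True,
                    branch=None)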
1375 1376 @command('^commit|ci',
1376 1377 [('A', 'addremove', None,
1377 1378 _('mark new/missing files as added/removed before committing')),
1378 1379 ('', 'close-branch', None,
1379 1380 _('mark a branch as closed, hiding it from the branch list')),
1380 1381 ('', 'amend', None, _('amend the parent of the working dir')),
1381 1382 ('s', 'secret', None, _('use the secret phase for committing')),
1382 1383 ('e', 'edit', None, _('invoke editor on commit messages')),
1383 1384 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1384 1385 _('[OPTION]... [FILE]...'),
1385 1386 inferrepo=True)
1386 1387 def commit(ui, repo, *pats, **opts):
1387 1388 """commit the specified files or all outstanding changes
1388 1389
1389 1390 Commit changes to the given files into the repository. Unlike a
1390 1391 centralized SCM, this operation is a local operation. See
1391 1392 :hg:`push` for a way to actively distribute your changes.
1392 1393
1393 1394 If a list of files is omitted, all changes reported by :hg:`status`
1394 1395 will be committed.
1395 1396
1396 1397 If you are committing the result of a merge, do not provide any
1397 1398 filenames or -I/-X filters.
1398 1399
1399 1400 If no commit message is specified, Mercurial starts your
1400 1401 configured editor where you can enter a message. In case your
1401 1402 commit fails, you will find a backup of your message in
1402 1403 ``.hg/last-message.txt``.
1403 1404
1404 1405 The --amend flag can be used to amend the parent of the
1405 1406 working directory with a new commit that contains the changes
1406 1407 in the parent in addition to those currently reported by :hg:`status`,
1407 1408 if there are any. The old commit is stored in a backup bundle in
1408 1409 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1409 1410 on how to restore it).
1410 1411
1411 1412 Message, user and date are taken from the amended commit unless
1412 1413 specified. When a message isn't specified on the command line,
1413 1414 the editor will open with the message of the amended commit.
1414 1415
1415 1416 It is not possible to amend public changesets (see :hg:`help phases`)
1416 1417 or changesets that have children.
1417 1418
1418 1419 See :hg:`help dates` for a list of formats valid for -d/--date.
1419 1420
1420 1421 Returns 0 on success, 1 if nothing changed.
1421 1422 """
1422 1423 if opts.get('subrepos'):
1423 1424 if opts.get('amend'):
1424 1425 raise util.Abort(_('cannot amend with --subrepos'))
1425 1426 # Let --subrepos on the command line override config setting.
1426 1427 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1427 1428
1428 1429 cmdutil.checkunfinished(repo, commit=True)
1429 1430
1430 1431 branch = repo[None].branch()
1431 1432 bheads = repo.branchheads(branch)
1432 1433
1433 1434 extra = {}
1434 1435 if opts.get('close_branch'):
1435 1436 extra['close'] = 1
1436 1437
1437 1438 if not bheads:
1438 1439 raise util.Abort(_('can only close branch heads'))
1439 1440 elif opts.get('amend'):
1440 1441 if repo.parents()[0].p1().branch() != branch and \
1441 1442 repo.parents()[0].p2().branch() != branch:
1442 1443 raise util.Abort(_('can only close branch heads'))
1443 1444
1444 1445 if opts.get('amend'):
1445 1446 if ui.configbool('ui', 'commitsubrepos'):
1446 1447 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1447 1448
1448 1449 old = repo['.']
1449 1450 if not old.mutable():
1450 1451 raise util.Abort(_('cannot amend public changesets'))
1451 1452 if len(repo[None].parents()) > 1:
1452 1453 raise util.Abort(_('cannot amend while merging'))
1453 1454 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1454 1455 if not allowunstable and old.children():
1455 1456 raise util.Abort(_('cannot amend changeset with children'))
1456 1457
1457 1458 # commitfunc is used only for temporary amend commit by cmdutil.amend
1458 1459 def commitfunc(ui, repo, message, match, opts):
1459 1460 return repo.commit(message,
1460 1461 opts.get('user') or old.user(),
1461 1462 opts.get('date') or old.date(),
1462 1463 match,
1463 1464 extra=extra)
1464 1465
1465 1466 current = repo._bookmarkcurrent
1466 1467 marks = old.bookmarks()
1467 1468 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1468 1469 if node == old.node():
1469 1470 ui.status(_("nothing changed\n"))
1470 1471 return 1
1471 1472 elif marks:
1472 1473 ui.debug('moving bookmarks %r from %s to %s\n' %
1473 1474 (marks, old.hex(), hex(node)))
1474 1475 newmarks = repo._bookmarks
1475 1476 for bm in marks:
1476 1477 newmarks[bm] = node
1477 1478 if bm == current:
1478 1479 bookmarks.setcurrent(repo, bm)
1479 1480 newmarks.write()
1480 1481 else:
1481 1482 def commitfunc(ui, repo, message, match, opts):
1482 1483 backup = ui.backupconfig('phases', 'new-commit')
1483 1484 baseui = repo.baseui
1484 1485 basebackup = baseui.backupconfig('phases', 'new-commit')
1485 1486 try:
1486 1487 if opts.get('secret'):
1487 1488 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1488 1489 # Propagate to subrepos
1489 1490 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1490 1491
1491 1492 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1492 1493 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1493 1494 return repo.commit(message, opts.get('user'), opts.get('date'),
1494 1495 match,
1495 1496 editor=editor,
1496 1497 extra=extra)
1497 1498 finally:
1498 1499 ui.restoreconfig(backup)
1499 1500 repo.baseui.restoreconfig(basebackup)
1500 1501
1501 1502
1502 1503 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1503 1504
1504 1505 if not node:
1505 1506 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1506 1507 if stat[3]:
1507 1508 ui.status(_("nothing changed (%d missing files, see "
1508 1509 "'hg status')\n") % len(stat[3]))
1509 1510 else:
1510 1511 ui.status(_("nothing changed\n"))
1511 1512 return 1
1512 1513
1513 1514 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1514 1515
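# Illustrative usage (added comment, not in the original source), matching the
# options declared above:
#
#   hg commit -m "fix parser"     # normal commit
#   hg commit --amend             # fold working-dir changes into '.'
#   hg commit -s -m "wip"         # commit in the secret phase
#
# --amend refuses to run on public changesets, during a merge, or with
# subrepo committing enabled, as enforced by the checks above.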
1515 1516 @command('config|showconfig|debugconfig',
1516 1517 [('u', 'untrusted', None, _('show untrusted configuration options')),
1517 1518 ('e', 'edit', None, _('edit user config')),
1518 1519 ('l', 'local', None, _('edit repository config')),
1519 1520 ('g', 'global', None, _('edit global config'))],
1520 1521 _('[-u] [NAME]...'),
1521 1522 optionalrepo=True)
1522 1523 def config(ui, repo, *values, **opts):
1523 1524 """show combined config settings from all hgrc files
1524 1525
1525 1526 With no arguments, print names and values of all config items.
1526 1527
1527 1528 With one argument of the form section.name, print just the value
1528 1529 of that config item.
1529 1530
1530 1531 With multiple arguments, print names and values of all config
1531 1532 items with matching section names.
1532 1533
1533 1534 With --edit, start an editor on the user-level config file. With
1534 1535 --global, edit the system-wide config file. With --local, edit the
1535 1536 repository-level config file.
1536 1537
1537 1538 With --debug, the source (filename and line number) is printed
1538 1539 for each config item.
1539 1540
1540 1541 See :hg:`help config` for more information about config files.
1541 1542
1542 1543 Returns 0 on success, 1 if NAME does not exist.
1543 1544
1544 1545 """
1545 1546
1546 1547 if opts.get('edit') or opts.get('local') or opts.get('global'):
1547 1548 if opts.get('local') and opts.get('global'):
1548 1549 raise util.Abort(_("can't use --local and --global together"))
1549 1550
1550 1551 if opts.get('local'):
1551 1552 if not repo:
1552 1553 raise util.Abort(_("can't use --local outside a repository"))
1553 1554 paths = [repo.join('hgrc')]
1554 1555 elif opts.get('global'):
1555 1556 paths = scmutil.systemrcpath()
1556 1557 else:
1557 1558 paths = scmutil.userrcpath()
1558 1559
1559 1560 for f in paths:
1560 1561 if os.path.exists(f):
1561 1562 break
1562 1563 else:
1563 1564 if opts.get('global'):
1564 1565 samplehgrc = uimod.samplehgrcs['global']
1565 1566 elif opts.get('local'):
1566 1567 samplehgrc = uimod.samplehgrcs['local']
1567 1568 else:
1568 1569 samplehgrc = uimod.samplehgrcs['user']
1569 1570
1570 1571 f = paths[0]
1571 1572 fp = open(f, "w")
1572 1573 fp.write(samplehgrc)
1573 1574 fp.close()
1574 1575
1575 1576 editor = ui.geteditor()
1576 1577 ui.system("%s \"%s\"" % (editor, f),
1577 1578 onerr=util.Abort, errprefix=_("edit failed"))
1578 1579 return
1579 1580
1580 1581 for f in scmutil.rcpath():
1581 1582 ui.debug('read config from: %s\n' % f)
1582 1583 untrusted = bool(opts.get('untrusted'))
1583 1584 if values:
1584 1585 sections = [v for v in values if '.' not in v]
1585 1586 items = [v for v in values if '.' in v]
1586 1587 if len(items) > 1 or items and sections:
1587 1588 raise util.Abort(_('only one config item permitted'))
1588 1589 matched = False
1589 1590 for section, name, value in ui.walkconfig(untrusted=untrusted):
1590 1591 value = str(value).replace('\n', '\\n')
1591 1592 sectname = section + '.' + name
1592 1593 if values:
1593 1594 for v in values:
1594 1595 if v == section:
1595 1596 ui.debug('%s: ' %
1596 1597 ui.configsource(section, name, untrusted))
1597 1598 ui.write('%s=%s\n' % (sectname, value))
1598 1599 matched = True
1599 1600 elif v == sectname:
1600 1601 ui.debug('%s: ' %
1601 1602 ui.configsource(section, name, untrusted))
1602 1603 ui.write(value, '\n')
1603 1604 matched = True
1604 1605 else:
1605 1606 ui.debug('%s: ' %
1606 1607 ui.configsource(section, name, untrusted))
1607 1608 ui.write('%s=%s\n' % (sectname, value))
1608 1609 matched = True
1609 1610 if matched:
1610 1611 return 0
1611 1612 return 1
1612 1613
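# Illustrative usage (added comment, not in the original source):
#
#   hg config ui.username        # print one item; exit 0, or 1 if unset
#   hg config ui                 # print every item in the [ui] section
#   hg config --edit --local     # open the repository-level hgrc in the editor
#
# With --debug each line is prefixed with the file and line it came from, as
# produced by ui.configsource() above.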
1613 1614 @command('copy|cp',
1614 1615 [('A', 'after', None, _('record a copy that has already occurred')),
1615 1616 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1616 1617 ] + walkopts + dryrunopts,
1617 1618 _('[OPTION]... [SOURCE]... DEST'))
1618 1619 def copy(ui, repo, *pats, **opts):
1619 1620 """mark files as copied for the next commit
1620 1621
1621 1622 Mark dest as having copies of source files. If dest is a
1622 1623 directory, copies are put in that directory. If dest is a file,
1623 1624 the source must be a single file.
1624 1625
1625 1626 By default, this command copies the contents of files as they
1626 1627 exist in the working directory. If invoked with -A/--after, the
1627 1628 operation is recorded, but no copying is performed.
1628 1629
1629 1630 This command takes effect with the next commit. To undo a copy
1630 1631 before that, see :hg:`revert`.
1631 1632
1632 1633 Returns 0 on success, 1 if errors are encountered.
1633 1634 """
1634 1635 wlock = repo.wlock(False)
1635 1636 try:
1636 1637 return cmdutil.copy(ui, repo, pats, opts)
1637 1638 finally:
1638 1639 wlock.release()
1639 1640
1640 1641 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1641 1642 def debugancestor(ui, repo, *args):
1642 1643 """find the ancestor revision of two revisions in a given index"""
1643 1644 if len(args) == 3:
1644 1645 index, rev1, rev2 = args
1645 1646 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1646 1647 lookup = r.lookup
1647 1648 elif len(args) == 2:
1648 1649 if not repo:
1649 1650 raise util.Abort(_("there is no Mercurial repository here "
1650 1651 "(.hg not found)"))
1651 1652 rev1, rev2 = args
1652 1653 r = repo.changelog
1653 1654 lookup = repo.lookup
1654 1655 else:
1655 1656 raise util.Abort(_('either two or three arguments required'))
1656 1657 a = r.ancestor(lookup(rev1), lookup(rev2))
1657 1658 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1658 1659
1659 1660 @command('debugbuilddag',
1660 1661 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1661 1662 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1662 1663 ('n', 'new-file', None, _('add new file at each rev'))],
1663 1664 _('[OPTION]... [TEXT]'))
1664 1665 def debugbuilddag(ui, repo, text=None,
1665 1666 mergeable_file=False,
1666 1667 overwritten_file=False,
1667 1668 new_file=False):
1668 1669 """builds a repo with a given DAG from scratch in the current empty repo
1669 1670
1670 1671 The description of the DAG is read from stdin if not given on the
1671 1672 command line.
1672 1673
1673 1674 Elements:
1674 1675
1675 1676 - "+n" is a linear run of n nodes based on the current default parent
1676 1677 - "." is a single node based on the current default parent
1677 1678 - "$" resets the default parent to null (implied at the start);
1678 1679 otherwise the default parent is always the last node created
1679 1680 - "<p" sets the default parent to the backref p
1680 1681 - "*p" is a fork at parent p, which is a backref
1681 1682 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1682 1683 - "/p2" is a merge of the preceding node and p2
1683 1684 - ":tag" defines a local tag for the preceding node
1684 1685 - "@branch" sets the named branch for subsequent nodes
1685 1686 - "#...\\n" is a comment up to the end of the line
1686 1687
1687 1688 Whitespace between the above elements is ignored.
1688 1689
1689 1690 A backref is either
1690 1691
1691 1692 - a number n, which references the node curr-n, where curr is the current
1692 1693 node, or
1693 1694 - the name of a local tag you placed earlier using ":tag", or
1694 1695 - empty to denote the default parent.
1695 1696
1696 1697 All string-valued elements are either strictly alphanumeric, or must
1697 1698 be enclosed in double quotes ("..."), with "\\" as escape character.
1698 1699 """
1699 1700
1700 1701 if text is None:
1701 1702 ui.status(_("reading DAG from stdin\n"))
1702 1703 text = ui.fin.read()
1703 1704
1704 1705 cl = repo.changelog
1705 1706 if len(cl) > 0:
1706 1707 raise util.Abort(_('repository is not empty'))
1707 1708
1708 1709 # determine number of revs in DAG
1709 1710 total = 0
1710 1711 for type, data in dagparser.parsedag(text):
1711 1712 if type == 'n':
1712 1713 total += 1
1713 1714
1714 1715 if mergeable_file:
1715 1716 linesperrev = 2
1716 1717 # make a file with k lines per rev
1717 1718 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1718 1719 initialmergedlines.append("")
1719 1720
1720 1721 tags = []
1721 1722
1722 1723 lock = tr = None
1723 1724 try:
1724 1725 lock = repo.lock()
1725 1726 tr = repo.transaction("builddag")
1726 1727
1727 1728 at = -1
1728 1729 atbranch = 'default'
1729 1730 nodeids = []
1730 1731 id = 0
1731 1732 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1732 1733 for type, data in dagparser.parsedag(text):
1733 1734 if type == 'n':
1734 1735 ui.note(('node %s\n' % str(data)))
1735 1736 id, ps = data
1736 1737
1737 1738 files = []
1738 1739 fctxs = {}
1739 1740
1740 1741 p2 = None
1741 1742 if mergeable_file:
1742 1743 fn = "mf"
1743 1744 p1 = repo[ps[0]]
1744 1745 if len(ps) > 1:
1745 1746 p2 = repo[ps[1]]
1746 1747 pa = p1.ancestor(p2)
1747 1748 base, local, other = [x[fn].data() for x in (pa, p1,
1748 1749 p2)]
1749 1750 m3 = simplemerge.Merge3Text(base, local, other)
1750 1751 ml = [l.strip() for l in m3.merge_lines()]
1751 1752 ml.append("")
1752 1753 elif at > 0:
1753 1754 ml = p1[fn].data().split("\n")
1754 1755 else:
1755 1756 ml = initialmergedlines
1756 1757 ml[id * linesperrev] += " r%i" % id
1757 1758 mergedtext = "\n".join(ml)
1758 1759 files.append(fn)
1759 1760 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1760 1761
1761 1762 if overwritten_file:
1762 1763 fn = "of"
1763 1764 files.append(fn)
1764 1765 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1765 1766
1766 1767 if new_file:
1767 1768 fn = "nf%i" % id
1768 1769 files.append(fn)
1769 1770 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1770 1771 if len(ps) > 1:
1771 1772 if not p2:
1772 1773 p2 = repo[ps[1]]
1773 1774 for fn in p2:
1774 1775 if fn.startswith("nf"):
1775 1776 files.append(fn)
1776 1777 fctxs[fn] = p2[fn]
1777 1778
1778 1779 def fctxfn(repo, cx, path):
1779 1780 return fctxs.get(path)
1780 1781
1781 1782 if len(ps) == 0 or ps[0] < 0:
1782 1783 pars = [None, None]
1783 1784 elif len(ps) == 1:
1784 1785 pars = [nodeids[ps[0]], None]
1785 1786 else:
1786 1787 pars = [nodeids[p] for p in ps]
1787 1788 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1788 1789 date=(id, 0),
1789 1790 user="debugbuilddag",
1790 1791 extra={'branch': atbranch})
1791 1792 nodeid = repo.commitctx(cx)
1792 1793 nodeids.append(nodeid)
1793 1794 at = id
1794 1795 elif type == 'l':
1795 1796 id, name = data
1796 1797 ui.note(('tag %s\n' % name))
1797 1798 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1798 1799 elif type == 'a':
1799 1800 ui.note(('branch %s\n' % data))
1800 1801 atbranch = data
1801 1802 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1802 1803 tr.close()
1803 1804
1804 1805 if tags:
1805 1806 repo.opener.write("localtags", "".join(tags))
1806 1807 finally:
1807 1808 ui.progress(_('building'), None)
1808 1809 release(tr, lock)
1809 1810
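# Worked example (added comment, not in the original source), following the
# DAG grammar documented above; the revision numbers assume an empty repo:
#
#   hg init dagdemo && cd dagdemo
#   hg debugbuilddag -n '+3 <2 +2 /3 :end'
#
# '+3' creates revs 0-2, '<2' resets the default parent two nodes back (rev 0),
# '+2' adds revs 3-4 on that side branch, '/3' merges the preceding node with
# backref 3 (rev 1), and ':end' puts a local tag on the resulting merge.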
1810 1811 @command('debugbundle',
1811 1812 [('a', 'all', None, _('show all details'))],
1812 1813 _('FILE'),
1813 1814 norepo=True)
1814 1815 def debugbundle(ui, bundlepath, all=None, **opts):
1815 1816 """lists the contents of a bundle"""
1816 1817 f = hg.openpath(ui, bundlepath)
1817 1818 try:
1818 1819 gen = exchange.readbundle(ui, f, bundlepath)
1819 1820 if all:
1820 1821 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1821 1822
1822 1823 def showchunks(named):
1823 1824 ui.write("\n%s\n" % named)
1824 1825 chain = None
1825 1826 while True:
1826 1827 chunkdata = gen.deltachunk(chain)
1827 1828 if not chunkdata:
1828 1829 break
1829 1830 node = chunkdata['node']
1830 1831 p1 = chunkdata['p1']
1831 1832 p2 = chunkdata['p2']
1832 1833 cs = chunkdata['cs']
1833 1834 deltabase = chunkdata['deltabase']
1834 1835 delta = chunkdata['delta']
1835 1836 ui.write("%s %s %s %s %s %s\n" %
1836 1837 (hex(node), hex(p1), hex(p2),
1837 1838 hex(cs), hex(deltabase), len(delta)))
1838 1839 chain = node
1839 1840
1840 1841 chunkdata = gen.changelogheader()
1841 1842 showchunks("changelog")
1842 1843 chunkdata = gen.manifestheader()
1843 1844 showchunks("manifest")
1844 1845 while True:
1845 1846 chunkdata = gen.filelogheader()
1846 1847 if not chunkdata:
1847 1848 break
1848 1849 fname = chunkdata['filename']
1849 1850 showchunks(fname)
1850 1851 else:
1851 1852 chunkdata = gen.changelogheader()
1852 1853 chain = None
1853 1854 while True:
1854 1855 chunkdata = gen.deltachunk(chain)
1855 1856 if not chunkdata:
1856 1857 break
1857 1858 node = chunkdata['node']
1858 1859 ui.write("%s\n" % hex(node))
1859 1860 chain = node
1860 1861 finally:
1861 1862 f.close()
1862 1863
1863 1864 @command('debugcheckstate', [], '')
1864 1865 def debugcheckstate(ui, repo):
1865 1866 """validate the correctness of the current dirstate"""
1866 1867 parent1, parent2 = repo.dirstate.parents()
1867 1868 m1 = repo[parent1].manifest()
1868 1869 m2 = repo[parent2].manifest()
1869 1870 errors = 0
1870 1871 for f in repo.dirstate:
1871 1872 state = repo.dirstate[f]
1872 1873 if state in "nr" and f not in m1:
1873 1874 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1874 1875 errors += 1
1875 1876 if state in "a" and f in m1:
1876 1877 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1877 1878 errors += 1
1878 1879 if state in "m" and f not in m1 and f not in m2:
1879 1880 ui.warn(_("%s in state %s, but not in either manifest\n") %
1880 1881 (f, state))
1881 1882 errors += 1
1882 1883 for f in m1:
1883 1884 state = repo.dirstate[f]
1884 1885 if state not in "nrm":
1885 1886 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1886 1887 errors += 1
1887 1888 if errors:
1888 1889 error = _(".hg/dirstate inconsistent with current parent's manifest")
1889 1890 raise util.Abort(error)
1890 1891
1891 1892 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
1892 1893 def debugcommands(ui, cmd='', *args):
1893 1894 """list all available commands and options"""
1894 1895 for cmd, vals in sorted(table.iteritems()):
1895 1896 cmd = cmd.split('|')[0].strip('^')
1896 1897 opts = ', '.join([i[1] for i in vals[1]])
1897 1898 ui.write('%s: %s\n' % (cmd, opts))
1898 1899
1899 1900 @command('debugcomplete',
1900 1901 [('o', 'options', None, _('show the command options'))],
1901 1902 _('[-o] CMD'),
1902 1903 norepo=True)
1903 1904 def debugcomplete(ui, cmd='', **opts):
1904 1905 """returns the completion list associated with the given command"""
1905 1906
1906 1907 if opts.get('options'):
1907 1908 options = []
1908 1909 otables = [globalopts]
1909 1910 if cmd:
1910 1911 aliases, entry = cmdutil.findcmd(cmd, table, False)
1911 1912 otables.append(entry[1])
1912 1913 for t in otables:
1913 1914 for o in t:
1914 1915 if "(DEPRECATED)" in o[3]:
1915 1916 continue
1916 1917 if o[0]:
1917 1918 options.append('-%s' % o[0])
1918 1919 options.append('--%s' % o[1])
1919 1920 ui.write("%s\n" % "\n".join(options))
1920 1921 return
1921 1922
1922 1923 cmdlist = cmdutil.findpossible(cmd, table)
1923 1924 if ui.verbose:
1924 1925 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1925 1926 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1926 1927
1927 1928 @command('debugdag',
1928 1929 [('t', 'tags', None, _('use tags as labels')),
1929 1930 ('b', 'branches', None, _('annotate with branch names')),
1930 1931 ('', 'dots', None, _('use dots for runs')),
1931 1932 ('s', 'spaces', None, _('separate elements by spaces'))],
1932 1933 _('[OPTION]... [FILE [REV]...]'),
1933 1934 optionalrepo=True)
1934 1935 def debugdag(ui, repo, file_=None, *revs, **opts):
1935 1936 """format the changelog or an index DAG as a concise textual description
1936 1937
1937 1938 If you pass a revlog index, the revlog's DAG is emitted. If you list
1938 1939 revision numbers, they get labeled in the output as rN.
1939 1940
1940 1941 Otherwise, the changelog DAG of the current repo is emitted.
1941 1942 """
1942 1943 spaces = opts.get('spaces')
1943 1944 dots = opts.get('dots')
1944 1945 if file_:
1945 1946 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1946 1947 revs = set((int(r) for r in revs))
1947 1948 def events():
1948 1949 for r in rlog:
1949 1950 yield 'n', (r, list(p for p in rlog.parentrevs(r)
1950 1951 if p != -1))
1951 1952 if r in revs:
1952 1953 yield 'l', (r, "r%i" % r)
1953 1954 elif repo:
1954 1955 cl = repo.changelog
1955 1956 tags = opts.get('tags')
1956 1957 branches = opts.get('branches')
1957 1958 if tags:
1958 1959 labels = {}
1959 1960 for l, n in repo.tags().items():
1960 1961 labels.setdefault(cl.rev(n), []).append(l)
1961 1962 def events():
1962 1963 b = "default"
1963 1964 for r in cl:
1964 1965 if branches:
1965 1966 newb = cl.read(cl.node(r))[5]['branch']
1966 1967 if newb != b:
1967 1968 yield 'a', newb
1968 1969 b = newb
1969 1970 yield 'n', (r, list(p for p in cl.parentrevs(r)
1970 1971 if p != -1))
1971 1972 if tags:
1972 1973 ls = labels.get(r)
1973 1974 if ls:
1974 1975 for l in ls:
1975 1976 yield 'l', (r, l)
1976 1977 else:
1977 1978 raise util.Abort(_('need repo for changelog dag'))
1978 1979
1979 1980 for line in dagparser.dagtextlines(events(),
1980 1981 addspaces=spaces,
1981 1982 wraplabels=True,
1982 1983 wrapannotations=True,
1983 1984 wrapnonlinear=dots,
1984 1985 usedots=dots,
1985 1986 maxlinewidth=70):
1986 1987 ui.write(line)
1987 1988 ui.write("\n")
1988 1989
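# Illustrative usage (added comment, not in the original source):
#
#   hg debugdag -t -b --dots              # changelog DAG with tag/branch labels
#   hg debugdag .hg/store/00manifest.i 0 5
#
# The second form reads a revlog index directly and labels revs 0 and 5 as
# 'r0' and 'r5' in the emitted dagtext.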
1989 1990 @command('debugdata',
1990 1991 [('c', 'changelog', False, _('open changelog')),
1991 1992 ('m', 'manifest', False, _('open manifest'))],
1992 1993 _('-c|-m|FILE REV'))
1993 1994 def debugdata(ui, repo, file_, rev=None, **opts):
1994 1995 """dump the contents of a data file revision"""
1995 1996 if opts.get('changelog') or opts.get('manifest'):
1996 1997 file_, rev = None, file_
1997 1998 elif rev is None:
1998 1999 raise error.CommandError('debugdata', _('invalid arguments'))
1999 2000 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
2000 2001 try:
2001 2002 ui.write(r.revision(r.lookup(rev)))
2002 2003 except KeyError:
2003 2004 raise util.Abort(_('invalid revision identifier %s') % rev)
2004 2005
2005 2006 @command('debugdate',
2006 2007 [('e', 'extended', None, _('try extended date formats'))],
2007 2008 _('[-e] DATE [RANGE]'),
2008 2009 norepo=True, optionalrepo=True)
2009 2010 def debugdate(ui, date, range=None, **opts):
2010 2011 """parse and display a date"""
2011 2012 if opts["extended"]:
2012 2013 d = util.parsedate(date, util.extendeddateformats)
2013 2014 else:
2014 2015 d = util.parsedate(date)
2015 2016 ui.write(("internal: %s %s\n") % d)
2016 2017 ui.write(("standard: %s\n") % util.datestr(d))
2017 2018 if range:
2018 2019 m = util.matchdate(range)
2019 2020 ui.write(("match: %s\n") % m(d[0]))
2020 2021
2021 2022 @command('debugdiscovery',
2022 2023 [('', 'old', None, _('use old-style discovery')),
2023 2024 ('', 'nonheads', None,
2024 2025 _('use old-style discovery with non-heads included')),
2025 2026 ] + remoteopts,
2026 2027 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
2027 2028 def debugdiscovery(ui, repo, remoteurl="default", **opts):
2028 2029 """runs the changeset discovery protocol in isolation"""
2029 2030 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
2030 2031 opts.get('branch'))
2031 2032 remote = hg.peer(repo, opts, remoteurl)
2032 2033 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
2033 2034
2034 2035 # make sure tests are repeatable
2035 2036 random.seed(12323)
2036 2037
2037 2038 def doit(localheads, remoteheads, remote=remote):
2038 2039 if opts.get('old'):
2039 2040 if localheads:
2040 2041 raise util.Abort('cannot use localheads with old style '
2041 2042 'discovery')
2042 2043 if not util.safehasattr(remote, 'branches'):
2043 2044 # enable in-client legacy support
2044 2045 remote = localrepo.locallegacypeer(remote.local())
2045 2046 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
2046 2047 force=True)
2047 2048 common = set(common)
2048 2049 if not opts.get('nonheads'):
2049 2050 ui.write(("unpruned common: %s\n") %
2050 2051 " ".join(sorted(short(n) for n in common)))
2051 2052 dag = dagutil.revlogdag(repo.changelog)
2052 2053 all = dag.ancestorset(dag.internalizeall(common))
2053 2054 common = dag.externalizeall(dag.headsetofconnecteds(all))
2054 2055 else:
2055 2056 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
2056 2057 common = set(common)
2057 2058 rheads = set(hds)
2058 2059 lheads = set(repo.heads())
2059 2060 ui.write(("common heads: %s\n") %
2060 2061 " ".join(sorted(short(n) for n in common)))
2061 2062 if lheads <= common:
2062 2063 ui.write(("local is subset\n"))
2063 2064 elif rheads <= common:
2064 2065 ui.write(("remote is subset\n"))
2065 2066
2066 2067 serverlogs = opts.get('serverlog')
2067 2068 if serverlogs:
2068 2069 for filename in serverlogs:
2069 2070 logfile = open(filename, 'r')
2070 2071 try:
2071 2072 line = logfile.readline()
2072 2073 while line:
2073 2074 parts = line.strip().split(';')
2074 2075 op = parts[1]
2075 2076 if op == 'cg':
2076 2077 pass
2077 2078 elif op == 'cgss':
2078 2079 doit(parts[2].split(' '), parts[3].split(' '))
2079 2080 elif op == 'unb':
2080 2081 doit(parts[3].split(' '), parts[2].split(' '))
2081 2082 line = logfile.readline()
2082 2083 finally:
2083 2084 logfile.close()
2084 2085
2085 2086 else:
2086 2087 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2087 2088 opts.get('remote_head'))
2088 2089 localrevs = opts.get('local_head')
2089 2090 doit(localrevs, remoterevs)
2090 2091
2091 2092 @command('debugfileset',
2092 2093 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2093 2094 _('[-r REV] FILESPEC'))
2094 2095 def debugfileset(ui, repo, expr, **opts):
2095 2096 '''parse and apply a fileset specification'''
2096 2097 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2097 2098 if ui.verbose:
2098 2099 tree = fileset.parse(expr)[0]
2099 2100 ui.note(tree, "\n")
2100 2101
2101 2102 for f in ctx.getfileset(expr):
2102 2103 ui.write("%s\n" % f)
2103 2104
2104 2105 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
2105 2106 def debugfsinfo(ui, path="."):
2106 2107 """show information detected about current filesystem"""
2107 2108 util.writefile('.debugfsinfo', '')
2108 2109 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2109 2110 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2110 2111 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2111 2112 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2112 2113 and 'yes' or 'no'))
2113 2114 os.unlink('.debugfsinfo')
2114 2115
2115 2116 @command('debuggetbundle',
2116 2117 [('H', 'head', [], _('id of head node'), _('ID')),
2117 2118 ('C', 'common', [], _('id of common node'), _('ID')),
2118 2119 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2119 2120 _('REPO FILE [-H|-C ID]...'),
2120 2121 norepo=True)
2121 2122 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2122 2123 """retrieves a bundle from a repo
2123 2124
2124 2125 Every ID must be a full-length hex node id string. Saves the bundle to the
2125 2126 given file.
2126 2127 """
2127 2128 repo = hg.peer(ui, opts, repopath)
2128 2129 if not repo.capable('getbundle'):
2129 2130 raise util.Abort("getbundle() not supported by target repository")
2130 2131 args = {}
2131 2132 if common:
2132 2133 args['common'] = [bin(s) for s in common]
2133 2134 if head:
2134 2135 args['heads'] = [bin(s) for s in head]
2135 2136 # TODO: get desired bundlecaps from command line.
2136 2137 args['bundlecaps'] = None
2137 2138 bundle = repo.getbundle('debug', **args)
2138 2139
2139 2140 bundletype = opts.get('type', 'bzip2').lower()
2140 2141 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
2141 2142 bundletype = btypes.get(bundletype)
2142 2143 if bundletype not in changegroup.bundletypes:
2143 2144 raise util.Abort(_('unknown bundle type specified with --type'))
2144 2145 changegroup.writebundle(bundle, bundlepath, bundletype)
2145 2146
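# Illustrative usage (added comment, not in the original source); any -H/-C
# arguments must be full 40-digit hex node ids, and the repo may be a local
# path or a URL:
#
#   hg debuggetbundle ../upstream bundle.hg -t gzip
#
# writes an HG10GZ bundle, per the type-to-header mapping above.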
2146 2147 @command('debugignore', [], '')
2147 2148 def debugignore(ui, repo, *values, **opts):
2148 2149 """display the combined ignore pattern"""
2149 2150 ignore = repo.dirstate._ignore
2150 2151 includepat = getattr(ignore, 'includepat', None)
2151 2152 if includepat is not None:
2152 2153 ui.write("%s\n" % includepat)
2153 2154 else:
2154 2155 raise util.Abort(_("no ignore patterns found"))
2155 2156
2156 2157 @command('debugindex',
2157 2158 [('c', 'changelog', False, _('open changelog')),
2158 2159 ('m', 'manifest', False, _('open manifest')),
2159 2160 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2160 2161 _('[-f FORMAT] -c|-m|FILE'),
2161 2162 optionalrepo=True)
2162 2163 def debugindex(ui, repo, file_=None, **opts):
2163 2164 """dump the contents of an index file"""
2164 2165 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2165 2166 format = opts.get('format', 0)
2166 2167 if format not in (0, 1):
2167 2168 raise util.Abort(_("unknown format %d") % format)
2168 2169
2169 2170 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2170 2171 if generaldelta:
2171 2172 basehdr = ' delta'
2172 2173 else:
2173 2174 basehdr = ' base'
2174 2175
2175 2176 if format == 0:
2176 2177 ui.write(" rev offset length " + basehdr + " linkrev"
2177 2178 " nodeid p1 p2\n")
2178 2179 elif format == 1:
2179 2180 ui.write(" rev flag offset length"
2180 2181 " size " + basehdr + " link p1 p2"
2181 2182 " nodeid\n")
2182 2183
2183 2184 for i in r:
2184 2185 node = r.node(i)
2185 2186 if generaldelta:
2186 2187 base = r.deltaparent(i)
2187 2188 else:
2188 2189 base = r.chainbase(i)
2189 2190 if format == 0:
2190 2191 try:
2191 2192 pp = r.parents(node)
2192 2193 except Exception:
2193 2194 pp = [nullid, nullid]
2194 2195 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2195 2196 i, r.start(i), r.length(i), base, r.linkrev(i),
2196 2197 short(node), short(pp[0]), short(pp[1])))
2197 2198 elif format == 1:
2198 2199 pr = r.parentrevs(i)
2199 2200 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2200 2201 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2201 2202 base, r.linkrev(i), pr[0], pr[1], short(node)))
2202 2203
2203 2204 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2204 2205 def debugindexdot(ui, repo, file_):
2205 2206 """dump an index DAG as a graphviz dot file"""
2206 2207 r = None
2207 2208 if repo:
2208 2209 filelog = repo.file(file_)
2209 2210 if len(filelog):
2210 2211 r = filelog
2211 2212 if not r:
2212 2213 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2213 2214 ui.write(("digraph G {\n"))
2214 2215 for i in r:
2215 2216 node = r.node(i)
2216 2217 pp = r.parents(node)
2217 2218 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2218 2219 if pp[1] != nullid:
2219 2220 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2220 2221 ui.write("}\n")
2221 2222
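# Illustrative pipeline (added comment, not in the original source), assuming
# Graphviz is installed:
#
#   hg debugindexdot .hg/store/00changelog.i | dot -Tpng -o dag.png
#
# renders the emitted "digraph G { ... }" description of the index DAG.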
2222 2223 @command('debuginstall', [], '', norepo=True)
2223 2224 def debuginstall(ui):
2224 2225 '''test Mercurial installation
2225 2226
2226 2227 Returns 0 on success.
2227 2228 '''
2228 2229
2229 2230 def writetemp(contents):
2230 2231 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2231 2232 f = os.fdopen(fd, "wb")
2232 2233 f.write(contents)
2233 2234 f.close()
2234 2235 return name
2235 2236
2236 2237 problems = 0
2237 2238
2238 2239 # encoding
2239 2240 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2240 2241 try:
2241 2242 encoding.fromlocal("test")
2242 2243 except util.Abort, inst:
2243 2244 ui.write(" %s\n" % inst)
2244 2245 ui.write(_(" (check that your locale is properly set)\n"))
2245 2246 problems += 1
2246 2247
2247 2248 # Python
2248 2249 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2249 2250 ui.status(_("checking Python version (%s)\n")
2250 2251 % ("%s.%s.%s" % sys.version_info[:3]))
2251 2252 ui.status(_("checking Python lib (%s)...\n")
2252 2253 % os.path.dirname(os.__file__))
2253 2254
2254 2255 # compiled modules
2255 2256 ui.status(_("checking installed modules (%s)...\n")
2256 2257 % os.path.dirname(__file__))
2257 2258 try:
2258 2259 import bdiff, mpatch, base85, osutil
2259 2260 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2260 2261 except Exception, inst:
2261 2262 ui.write(" %s\n" % inst)
2262 2263 ui.write(_(" One or more extensions could not be found"))
2263 2264 ui.write(_(" (check that you compiled the extensions)\n"))
2264 2265 problems += 1
2265 2266
2266 2267 # templates
2267 2268 import templater
2268 2269 p = templater.templatepaths()
2269 2270 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2270 2271 if p:
2271 2272 m = templater.templatepath("map-cmdline.default")
2272 2273 if m:
2273 2274 # template found, check if it is working
2274 2275 try:
2275 2276 templater.templater(m)
2276 2277 except Exception, inst:
2277 2278 ui.write(" %s\n" % inst)
2278 2279 p = None
2279 2280 else:
2280 2281 ui.write(_(" template 'default' not found\n"))
2281 2282 p = None
2282 2283 else:
2283 2284 ui.write(_(" no template directories found\n"))
2284 2285 if not p:
2285 2286 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2286 2287 problems += 1
2287 2288
2288 2289 # editor
2289 2290 ui.status(_("checking commit editor...\n"))
2290 2291 editor = ui.geteditor()
2291 2292 cmdpath = util.findexe(shlex.split(editor)[0])
2292 2293 if not cmdpath:
2293 2294 if editor == 'vi':
2294 2295 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2295 2296 ui.write(_(" (specify a commit editor in your configuration"
2296 2297 " file)\n"))
2297 2298 else:
2298 2299 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2299 2300 ui.write(_(" (specify a commit editor in your configuration"
2300 2301 " file)\n"))
2301 2302 problems += 1
2302 2303
2303 2304 # check username
2304 2305 ui.status(_("checking username...\n"))
2305 2306 try:
2306 2307 ui.username()
2307 2308 except util.Abort, e:
2308 2309 ui.write(" %s\n" % e)
2309 2310 ui.write(_(" (specify a username in your configuration file)\n"))
2310 2311 problems += 1
2311 2312
2312 2313 if not problems:
2313 2314 ui.status(_("no problems detected\n"))
2314 2315 else:
2315 2316 ui.write(_("%s problems detected,"
2316 2317 " please check your install!\n") % problems)
2317 2318
2318 2319 return problems
2319 2320
2320 2321 @command('debugknown', [], _('REPO ID...'), norepo=True)
2321 2322 def debugknown(ui, repopath, *ids, **opts):
2322 2323 """test whether node ids are known to a repo
2323 2324
2324 2325 Every ID must be a full-length hex node id string. Returns a list of 0s
2325 2326 and 1s indicating unknown/known.
2326 2327 """
2327 2328 repo = hg.peer(ui, opts, repopath)
2328 2329 if not repo.capable('known'):
2329 2330 raise util.Abort("known() not supported by target repository")
2330 2331 flags = repo.known([bin(s) for s in ids])
2331 2332 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2332 2333
2333 2334 @command('debuglabelcomplete', [], _('LABEL...'))
2334 2335 def debuglabelcomplete(ui, repo, *args):
2335 2336 '''complete "labels" - tags, open branch names, bookmark names'''
2336 2337
2337 2338 labels = set()
2338 2339 labels.update(t[0] for t in repo.tagslist())
2339 2340 labels.update(repo._bookmarks.keys())
2340 2341 labels.update(tag for (tag, heads, tip, closed)
2341 2342 in repo.branchmap().iterbranches() if not closed)
2342 2343 completions = set()
2343 2344 if not args:
2344 2345 args = ['']
2345 2346 for a in args:
2346 2347 completions.update(l for l in labels if l.startswith(a))
2347 2348 ui.write('\n'.join(sorted(completions)))
2348 2349 ui.write('\n')
2349 2350
2350 2351 @command('debuglocks',
2351 2352 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2352 2353 ('W', 'force-wlock', None,
2353 2354 _('free the working state lock (DANGEROUS)'))],
2354 2355 _('[OPTION]...'))
2355 2356 def debuglocks(ui, repo, **opts):
2356 2357 """show or modify state of locks
2357 2358
2358 2359 By default, this command will show which locks are held. This
2359 2360 includes the user and process holding the lock, the amount of time
2360 2361 the lock has been held, and the machine name where the process is
2361 2362 running if it's not local.
2362 2363
2363 2364 Locks protect the integrity of Mercurial's data, so should be
2364 2365 treated with care. System crashes or other interruptions may cause
2365 2366 locks to not be properly released, though Mercurial will usually
2366 2367 detect and remove such stale locks automatically.
2367 2368
2368 2369 However, detecting stale locks may not always be possible (for
2369 2370 instance, on a shared filesystem). Removing locks may also be
2370 2371 blocked by filesystem permissions.
2371 2372
2372 2373 Returns 0 if no locks are held.
2373 2374
2374 2375 """
2375 2376
2376 2377 if opts.get('force_lock'):
2377 2378 repo.svfs.unlink('lock')
2378 2379 if opts.get('force_wlock'):
2379 2380 repo.vfs.unlink('wlock')
2380 2381 if opts.get('force_lock') or opts.get('force_wlock'):
2381 2382 return 0
2382 2383
2383 2384 now = time.time()
2384 2385 held = 0
2385 2386
2386 2387 def report(vfs, name, method):
2387 2388 # this causes stale locks to get reaped for more accurate reporting
2388 2389 try:
2389 2390 l = method(False)
2390 2391 except error.LockHeld:
2391 2392 l = None
2392 2393
2393 2394 if l:
2394 2395 l.release()
2395 2396 else:
2396 2397 try:
2397 2398 stat = repo.svfs.lstat(name)
2398 2399 age = now - stat.st_mtime
2399 2400 user = util.username(stat.st_uid)
2400 2401 locker = vfs.readlock(name)
2401 2402 if ":" in locker:
2402 2403 host, pid = locker.split(':')
2403 2404 if host == socket.gethostname():
2404 2405 locker = 'user %s, process %s' % (user, pid)
2405 2406 else:
2406 2407 locker = 'user %s, process %s, host %s' \
2407 2408 % (user, pid, host)
2408 2409 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2409 2410 return 1
2410 2411 except OSError, e:
2411 2412 if e.errno != errno.ENOENT:
2412 2413 raise
2413 2414
2414 2415 ui.write("%-6s free\n" % (name + ":"))
2415 2416 return 0
2416 2417
2417 2418 held += report(repo.svfs, "lock", repo.lock)
2418 2419 held += report(repo.vfs, "wlock", repo.wlock)
2419 2420
2420 2421 return held
2421 2422
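# Illustrative usage (added comment, not in the original source):
#
#   hg debuglocks          # report both locks; exit status = locks still held
#   hg debuglocks -L -W    # forcibly remove the store and working-state locks
#
# The forced variants simply unlink 'lock'/'wlock' as shown above, so use them
# only when no other Mercurial process is running.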
2422 2423 @command('debugobsolete',
2423 2424 [('', 'flags', 0, _('markers flag')),
2424 2425 ('', 'record-parents', False,
2425 2426 _('record parent information for the precursor')),
2426 2427 ('r', 'rev', [], _('display markers relevant to REV')),
2427 2428 ] + commitopts2,
2428 2429 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2429 2430 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2430 2431 """create arbitrary obsolete marker
2431 2432
2432 2433 With no arguments, displays the list of obsolescence markers."""
2433 2434
2434 2435 def parsenodeid(s):
2435 2436 try:
2436 2437 # We do not use revsingle/revrange functions here to accept
2437 2438 # arbitrary node identifiers, possibly not present in the
2438 2439 # local repository.
2439 2440 n = bin(s)
2440 2441 if len(n) != len(nullid):
2441 2442 raise TypeError()
2442 2443 return n
2443 2444 except TypeError:
2444 2445 raise util.Abort('changeset references must be full hexadecimal '
2445 2446 'node identifiers')
2446 2447
2447 2448 if precursor is not None:
2448 2449 if opts['rev']:
2449 2450 raise util.Abort('cannot select revision when creating marker')
2450 2451 metadata = {}
2451 2452 metadata['user'] = opts['user'] or ui.username()
2452 2453 succs = tuple(parsenodeid(succ) for succ in successors)
2453 2454 l = repo.lock()
2454 2455 try:
2455 2456 tr = repo.transaction('debugobsolete')
2456 2457 try:
2457 2458 try:
2458 2459 date = opts.get('date')
2459 2460 if date:
2460 2461 date = util.parsedate(date)
2461 2462 else:
2462 2463 date = None
2463 2464 prec = parsenodeid(precursor)
2464 2465 parents = None
2465 2466 if opts['record_parents']:
2466 2467 if prec not in repo.unfiltered():
2467 2468 raise util.Abort('cannot use --record-parents on '
2468 2469 'unknown changesets')
2469 2470 parents = repo.unfiltered()[prec].parents()
2470 2471 parents = tuple(p.node() for p in parents)
2471 2472 repo.obsstore.create(tr, prec, succs, opts['flags'],
2472 2473 parents=parents, date=date,
2473 2474 metadata=metadata)
2474 2475 tr.close()
2475 2476 except ValueError, exc:
2476 2477 raise util.Abort(_('bad obsmarker input: %s') % exc)
2477 2478 finally:
2478 2479 tr.release()
2479 2480 finally:
2480 2481 l.release()
2481 2482 else:
2482 2483 if opts['rev']:
2483 2484 revs = scmutil.revrange(repo, opts['rev'])
2484 2485 nodes = [repo[r].node() for r in revs]
2485 2486 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2486 2487 markers.sort(key=lambda x: x._data)
2487 2488 else:
2488 2489 markers = obsolete.getmarkers(repo)
2489 2490
2490 2491 for m in markers:
2491 2492 cmdutil.showmarker(ui, m)
2492 2493
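# Illustrative usage (added comment, not in the original source); the node ids
# below are hypothetical placeholders and must be full hexadecimal identifiers:
#
#   hg debugobsolete                  # list every marker in the store
#   hg debugobsolete -r .             # markers relevant to the working parent
#   hg debugobsolete <precursor-id> <successor-id> -d '0 0'
#
# The last form records a marker saying <precursor-id> was rewritten into
# <successor-id>.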
2493 2494 @command('debugpathcomplete',
2494 2495 [('f', 'full', None, _('complete an entire path')),
2495 2496 ('n', 'normal', None, _('show only normal files')),
2496 2497 ('a', 'added', None, _('show only added files')),
2497 2498 ('r', 'removed', None, _('show only removed files'))],
2498 2499 _('FILESPEC...'))
2499 2500 def debugpathcomplete(ui, repo, *specs, **opts):
2500 2501 '''complete part or all of a tracked path
2501 2502
2502 2503 This command supports shells that offer path name completion. It
2503 2504 currently completes only files already known to the dirstate.
2504 2505
2505 2506 Completion extends only to the next path segment unless
2506 2507 --full is specified, in which case entire paths are used.'''
2507 2508
2508 2509 def complete(path, acceptable):
2509 2510 dirstate = repo.dirstate
2510 2511 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2511 2512 rootdir = repo.root + os.sep
2512 2513 if spec != repo.root and not spec.startswith(rootdir):
2513 2514 return [], []
2514 2515 if os.path.isdir(spec):
2515 2516 spec += '/'
2516 2517 spec = spec[len(rootdir):]
2517 2518 fixpaths = os.sep != '/'
2518 2519 if fixpaths:
2519 2520 spec = spec.replace(os.sep, '/')
2520 2521 speclen = len(spec)
2521 2522 fullpaths = opts['full']
2522 2523 files, dirs = set(), set()
2523 2524 adddir, addfile = dirs.add, files.add
2524 2525 for f, st in dirstate.iteritems():
2525 2526 if f.startswith(spec) and st[0] in acceptable:
2526 2527 if fixpaths:
2527 2528 f = f.replace('/', os.sep)
2528 2529 if fullpaths:
2529 2530 addfile(f)
2530 2531 continue
2531 2532 s = f.find(os.sep, speclen)
2532 2533 if s >= 0:
2533 2534 adddir(f[:s])
2534 2535 else:
2535 2536 addfile(f)
2536 2537 return files, dirs
2537 2538
2538 2539 acceptable = ''
2539 2540 if opts['normal']:
2540 2541 acceptable += 'nm'
2541 2542 if opts['added']:
2542 2543 acceptable += 'a'
2543 2544 if opts['removed']:
2544 2545 acceptable += 'r'
2545 2546 cwd = repo.getcwd()
2546 2547 if not specs:
2547 2548 specs = ['.']
2548 2549
2549 2550 files, dirs = set(), set()
2550 2551 for spec in specs:
2551 2552 f, d = complete(spec, acceptable or 'nmar')
2552 2553 files.update(f)
2553 2554 dirs.update(d)
2554 2555 files.update(dirs)
2555 2556 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2556 2557 ui.write('\n')
2557 2558
2558 2559 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2559 2560 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2560 2561 '''access the pushkey key/value protocol
2561 2562
2562 2563 With two args, list the keys in the given namespace.
2563 2564
2564 2565 With five args, set a key to new if it currently is set to old.
2565 2566 Reports success or failure.
2566 2567 '''
2567 2568
2568 2569 target = hg.peer(ui, {}, repopath)
2569 2570 if keyinfo:
2570 2571 key, old, new = keyinfo
2571 2572 r = target.pushkey(namespace, key, old, new)
2572 2573 ui.status(str(r) + '\n')
2573 2574 return not r
2574 2575 else:
2575 2576 for k, v in sorted(target.listkeys(namespace).iteritems()):
2576 2577 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2577 2578 v.encode('string-escape')))
2578 2579
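# Illustrative usage (added comment, not in the original source):
#
#   hg debugpushkey . namespaces                     # list key namespaces
#   hg debugpushkey . bookmarks                      # dump bookmark -> node
#   hg debugpushkey . bookmarks feature '' <node>    # publish a bookmark
#
# The five-argument form maps onto target.pushkey(namespace, key, old, new)
# above; <node> is a hypothetical full hex id.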
2579 2580 @command('debugpvec', [], _('A B'))
2580 2581 def debugpvec(ui, repo, a, b=None):
2581 2582 ca = scmutil.revsingle(repo, a)
2582 2583 cb = scmutil.revsingle(repo, b)
2583 2584 pa = pvec.ctxpvec(ca)
2584 2585 pb = pvec.ctxpvec(cb)
2585 2586 if pa == pb:
2586 2587 rel = "="
2587 2588 elif pa > pb:
2588 2589 rel = ">"
2589 2590 elif pa < pb:
2590 2591 rel = "<"
2591 2592 elif pa | pb:
2592 2593 rel = "|"
2593 2594 ui.write(_("a: %s\n") % pa)
2594 2595 ui.write(_("b: %s\n") % pb)
2595 2596 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2596 2597 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2597 2598 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2598 2599 pa.distance(pb), rel))
2599 2600
2600 2601 @command('debugrebuilddirstate|debugrebuildstate',
2601 2602 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2602 2603 _('[-r REV]'))
2603 2604 def debugrebuilddirstate(ui, repo, rev):
2604 2605 """rebuild the dirstate as it would look like for the given revision
2605 2606
2606 2607 If no revision is specified the first current parent will be used.
2607 2608
2608 2609 The dirstate will be set to the files of the given revision.
2609 2610 The actual working directory content or existing dirstate
2610 2611 information such as adds or removes is not considered.
2611 2612
2612 2613 One use of this command is to make the next :hg:`status` invocation
2613 2614 check the actual file content.
2614 2615 """
2615 2616 ctx = scmutil.revsingle(repo, rev)
2616 2617 wlock = repo.wlock()
2617 2618 try:
2618 2619 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2619 2620 finally:
2620 2621 wlock.release()
2621 2622
2622 2623 @command('debugrename',
2623 2624 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2624 2625 _('[-r REV] FILE'))
2625 2626 def debugrename(ui, repo, file1, *pats, **opts):
2626 2627 """dump rename information"""
2627 2628
2628 2629 ctx = scmutil.revsingle(repo, opts.get('rev'))
2629 2630 m = scmutil.match(ctx, (file1,) + pats, opts)
2630 2631 for abs in ctx.walk(m):
2631 2632 fctx = ctx[abs]
2632 2633 o = fctx.filelog().renamed(fctx.filenode())
2633 2634 rel = m.rel(abs)
2634 2635 if o:
2635 2636 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2636 2637 else:
2637 2638 ui.write(_("%s not renamed\n") % rel)
2638 2639
2639 2640 @command('debugrevlog',
2640 2641 [('c', 'changelog', False, _('open changelog')),
2641 2642 ('m', 'manifest', False, _('open manifest')),
2642 2643 ('d', 'dump', False, _('dump index data'))],
2643 2644 _('-c|-m|FILE'),
2644 2645 optionalrepo=True)
2645 2646 def debugrevlog(ui, repo, file_=None, **opts):
2646 2647 """show data and statistics about a revlog"""
2647 2648 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2648 2649
2649 2650 if opts.get("dump"):
2650 2651 numrevs = len(r)
2651 2652 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2652 2653 " rawsize totalsize compression heads chainlen\n")
2653 2654 ts = 0
2654 2655 heads = set()
2655 2656
2656 2657 for rev in xrange(numrevs):
2657 2658 dbase = r.deltaparent(rev)
2658 2659 if dbase == -1:
2659 2660 dbase = rev
2660 2661 cbase = r.chainbase(rev)
2661 2662 clen = r.chainlen(rev)
2662 2663 p1, p2 = r.parentrevs(rev)
2663 2664 rs = r.rawsize(rev)
2664 2665 ts = ts + rs
2665 2666 heads -= set(r.parentrevs(rev))
2666 2667 heads.add(rev)
2667 2668 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2668 2669 "%11d %5d %8d\n" %
2669 2670 (rev, p1, p2, r.start(rev), r.end(rev),
2670 2671 r.start(dbase), r.start(cbase),
2671 2672 r.start(p1), r.start(p2),
2672 2673 rs, ts, ts / r.end(rev), len(heads), clen))
2673 2674 return 0
2674 2675
2675 2676 v = r.version
2676 2677 format = v & 0xFFFF
2677 2678 flags = []
2678 2679 gdelta = False
2679 2680 if v & revlog.REVLOGNGINLINEDATA:
2680 2681 flags.append('inline')
2681 2682 if v & revlog.REVLOGGENERALDELTA:
2682 2683 gdelta = True
2683 2684 flags.append('generaldelta')
2684 2685 if not flags:
2685 2686 flags = ['(none)']
2686 2687
2687 2688 nummerges = 0
2688 2689 numfull = 0
2689 2690 numprev = 0
2690 2691 nump1 = 0
2691 2692 nump2 = 0
2692 2693 numother = 0
2693 2694 nump1prev = 0
2694 2695 nump2prev = 0
2695 2696 chainlengths = []
2696 2697
2697 2698 datasize = [None, 0, 0L]
2698 2699 fullsize = [None, 0, 0L]
2699 2700 deltasize = [None, 0, 0L]
2700 2701
2701 2702 def addsize(size, l):
2702 2703 if l[0] is None or size < l[0]:
2703 2704 l[0] = size
2704 2705 if size > l[1]:
2705 2706 l[1] = size
2706 2707 l[2] += size
2707 2708
2708 2709 numrevs = len(r)
2709 2710 for rev in xrange(numrevs):
2710 2711 p1, p2 = r.parentrevs(rev)
2711 2712 delta = r.deltaparent(rev)
2712 2713 if format > 0:
2713 2714 addsize(r.rawsize(rev), datasize)
2714 2715 if p2 != nullrev:
2715 2716 nummerges += 1
2716 2717 size = r.length(rev)
2717 2718 if delta == nullrev:
2718 2719 chainlengths.append(0)
2719 2720 numfull += 1
2720 2721 addsize(size, fullsize)
2721 2722 else:
2722 2723 chainlengths.append(chainlengths[delta] + 1)
2723 2724 addsize(size, deltasize)
2724 2725 if delta == rev - 1:
2725 2726 numprev += 1
2726 2727 if delta == p1:
2727 2728 nump1prev += 1
2728 2729 elif delta == p2:
2729 2730 nump2prev += 1
2730 2731 elif delta == p1:
2731 2732 nump1 += 1
2732 2733 elif delta == p2:
2733 2734 nump2 += 1
2734 2735 elif delta != nullrev:
2735 2736 numother += 1
2736 2737
2737 2738 # Adjust size min value for empty cases
2738 2739 for size in (datasize, fullsize, deltasize):
2739 2740 if size[0] is None:
2740 2741 size[0] = 0
2741 2742
2742 2743 numdeltas = numrevs - numfull
2743 2744 numoprev = numprev - nump1prev - nump2prev
2744 2745 totalrawsize = datasize[2]
2745 2746 datasize[2] /= numrevs
2746 2747 fulltotal = fullsize[2]
2747 2748 fullsize[2] /= numfull
2748 2749 deltatotal = deltasize[2]
2749 2750 if numrevs - numfull > 0:
2750 2751 deltasize[2] /= numrevs - numfull
2751 2752 totalsize = fulltotal + deltatotal
2752 2753 avgchainlen = sum(chainlengths) / numrevs
2753 2754 compratio = totalrawsize / totalsize
2754 2755
2755 2756 basedfmtstr = '%%%dd\n'
2756 2757 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2757 2758
2758 2759 def dfmtstr(max):
2759 2760 return basedfmtstr % len(str(max))
2760 2761 def pcfmtstr(max, padding=0):
2761 2762 return basepcfmtstr % (len(str(max)), ' ' * padding)
2762 2763
2763 2764 def pcfmt(value, total):
2764 2765 return (value, 100 * float(value) / total)
2765 2766
2766 2767 ui.write(('format : %d\n') % format)
2767 2768 ui.write(('flags : %s\n') % ', '.join(flags))
2768 2769
2769 2770 ui.write('\n')
2770 2771 fmt = pcfmtstr(totalsize)
2771 2772 fmt2 = dfmtstr(totalsize)
2772 2773 ui.write(('revisions : ') + fmt2 % numrevs)
2773 2774 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2774 2775 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2775 2776 ui.write(('revisions : ') + fmt2 % numrevs)
2776 2777 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2777 2778 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2778 2779 ui.write(('revision size : ') + fmt2 % totalsize)
2779 2780 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2780 2781 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2781 2782
2782 2783 ui.write('\n')
2783 2784 fmt = dfmtstr(max(avgchainlen, compratio))
2784 2785 ui.write(('avg chain length : ') + fmt % avgchainlen)
2785 2786 ui.write(('compression ratio : ') + fmt % compratio)
2786 2787
2787 2788 if format > 0:
2788 2789 ui.write('\n')
2789 2790 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2790 2791 % tuple(datasize))
2791 2792 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2792 2793 % tuple(fullsize))
2793 2794 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2794 2795 % tuple(deltasize))
2795 2796
2796 2797 if numdeltas > 0:
2797 2798 ui.write('\n')
2798 2799 fmt = pcfmtstr(numdeltas)
2799 2800 fmt2 = pcfmtstr(numdeltas, 4)
2800 2801 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2801 2802 if numprev > 0:
2802 2803 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2803 2804 numprev))
2804 2805 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2805 2806 numprev))
2806 2807 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2807 2808 numprev))
2808 2809 if gdelta:
2809 2810 ui.write(('deltas against p1 : ')
2810 2811 + fmt % pcfmt(nump1, numdeltas))
2811 2812 ui.write(('deltas against p2 : ')
2812 2813 + fmt % pcfmt(nump2, numdeltas))
2813 2814 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2814 2815 numdeltas))
2815 2816
2816 2817 @command('debugrevspec',
2817 2818 [('', 'optimize', None, _('print parsed tree after optimizing'))],
2818 2819 ('REVSPEC'))
2819 2820 def debugrevspec(ui, repo, expr, **opts):
2820 2821 """parse and apply a revision specification
2821 2822
2822 2823 Use --verbose to print the parsed tree before and after aliases
2823 2824 expansion.
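
For example, to see how an expression is parsed and which revisions it
selects (an illustrative invocation; any revset expression works here)::

hg debugrevspec --verbose "branch(default) and not merge()"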
2824 2825 """
2825 2826 if ui.verbose:
2826 2827 tree = revset.parse(expr)[0]
2827 2828 ui.note(revset.prettyformat(tree), "\n")
2828 2829 newtree = revset.findaliases(ui, tree)
2829 2830 if newtree != tree:
2830 2831 ui.note(revset.prettyformat(newtree), "\n")
2831 2832 if opts["optimize"]:
2832 2833 weight, optimizedtree = revset.optimize(newtree, True)
2833 2834 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
2834 2835 func = revset.match(ui, expr)
2835 2836 for c in func(repo, revset.spanset(repo)):
2836 2837 ui.write("%s\n" % c)
2837 2838
2838 2839 @command('debugsetparents', [], _('REV1 [REV2]'))
2839 2840 def debugsetparents(ui, repo, rev1, rev2=None):
2840 2841 """manually set the parents of the current working directory
2841 2842
2842 2843 This is useful for writing repository conversion tools, but should
2843 2844 be used with care.
2844 2845
2845 2846 Returns 0 on success.
2846 2847 """
2847 2848
2848 2849 r1 = scmutil.revsingle(repo, rev1).node()
2849 2850 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2850 2851
2851 2852 wlock = repo.wlock()
2852 2853 try:
2853 2854 repo.dirstate.beginparentchange()
2854 2855 repo.setparents(r1, r2)
2855 2856 repo.dirstate.endparentchange()
2856 2857 finally:
2857 2858 wlock.release()
2858 2859
2859 2860 @command('debugdirstate|debugstate',
2860 2861 [('', 'nodates', None, _('do not display the saved mtime')),
2861 2862 ('', 'datesort', None, _('sort by saved mtime'))],
2862 2863 _('[OPTION]...'))
2863 2864 def debugstate(ui, repo, nodates=None, datesort=None):
2864 2865 """show the contents of the current dirstate"""
2865 2866 timestr = ""
2866 2867 showdate = not nodates
2867 2868 if datesort:
2868 2869 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2869 2870 else:
2870 2871 keyfunc = None # sort by filename
2871 2872 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2872 2873 if showdate:
2873 2874 if ent[3] == -1:
2874 2875 # Pad or slice to locale representation
2875 2876 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2876 2877 time.localtime(0)))
2877 2878 timestr = 'unset'
2878 2879 timestr = (timestr[:locale_len] +
2879 2880 ' ' * (locale_len - len(timestr)))
2880 2881 else:
2881 2882 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2882 2883 time.localtime(ent[3]))
2883 2884 if ent[1] & 020000:
2884 2885 mode = 'lnk'
2885 2886 else:
2886 2887 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2887 2888 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2888 2889 for f in repo.dirstate.copies():
2889 2890 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2890 2891
2891 2892 @command('debugsub',
2892 2893 [('r', 'rev', '',
2893 2894 _('revision to check'), _('REV'))],
2894 2895 _('[-r REV] [REV]'))
2895 2896 def debugsub(ui, repo, rev=None):
2896 2897 ctx = scmutil.revsingle(repo, rev, None)
2897 2898 for k, v in sorted(ctx.substate.items()):
2898 2899 ui.write(('path %s\n') % k)
2899 2900 ui.write((' source %s\n') % v[0])
2900 2901 ui.write((' revision %s\n') % v[1])
2901 2902
2902 2903 @command('debugsuccessorssets',
2903 2904 [],
2904 2905 _('[REV]'))
2905 2906 def debugsuccessorssets(ui, repo, *revs):
2906 2907 """show set of successors for revision
2907 2908
2908 2909 A successors set of changeset A is a consistent group of revisions that
2909 2910 succeed A. It contains non-obsolete changesets only.
2910 2911
2911 2912 In most cases a changeset A has a single successors set containing a single
2912 2913 successor (changeset A replaced by A').
2913 2914
2914 2915 A changeset that is made obsolete with no successors is called "pruned".
2915 2916 Such changesets have no successors sets at all.
2916 2917
2917 2918 A changeset that has been "split" will have a successors set containing
2918 2919 more than one successor.
2919 2920
2920 2921 A changeset that has been rewritten in multiple different ways is called
2921 2922 "divergent". Such changesets have multiple successor sets (each of which
2922 2923 may also be split, i.e. have multiple successors).
2923 2924
2924 2925 Results are displayed as follows::
2925 2926
2926 2927 <rev1>
2927 2928 <successors-1A>
2928 2929 <rev2>
2929 2930 <successors-2A>
2930 2931 <successors-2B1> <successors-2B2> <successors-2B3>
2931 2932
2932 2933 Here rev2 has two possible (i.e. divergent) successors sets. The first
2933 2934 holds one element, whereas the second holds three (i.e. the changeset has
2934 2935 been split).
2935 2936 """
2936 2937 # passed to successorssets caching computation from one call to another
2937 2938 cache = {}
2938 2939 ctx2str = str
2939 2940 node2str = short
2940 2941 if ui.debug():
2941 2942 def ctx2str(ctx):
2942 2943 return ctx.hex()
2943 2944 node2str = hex
2944 2945 for rev in scmutil.revrange(repo, revs):
2945 2946 ctx = repo[rev]
2946 2947 ui.write('%s\n'% ctx2str(ctx))
2947 2948 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2948 2949 if succsset:
2949 2950 ui.write(' ')
2950 2951 ui.write(node2str(succsset[0]))
2951 2952 for node in succsset[1:]:
2952 2953 ui.write(' ')
2953 2954 ui.write(node2str(node))
2954 2955 ui.write('\n')
2955 2956
2956 2957 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
2957 2958 def debugwalk(ui, repo, *pats, **opts):
2958 2959 """show how files match on given patterns"""
2959 2960 m = scmutil.match(repo[None], pats, opts)
2960 2961 items = list(repo.walk(m))
2961 2962 if not items:
2962 2963 return
2963 2964 f = lambda fn: fn
2964 2965 if ui.configbool('ui', 'slash') and os.sep != '/':
2965 2966 f = lambda fn: util.normpath(fn)
2966 2967 fmt = 'f %%-%ds %%-%ds %%s' % (
2967 2968 max([len(abs) for abs in items]),
2968 2969 max([len(m.rel(abs)) for abs in items]))
2969 2970 for abs in items:
2970 2971 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2971 2972 ui.write("%s\n" % line.rstrip())
2972 2973
2973 2974 @command('debugwireargs',
2974 2975 [('', 'three', '', 'three'),
2975 2976 ('', 'four', '', 'four'),
2976 2977 ('', 'five', '', 'five'),
2977 2978 ] + remoteopts,
2978 2979 _('REPO [OPTIONS]... [ONE [TWO]]'),
2979 2980 norepo=True)
2980 2981 def debugwireargs(ui, repopath, *vals, **opts):
2981 2982 repo = hg.peer(ui, opts, repopath)
2982 2983 for opt in remoteopts:
2983 2984 del opts[opt[1]]
2984 2985 args = {}
2985 2986 for k, v in opts.iteritems():
2986 2987 if v:
2987 2988 args[k] = v
2988 2989 # run twice to check that we don't mess up the stream for the next command
2989 2990 res1 = repo.debugwireargs(*vals, **args)
2990 2991 res2 = repo.debugwireargs(*vals, **args)
2991 2992 ui.write("%s\n" % res1)
2992 2993 if res1 != res2:
2993 2994 ui.warn("%s\n" % res2)
2994 2995
2995 2996 @command('^diff',
2996 2997 [('r', 'rev', [], _('revision'), _('REV')),
2997 2998 ('c', 'change', '', _('change made by revision'), _('REV'))
2998 2999 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2999 3000 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
3000 3001 inferrepo=True)
3001 3002 def diff(ui, repo, *pats, **opts):
3002 3003 """diff repository (or selected files)
3003 3004
3004 3005 Show differences between revisions for the specified files.
3005 3006
3006 3007 Differences between files are shown using the unified diff format.
3007 3008
3008 3009 .. note::
3009 3010
3010 3011 diff may generate unexpected results for merges, as it will
3011 3012 default to comparing against the working directory's first
3012 3013 parent changeset if no revisions are specified.
3013 3014
3014 3015 When two revision arguments are given, changes are shown
3015 3016 between those revisions. If only one revision is specified, then
3016 3017 that revision is compared to the working directory, and, when no
3017 3018 revisions are specified, the working directory files are compared
3018 3019 to its parent.
3019 3020
3020 3021 Alternatively you can specify -c/--change with a revision to see
3021 3022 the changes in that changeset relative to its first parent.
3022 3023
3023 3024 Without the -a/--text option, diff will avoid generating diffs of
3024 3025 files it detects as binary. With -a, diff will generate a diff
3025 3026 anyway, probably with undesirable results.
3026 3027
3027 3028 Use the -g/--git option to generate diffs in the git extended diff
3028 3029 format. For more information, read :hg:`help diffs`.
3029 3030
3030 3031 .. container:: verbose
3031 3032
3032 3033 Examples:
3033 3034
3034 3035 - compare a file in the current working directory to its parent::
3035 3036
3036 3037 hg diff foo.c
3037 3038
3038 3039 - compare two historical versions of a directory, with rename info::
3039 3040
3040 3041 hg diff --git -r 1.0:1.2 lib/
3041 3042
3042 3043 - get change stats relative to the last change on some date::
3043 3044
3044 3045 hg diff --stat -r "date('may 2')"
3045 3046
3046 3047 - diff all newly-added files that contain a keyword::
3047 3048
3048 3049 hg diff "set:added() and grep(GNU)"
3049 3050
3050 3051 - compare a revision and its parents::
3051 3052
3052 3053 hg diff -c 9353 # compare against first parent
3053 3054 hg diff -r 9353^:9353 # same using revset syntax
3054 3055 hg diff -r 9353^2:9353 # compare against the second parent
3055 3056
3056 3057 Returns 0 on success.
3057 3058 """
3058 3059
3059 3060 revs = opts.get('rev')
3060 3061 change = opts.get('change')
3061 3062 stat = opts.get('stat')
3062 3063 reverse = opts.get('reverse')
3063 3064
3064 3065 if revs and change:
3065 3066 msg = _('cannot specify --rev and --change at the same time')
3066 3067 raise util.Abort(msg)
3067 3068 elif change:
3068 3069 node2 = scmutil.revsingle(repo, change, None).node()
3069 3070 node1 = repo[node2].p1().node()
3070 3071 else:
3071 3072 node1, node2 = scmutil.revpair(repo, revs)
3072 3073
3073 3074 if reverse:
3074 3075 node1, node2 = node2, node1
3075 3076
3076 3077 diffopts = patch.diffallopts(ui, opts)
3077 3078 m = scmutil.match(repo[node2], pats, opts)
3078 3079 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
3079 3080 listsubrepos=opts.get('subrepos'))
3080 3081
3081 3082 @command('^export',
3082 3083 [('o', 'output', '',
3083 3084 _('print output to file with formatted name'), _('FORMAT')),
3084 3085 ('', 'switch-parent', None, _('diff against the second parent')),
3085 3086 ('r', 'rev', [], _('revisions to export'), _('REV')),
3086 3087 ] + diffopts,
3087 3088 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
3088 3089 def export(ui, repo, *changesets, **opts):
3089 3090 """dump the header and diffs for one or more changesets
3090 3091
3091 3092 Print the changeset header and diffs for one or more revisions.
3092 3093 If no revision is given, the parent of the working directory is used.
3093 3094
3094 3095 The information shown in the changeset header is: author, date,
3095 3096 branch name (if non-default), changeset hash, parent(s) and commit
3096 3097 comment.
3097 3098
3098 3099 .. note::
3099 3100
3100 3101 export may generate unexpected diff output for merge
3101 3102 changesets, as it will compare the merge changeset against its
3102 3103 first parent only.
3103 3104
3104 3105 Output may be to a file, in which case the name of the file is
3105 3106 given using a format string. The formatting rules are as follows:
3106 3107
3107 3108 :``%%``: literal "%" character
3108 3109 :``%H``: changeset hash (40 hexadecimal digits)
3109 3110 :``%N``: number of patches being generated
3110 3111 :``%R``: changeset revision number
3111 3112 :``%b``: basename of the exporting repository
3112 3113 :``%h``: short-form changeset hash (12 hexadecimal digits)
3113 3114 :``%m``: first line of the commit message (only alphanumeric characters)
3114 3115 :``%n``: zero-padded sequence number, starting at 1
3115 3116 :``%r``: zero-padded changeset revision number
3116 3117
3117 3118 Without the -a/--text option, export will avoid generating diffs
3118 3119 of files it detects as binary. With -a, export will generate a
3119 3120 diff anyway, probably with undesirable results.
3120 3121
3121 3122 Use the -g/--git option to generate diffs in the git extended diff
3122 3123 format. See :hg:`help diffs` for more information.
3123 3124
3124 3125 With the --switch-parent option, the diff will be against the
3125 3126 second parent. It can be useful to review a merge.
3126 3127
3127 3128 .. container:: verbose
3128 3129
3129 3130 Examples:
3130 3131
3131 3132 - use export and import to transplant a bugfix to the current
3132 3133 branch::
3133 3134
3134 3135 hg export -r 9353 | hg import -
3135 3136
3136 3137 - export all the changesets between two revisions to a file with
3137 3138 rename information::
3138 3139
3139 3140 hg export --git -r 123:150 > changes.txt
3140 3141
3141 3142 - split outgoing changes into a series of patches with
3142 3143 descriptive names::
3143 3144
3144 3145 hg export -r "outgoing()" -o "%n-%m.patch"
3145 3146
3146 3147 Returns 0 on success.
3147 3148 """
3148 3149 changesets += tuple(opts.get('rev', []))
3149 3150 if not changesets:
3150 3151 changesets = ['.']
3151 3152 revs = scmutil.revrange(repo, changesets)
3152 3153 if not revs:
3153 3154 raise util.Abort(_("export requires at least one changeset"))
3154 3155 if len(revs) > 1:
3155 3156 ui.note(_('exporting patches:\n'))
3156 3157 else:
3157 3158 ui.note(_('exporting patch:\n'))
3158 3159 cmdutil.export(repo, revs, template=opts.get('output'),
3159 3160 switch_parent=opts.get('switch_parent'),
3160 3161 opts=patch.diffopts(ui, opts))
3161 3162
3162 3163 @command('files',
3163 3164 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3164 3165 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3165 3166 ] + walkopts + formatteropts,
3166 3167 _('[OPTION]... [PATTERN]...'))
3167 3168 def files(ui, repo, *pats, **opts):
3168 3169 """list tracked files
3169 3170
3170 3171 Print files under Mercurial control in the working directory or
3171 3172 specified revision whose names match the given patterns (excluding
3172 3173 removed files).
3173 3174
3174 3175 If no patterns are given to match, this command prints the names
3175 3176 of all files under Mercurial control in the working copy.
3176 3177
3177 3178 .. container:: verbose
3178 3179
3179 3180 Examples:
3180 3181
3181 3182 - list all files under the current directory::
3182 3183
3183 3184 hg files .
3184 3185
3185 3186 - show sizes and flags for the current revision::
3186 3187
3187 3188 hg files -vr .
3188 3189
3189 3190 - list all files named README::
3190 3191
3191 3192 hg files -I "**/README"
3192 3193
3193 3194 - list all binary files::
3194 3195
3195 3196 hg files "set:binary()"
3196 3197
3197 3198 - find files containing a regular expression::
3198 3199
3199 3200 hg files "set:grep('bob')"
3200 3201
3201 3202 - search tracked file contents with xargs and grep::
3202 3203
3203 3204 hg files -0 | xargs -0 grep foo
3204 3205
3205 3206 See :hg:`help patterns` and :hg:`help filesets` for more information
3206 3207 on specifying file patterns.
3207 3208
3208 3209 Returns 0 if a match is found, 1 otherwise.
3209 3210
3210 3211 """
3211 3212 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3212 3213 rev = ctx.rev()
3213 3214 ret = 1
3214 3215
3215 3216 end = '\n'
3216 3217 if opts.get('print0'):
3217 3218 end = '\0'
3218 3219 fm = ui.formatter('files', opts)
3219 3220 fmt = '%s' + end
3220 3221
3221 3222 m = scmutil.match(ctx, pats, opts)
3222 3223 ds = repo.dirstate
3223 3224 for f in ctx.matches(m):
3224 3225 if rev is None and ds[f] == 'r':
3225 3226 continue
3226 3227 fm.startitem()
3227 3228 if ui.verbose:
3228 3229 fc = ctx[f]
3229 3230 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
3230 3231 fm.data(abspath=f)
3231 3232 fm.write('path', fmt, m.rel(f))
3232 3233 ret = 0
3233 3234
3234 3235 fm.end()
3235 3236
3236 3237 return ret
3237 3238
3238 3239 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3239 3240 def forget(ui, repo, *pats, **opts):
3240 3241 """forget the specified files on the next commit
3241 3242
3242 3243 Mark the specified files so they will no longer be tracked
3243 3244 after the next commit.
3244 3245
3245 3246 This only removes files from the current branch, not from the
3246 3247 entire project history, and it does not delete them from the
3247 3248 working directory.
3248 3249
3249 3250 To undo a forget before the next commit, see :hg:`add`.
3250 3251
3251 3252 .. container:: verbose
3252 3253
3253 3254 Examples:
3254 3255
3255 3256 - forget newly-added binary files::
3256 3257
3257 3258 hg forget "set:added() and binary()"
3258 3259
3259 3260 - forget files that would be excluded by .hgignore::
3260 3261
3261 3262 hg forget "set:hgignore()"
3262 3263
3263 3264 Returns 0 on success.
3264 3265 """
3265 3266
3266 3267 if not pats:
3267 3268 raise util.Abort(_('no files specified'))
3268 3269
3269 3270 m = scmutil.match(repo[None], pats, opts)
3270 3271 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3271 3272 return rejected and 1 or 0
3272 3273
3273 3274 @command(
3274 3275 'graft',
3275 3276 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3276 3277 ('c', 'continue', False, _('resume interrupted graft')),
3277 3278 ('e', 'edit', False, _('invoke editor on commit messages')),
3278 3279 ('', 'log', None, _('append graft info to log message')),
3279 3280 ('f', 'force', False, _('force graft')),
3280 3281 ('D', 'currentdate', False,
3281 3282 _('record the current date as commit date')),
3282 3283 ('U', 'currentuser', False,
3283 3284 _('record the current user as committer'))]
3284 3285 + commitopts2 + mergetoolopts + dryrunopts,
3285 3286 _('[OPTION]... [-r] REV...'))
3286 3287 def graft(ui, repo, *revs, **opts):
3287 3288 '''copy changes from other branches onto the current branch
3288 3289
3289 3290 This command uses Mercurial's merge logic to copy individual
3290 3291 changes from other branches without merging branches in the
3291 3292 history graph. This is sometimes known as 'backporting' or
3292 3293 'cherry-picking'. By default, graft will copy user, date, and
3293 3294 description from the source changesets.
3294 3295
3295 3296 Changesets that are ancestors of the current revision, that have
3296 3297 already been grafted, or that are merges will be skipped.
3297 3298
3298 3299 If --log is specified, log messages will have a comment appended
3299 3300 of the form::
3300 3301
3301 3302 (grafted from CHANGESETHASH)
3302 3303
3303 3304 If --force is specified, revisions will be grafted even if they
3304 3305 are already ancestors of or have been grafted to the destination.
3305 3306 This is useful when the revisions have since been backed out.
3306 3307
3307 3308 If a graft merge results in conflicts, the graft process is
3308 3309 interrupted so that the current merge can be manually resolved.
3309 3310 Once all conflicts are addressed, the graft process can be
3310 3311 continued with the -c/--continue option.
3311 3312
3312 3313 .. note::
3313 3314
3314 3315 The -c/--continue option does not reapply earlier options, except
3315 3316 for --force.
3316 3317
3317 3318 .. container:: verbose
3318 3319
3319 3320 Examples:
3320 3321
3321 3322 - copy a single change to the stable branch and edit its description::
3322 3323
3323 3324 hg update stable
3324 3325 hg graft --edit 9393
3325 3326
3326 3327 - graft a range of changesets with one exception, updating dates::
3327 3328
3328 3329 hg graft -D "2085::2093 and not 2091"
3329 3330
3330 3331 - continue a graft after resolving conflicts::
3331 3332
3332 3333 hg graft -c
3333 3334
3334 3335 - show the source of a grafted changeset::
3335 3336
3336 3337 hg log --debug -r .
3337 3338
3338 3339 See :hg:`help revisions` and :hg:`help revsets` for more about
3339 3340 specifying revisions.
3340 3341
3341 3342 Returns 0 on successful completion.
3342 3343 '''
3343 3344
3344 3345 revs = list(revs)
3345 3346 revs.extend(opts['rev'])
3346 3347
3347 3348 if not opts.get('user') and opts.get('currentuser'):
3348 3349 opts['user'] = ui.username()
3349 3350 if not opts.get('date') and opts.get('currentdate'):
3350 3351 opts['date'] = "%d %d" % util.makedate()
3351 3352
3352 3353 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3353 3354
3354 3355 cont = False
3355 3356 if opts['continue']:
3356 3357 cont = True
3357 3358 if revs:
3358 3359 raise util.Abort(_("can't specify --continue and revisions"))
3359 3360 # read in unfinished revisions
3360 3361 try:
3361 3362 nodes = repo.opener.read('graftstate').splitlines()
3362 3363 revs = [repo[node].rev() for node in nodes]
3363 3364 except IOError, inst:
3364 3365 if inst.errno != errno.ENOENT:
3365 3366 raise
3366 3367 raise util.Abort(_("no graft state found, can't continue"))
3367 3368 else:
3368 3369 cmdutil.checkunfinished(repo)
3369 3370 cmdutil.bailifchanged(repo)
3370 3371 if not revs:
3371 3372 raise util.Abort(_('no revisions specified'))
3372 3373 revs = scmutil.revrange(repo, revs)
3373 3374
3374 3375 skipped = set()
3375 3376 # check for merges
3376 3377 for rev in repo.revs('%ld and merge()', revs):
3377 3378 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3378 3379 skipped.add(rev)
3379 3380 revs = [r for r in revs if r not in skipped]
3380 3381 if not revs:
3381 3382 return -1
3382 3383
3383 3384 # Don't check in the --continue case, in effect retaining --force across
3384 3385 # --continues. That's because without --force, any revisions we decided to
3385 3386 # skip would have been filtered out here, so they wouldn't have made their
3386 3387 # way to the graftstate. With --force, any revisions we would have otherwise
3387 3388 # skipped would not have been filtered out, and if they hadn't been applied
3388 3389 # already, they'd have been in the graftstate.
3389 3390 if not (cont or opts.get('force')):
3390 3391 # check for ancestors of dest branch
3391 3392 crev = repo['.'].rev()
3392 3393 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3393 3394 # Cannot use x.remove(y) on smart set, this has to be a list.
3394 3395 # XXX make this lazy in the future
3395 3396 revs = list(revs)
3396 3397 # don't mutate while iterating, create a copy
3397 3398 for rev in list(revs):
3398 3399 if rev in ancestors:
3399 3400 ui.warn(_('skipping ancestor revision %d:%s\n') %
3400 3401 (rev, repo[rev]))
3401 3402 # XXX remove on list is slow
3402 3403 revs.remove(rev)
3403 3404 if not revs:
3404 3405 return -1
3405 3406
3406 3407 # analyze revs for earlier grafts
3407 3408 ids = {}
3408 3409 for ctx in repo.set("%ld", revs):
3409 3410 ids[ctx.hex()] = ctx.rev()
3410 3411 n = ctx.extra().get('source')
3411 3412 if n:
3412 3413 ids[n] = ctx.rev()
3413 3414
3414 3415 # check ancestors for earlier grafts
3415 3416 ui.debug('scanning for duplicate grafts\n')
3416 3417
3417 3418 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3418 3419 ctx = repo[rev]
3419 3420 n = ctx.extra().get('source')
3420 3421 if n in ids:
3421 3422 try:
3422 3423 r = repo[n].rev()
3423 3424 except error.RepoLookupError:
3424 3425 r = None
3425 3426 if r in revs:
3426 3427 ui.warn(_('skipping revision %d:%s '
3427 3428 '(already grafted to %d:%s)\n')
3428 3429 % (r, repo[r], rev, ctx))
3429 3430 revs.remove(r)
3430 3431 elif ids[n] in revs:
3431 3432 if r is None:
3432 3433 ui.warn(_('skipping already grafted revision %d:%s '
3433 3434 '(%d:%s also has unknown origin %s)\n')
3434 3435 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
3435 3436 else:
3436 3437 ui.warn(_('skipping already grafted revision %d:%s '
3437 3438 '(%d:%s also has origin %d:%s)\n')
3438 3439 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
3439 3440 revs.remove(ids[n])
3440 3441 elif ctx.hex() in ids:
3441 3442 r = ids[ctx.hex()]
3442 3443 ui.warn(_('skipping already grafted revision %d:%s '
3443 3444 '(was grafted from %d:%s)\n') %
3444 3445 (r, repo[r], rev, ctx))
3445 3446 revs.remove(r)
3446 3447 if not revs:
3447 3448 return -1
3448 3449
3449 3450 wlock = repo.wlock()
3450 3451 try:
3451 3452 for pos, ctx in enumerate(repo.set("%ld", revs)):
3452 3453 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
3453 3454 ctx.description().split('\n', 1)[0])
3454 3455 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3455 3456 if names:
3456 3457 desc += ' (%s)' % ' '.join(names)
3457 3458 ui.status(_('grafting %s\n') % desc)
3458 3459 if opts.get('dry_run'):
3459 3460 continue
3460 3461
3461 3462 source = ctx.extra().get('source')
3462 3463 if not source:
3463 3464 source = ctx.hex()
3464 3465 extra = {'source': source}
3465 3466 user = ctx.user()
3466 3467 if opts.get('user'):
3467 3468 user = opts['user']
3468 3469 date = ctx.date()
3469 3470 if opts.get('date'):
3470 3471 date = opts['date']
3471 3472 message = ctx.description()
3472 3473 if opts.get('log'):
3473 3474 message += '\n(grafted from %s)' % ctx.hex()
3474 3475
3475 3476 # we don't merge the first commit when continuing
3476 3477 if not cont:
3477 3478 # perform the graft merge with p1(rev) as 'ancestor'
3478 3479 try:
3479 3480 # ui.forcemerge is an internal variable, do not document
3480 3481 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3481 3482 'graft')
3482 3483 stats = mergemod.graft(repo, ctx, ctx.p1(),
3483 3484 ['local', 'graft'])
3484 3485 finally:
3485 3486 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3486 3487 # report any conflicts
3487 3488 if stats and stats[3] > 0:
3488 3489 # write out state for --continue
3489 3490 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3490 3491 repo.opener.write('graftstate', ''.join(nodelines))
3491 3492 raise util.Abort(
3492 3493 _("unresolved conflicts, can't continue"),
3493 3494 hint=_('use hg resolve and hg graft --continue'))
3494 3495 else:
3495 3496 cont = False
3496 3497
3497 3498 # commit
3498 3499 node = repo.commit(text=message, user=user,
3499 3500 date=date, extra=extra, editor=editor)
3500 3501 if node is None:
3501 3502 ui.warn(
3502 3503 _('note: graft of %d:%s created no changes to commit\n') %
3503 3504 (ctx.rev(), ctx))
3504 3505 finally:
3505 3506 wlock.release()
3506 3507
3507 3508 # remove state when we complete successfully
3508 3509 if not opts.get('dry_run'):
3509 3510 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3510 3511
3511 3512 return 0
3512 3513
3513 3514 @command('grep',
3514 3515 [('0', 'print0', None, _('end fields with NUL')),
3515 3516 ('', 'all', None, _('print all revisions that match')),
3516 3517 ('a', 'text', None, _('treat all files as text')),
3517 3518 ('f', 'follow', None,
3518 3519 _('follow changeset history,'
3519 3520 ' or file history across copies and renames')),
3520 3521 ('i', 'ignore-case', None, _('ignore case when matching')),
3521 3522 ('l', 'files-with-matches', None,
3522 3523 _('print only filenames and revisions that match')),
3523 3524 ('n', 'line-number', None, _('print matching line numbers')),
3524 3525 ('r', 'rev', [],
3525 3526 _('only search files changed within revision range'), _('REV')),
3526 3527 ('u', 'user', None, _('list the author (long with -v)')),
3527 3528 ('d', 'date', None, _('list the date (short with -q)')),
3528 3529 ] + walkopts,
3529 3530 _('[OPTION]... PATTERN [FILE]...'),
3530 3531 inferrepo=True)
3531 3532 def grep(ui, repo, pattern, *pats, **opts):
3532 3533 """search for a pattern in specified files and revisions
3533 3534
3534 3535 Search revisions of files for a regular expression.
3535 3536
3536 3537 This command behaves differently than Unix grep. It only accepts
3537 3538 Python/Perl regexps. It searches repository history, not the
3538 3539 working directory. It always prints the revision number in which a
3539 3540 match appears.
3540 3541
3541 3542 By default, grep only prints output for the first revision of a
3542 3543 file in which it finds a match. To get it to print every revision
3543 3544 that contains a change in match status ("-" for a match that
3544 3545 becomes a non-match, or "+" for a non-match that becomes a match),
3545 3546 use the --all flag.
3546 3547
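For example, to list every revision in which a line matching "TODO" was
added to or removed from any tracked file, along with the line numbers::

hg grep --all -n TODO
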
3547 3548 Returns 0 if a match is found, 1 otherwise.
3548 3549 """
3549 3550 reflags = re.M
3550 3551 if opts.get('ignore_case'):
3551 3552 reflags |= re.I
3552 3553 try:
3553 3554 regexp = util.re.compile(pattern, reflags)
3554 3555 except re.error, inst:
3555 3556 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3556 3557 return 1
3557 3558 sep, eol = ':', '\n'
3558 3559 if opts.get('print0'):
3559 3560 sep = eol = '\0'
3560 3561
3561 3562 getfile = util.lrucachefunc(repo.file)
3562 3563
3563 3564 def matchlines(body):
3564 3565 begin = 0
3565 3566 linenum = 0
3566 3567 while begin < len(body):
3567 3568 match = regexp.search(body, begin)
3568 3569 if not match:
3569 3570 break
3570 3571 mstart, mend = match.span()
3571 3572 linenum += body.count('\n', begin, mstart) + 1
3572 3573 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3573 3574 begin = body.find('\n', mend) + 1 or len(body) + 1
3574 3575 lend = begin - 1
3575 3576 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3576 3577
3577 3578 class linestate(object):
3578 3579 def __init__(self, line, linenum, colstart, colend):
3579 3580 self.line = line
3580 3581 self.linenum = linenum
3581 3582 self.colstart = colstart
3582 3583 self.colend = colend
3583 3584
3584 3585 def __hash__(self):
3585 3586 return hash((self.linenum, self.line))
3586 3587
3587 3588 def __eq__(self, other):
3588 3589 return self.line == other.line
3589 3590
3590 3591 def __iter__(self):
3591 3592 yield (self.line[:self.colstart], '')
3592 3593 yield (self.line[self.colstart:self.colend], 'grep.match')
3593 3594 rest = self.line[self.colend:]
3594 3595 while rest != '':
3595 3596 match = regexp.search(rest)
3596 3597 if not match:
3597 3598 yield (rest, '')
3598 3599 break
3599 3600 mstart, mend = match.span()
3600 3601 yield (rest[:mstart], '')
3601 3602 yield (rest[mstart:mend], 'grep.match')
3602 3603 rest = rest[mend:]
3603 3604
3604 3605 matches = {}
3605 3606 copies = {}
3606 3607 def grepbody(fn, rev, body):
3607 3608 matches[rev].setdefault(fn, [])
3608 3609 m = matches[rev][fn]
3609 3610 for lnum, cstart, cend, line in matchlines(body):
3610 3611 s = linestate(line, lnum, cstart, cend)
3611 3612 m.append(s)
3612 3613
3613 3614 def difflinestates(a, b):
3614 3615 sm = difflib.SequenceMatcher(None, a, b)
3615 3616 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3616 3617 if tag == 'insert':
3617 3618 for i in xrange(blo, bhi):
3618 3619 yield ('+', b[i])
3619 3620 elif tag == 'delete':
3620 3621 for i in xrange(alo, ahi):
3621 3622 yield ('-', a[i])
3622 3623 elif tag == 'replace':
3623 3624 for i in xrange(alo, ahi):
3624 3625 yield ('-', a[i])
3625 3626 for i in xrange(blo, bhi):
3626 3627 yield ('+', b[i])
3627 3628
3628 3629 def display(fn, ctx, pstates, states):
3629 3630 rev = ctx.rev()
3630 3631 datefunc = ui.quiet and util.shortdate or util.datestr
3631 3632 found = False
3632 3633 @util.cachefunc
3633 3634 def binary():
3634 3635 flog = getfile(fn)
3635 3636 return util.binary(flog.read(ctx.filenode(fn)))
3636 3637
3637 3638 if opts.get('all'):
3638 3639 iter = difflinestates(pstates, states)
3639 3640 else:
3640 3641 iter = [('', l) for l in states]
3641 3642 for change, l in iter:
3642 3643 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3643 3644
3644 3645 if opts.get('line_number'):
3645 3646 cols.append((str(l.linenum), 'grep.linenumber'))
3646 3647 if opts.get('all'):
3647 3648 cols.append((change, 'grep.change'))
3648 3649 if opts.get('user'):
3649 3650 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3650 3651 if opts.get('date'):
3651 3652 cols.append((datefunc(ctx.date()), 'grep.date'))
3652 3653 for col, label in cols[:-1]:
3653 3654 ui.write(col, label=label)
3654 3655 ui.write(sep, label='grep.sep')
3655 3656 ui.write(cols[-1][0], label=cols[-1][1])
3656 3657 if not opts.get('files_with_matches'):
3657 3658 ui.write(sep, label='grep.sep')
3658 3659 if not opts.get('text') and binary():
3659 3660 ui.write(" Binary file matches")
3660 3661 else:
3661 3662 for s, label in l:
3662 3663 ui.write(s, label=label)
3663 3664 ui.write(eol)
3664 3665 found = True
3665 3666 if opts.get('files_with_matches'):
3666 3667 break
3667 3668 return found
3668 3669
3669 3670 skip = {}
3670 3671 revfiles = {}
3671 3672 matchfn = scmutil.match(repo[None], pats, opts)
3672 3673 found = False
3673 3674 follow = opts.get('follow')
3674 3675
3675 3676 def prep(ctx, fns):
3676 3677 rev = ctx.rev()
3677 3678 pctx = ctx.p1()
3678 3679 parent = pctx.rev()
3679 3680 matches.setdefault(rev, {})
3680 3681 matches.setdefault(parent, {})
3681 3682 files = revfiles.setdefault(rev, [])
3682 3683 for fn in fns:
3683 3684 flog = getfile(fn)
3684 3685 try:
3685 3686 fnode = ctx.filenode(fn)
3686 3687 except error.LookupError:
3687 3688 continue
3688 3689
3689 3690 copied = flog.renamed(fnode)
3690 3691 copy = follow and copied and copied[0]
3691 3692 if copy:
3692 3693 copies.setdefault(rev, {})[fn] = copy
3693 3694 if fn in skip:
3694 3695 if copy:
3695 3696 skip[copy] = True
3696 3697 continue
3697 3698 files.append(fn)
3698 3699
3699 3700 if fn not in matches[rev]:
3700 3701 grepbody(fn, rev, flog.read(fnode))
3701 3702
3702 3703 pfn = copy or fn
3703 3704 if pfn not in matches[parent]:
3704 3705 try:
3705 3706 fnode = pctx.filenode(pfn)
3706 3707 grepbody(pfn, parent, flog.read(fnode))
3707 3708 except error.LookupError:
3708 3709 pass
3709 3710
3710 3711 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3711 3712 rev = ctx.rev()
3712 3713 parent = ctx.p1().rev()
3713 3714 for fn in sorted(revfiles.get(rev, [])):
3714 3715 states = matches[rev][fn]
3715 3716 copy = copies.get(rev, {}).get(fn)
3716 3717 if fn in skip:
3717 3718 if copy:
3718 3719 skip[copy] = True
3719 3720 continue
3720 3721 pstates = matches.get(parent, {}).get(copy or fn, [])
3721 3722 if pstates or states:
3722 3723 r = display(fn, ctx, pstates, states)
3723 3724 found = found or r
3724 3725 if r and not opts.get('all'):
3725 3726 skip[fn] = True
3726 3727 if copy:
3727 3728 skip[copy] = True
3728 3729 del matches[rev]
3729 3730 del revfiles[rev]
3730 3731
3731 3732 return not found
3732 3733
3733 3734 @command('heads',
3734 3735 [('r', 'rev', '',
3735 3736 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3736 3737 ('t', 'topo', False, _('show topological heads only')),
3737 3738 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3738 3739 ('c', 'closed', False, _('show normal and closed branch heads')),
3739 3740 ] + templateopts,
3740 3741 _('[-ct] [-r STARTREV] [REV]...'))
3741 3742 def heads(ui, repo, *branchrevs, **opts):
3742 3743 """show branch heads
3743 3744
3744 3745 With no arguments, show all open branch heads in the repository.
3745 3746 Branch heads are changesets that have no descendants on the
3746 3747 same branch. They are where development generally takes place and
3747 3748 are the usual targets for update and merge operations.
3748 3749
3749 3750 If one or more REVs are given, only open branch heads on the
3750 3751 branches associated with the specified changesets are shown. This
3751 3752 means that you can use :hg:`heads .` to see the heads on the
3752 3753 currently checked-out branch.
3753 3754
3754 3755 If -c/--closed is specified, also show branch heads marked closed
3755 3756 (see :hg:`commit --close-branch`).
3756 3757
3757 3758 If STARTREV is specified, only those heads that are descendants of
3758 3759 STARTREV will be displayed.
3759 3760
3760 3761 If -t/--topo is specified, named branch mechanics will be ignored and only
3761 3762 topological heads (changesets with no children) will be shown.
3762 3763
3763 3764 Returns 0 if matching heads are found, 1 if not.
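.. container:: verbose

Examples:

- show the heads of the branch you are currently working on::

hg heads .

- show all branch heads, including those marked as closed::

hg heads --closed
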
3764 3765 """
3765 3766
3766 3767 start = None
3767 3768 if 'rev' in opts:
3768 3769 start = scmutil.revsingle(repo, opts['rev'], None).node()
3769 3770
3770 3771 if opts.get('topo'):
3771 3772 heads = [repo[h] for h in repo.heads(start)]
3772 3773 else:
3773 3774 heads = []
3774 3775 for branch in repo.branchmap():
3775 3776 heads += repo.branchheads(branch, start, opts.get('closed'))
3776 3777 heads = [repo[h] for h in heads]
3777 3778
3778 3779 if branchrevs:
3779 3780 branches = set(repo[br].branch() for br in branchrevs)
3780 3781 heads = [h for h in heads if h.branch() in branches]
3781 3782
3782 3783 if opts.get('active') and branchrevs:
3783 3784 dagheads = repo.heads(start)
3784 3785 heads = [h for h in heads if h.node() in dagheads]
3785 3786
3786 3787 if branchrevs:
3787 3788 haveheads = set(h.branch() for h in heads)
3788 3789 if branches - haveheads:
3789 3790 headless = ', '.join(b for b in branches - haveheads)
3790 3791 msg = _('no open branch heads found on branches %s')
3791 3792 if opts.get('rev'):
3792 3793 msg += _(' (started at %s)') % opts['rev']
3793 3794 ui.warn((msg + '\n') % headless)
3794 3795
3795 3796 if not heads:
3796 3797 return 1
3797 3798
3798 3799 heads = sorted(heads, key=lambda x: -x.rev())
3799 3800 displayer = cmdutil.show_changeset(ui, repo, opts)
3800 3801 for ctx in heads:
3801 3802 displayer.show(ctx)
3802 3803 displayer.close()
3803 3804
3804 3805 @command('help',
3805 3806 [('e', 'extension', None, _('show only help for extensions')),
3806 3807 ('c', 'command', None, _('show only help for commands')),
3807 3808 ('k', 'keyword', '', _('show topics matching keyword')),
3808 3809 ],
3809 3810 _('[-ec] [TOPIC]'),
3810 3811 norepo=True)
3811 3812 def help_(ui, name=None, **opts):
3812 3813 """show help for a given topic or a help overview
3813 3814
3814 3815 With no arguments, print a list of commands with short help messages.
3815 3816
3816 3817 Given a topic, extension, or command name, print help for that
3817 3818 topic.
3818 3819
3819 3820 Returns 0 if successful.
3820 3821 """
3821 3822
3822 3823 textwidth = min(ui.termwidth(), 80) - 2
3823 3824
3824 3825 keep = []
3825 3826 if ui.verbose:
3826 3827 keep.append('verbose')
3827 3828 if sys.platform.startswith('win'):
3828 3829 keep.append('windows')
3829 3830 elif sys.platform == 'OpenVMS':
3830 3831 keep.append('vms')
3831 3832 elif sys.platform == 'plan9':
3832 3833 keep.append('plan9')
3833 3834 else:
3834 3835 keep.append('unix')
3835 3836 keep.append(sys.platform.lower())
3836 3837
3837 3838 section = None
3838 3839 if name and '.' in name:
3839 3840 name, section = name.split('.', 1)
3840 3841
3841 3842 text = help.help_(ui, name, **opts)
3842 3843
3843 3844 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3844 3845 section=section)
3845 3846 if section and not formatted:
3846 3847 raise util.Abort(_("help section not found"))
3847 3848
3848 3849 if 'verbose' in pruned:
3849 3850 keep.append('omitted')
3850 3851 else:
3851 3852 keep.append('notomitted')
3852 3853 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3853 3854 section=section)
3854 3855 ui.write(formatted)
3855 3856
3856 3857
3857 3858 @command('identify|id',
3858 3859 [('r', 'rev', '',
3859 3860 _('identify the specified revision'), _('REV')),
3860 3861 ('n', 'num', None, _('show local revision number')),
3861 3862 ('i', 'id', None, _('show global revision id')),
3862 3863 ('b', 'branch', None, _('show branch')),
3863 3864 ('t', 'tags', None, _('show tags')),
3864 3865 ('B', 'bookmarks', None, _('show bookmarks')),
3865 3866 ] + remoteopts,
3866 3867 _('[-nibtB] [-r REV] [SOURCE]'),
3867 3868 optionalrepo=True)
3868 3869 def identify(ui, repo, source=None, rev=None,
3869 3870 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3870 3871 """identify the working copy or specified revision
3871 3872
3872 3873 Print a summary identifying the repository state at REV using one or
3873 3874 two parent hash identifiers, followed by a "+" if the working
3874 3875 directory has uncommitted changes, the branch name (if not default),
3875 3876 a list of tags, and a list of bookmarks.
3876 3877
3877 3878 When REV is not given, print a summary of the current state of the
3878 3879 repository.
3879 3880
3880 3881 Specifying a path to a repository root or Mercurial bundle will
3881 3882 cause lookup to operate on that repository/bundle.
3882 3883
3883 3884 .. container:: verbose
3884 3885
3885 3886 Examples:
3886 3887
3887 3888 - generate a build identifier for the working directory::
3888 3889
3889 3890 hg id --id > build-id.dat
3890 3891
3891 3892 - find the revision corresponding to a tag::
3892 3893
3893 3894 hg id -n -r 1.3
3894 3895
3895 3896 - check the most recent revision of a remote repository::
3896 3897
3897 3898 hg id -r tip http://selenic.com/hg/
3898 3899
3899 3900 Returns 0 if successful.
3900 3901 """
3901 3902
3902 3903 if not repo and not source:
3903 3904 raise util.Abort(_("there is no Mercurial repository here "
3904 3905 "(.hg not found)"))
3905 3906
3906 3907 hexfunc = ui.debugflag and hex or short
3907 3908 default = not (num or id or branch or tags or bookmarks)
3908 3909 output = []
3909 3910 revs = []
3910 3911
3911 3912 if source:
3912 3913 source, branches = hg.parseurl(ui.expandpath(source))
3913 3914 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3914 3915 repo = peer.local()
3915 3916 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3916 3917
3917 3918 if not repo:
3918 3919 if num or branch or tags:
3919 3920 raise util.Abort(
3920 3921 _("can't query remote revision number, branch, or tags"))
3921 3922 if not rev and revs:
3922 3923 rev = revs[0]
3923 3924 if not rev:
3924 3925 rev = "tip"
3925 3926
3926 3927 remoterev = peer.lookup(rev)
3927 3928 if default or id:
3928 3929 output = [hexfunc(remoterev)]
3929 3930
3930 3931 def getbms():
3931 3932 bms = []
3932 3933
3933 3934 if 'bookmarks' in peer.listkeys('namespaces'):
3934 3935 hexremoterev = hex(remoterev)
3935 3936 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3936 3937 if bmr == hexremoterev]
3937 3938
3938 3939 return sorted(bms)
3939 3940
3940 3941 if bookmarks:
3941 3942 output.extend(getbms())
3942 3943 elif default and not ui.quiet:
3943 3944 # multiple bookmarks for a single parent separated by '/'
3944 3945 bm = '/'.join(getbms())
3945 3946 if bm:
3946 3947 output.append(bm)
3947 3948 else:
3948 3949 if not rev:
3949 3950 ctx = repo[None]
3950 3951 parents = ctx.parents()
3951 3952 changed = ""
3952 3953 if default or id or num:
3953 3954 if (util.any(repo.status())
3954 3955 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3955 3956 changed = '+'
3956 3957 if default or id:
3957 3958 output = ["%s%s" %
3958 3959 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3959 3960 if num:
3960 3961 output.append("%s%s" %
3961 3962 ('+'.join([str(p.rev()) for p in parents]), changed))
3962 3963 else:
3963 3964 ctx = scmutil.revsingle(repo, rev)
3964 3965 if default or id:
3965 3966 output = [hexfunc(ctx.node())]
3966 3967 if num:
3967 3968 output.append(str(ctx.rev()))
3968 3969
3969 3970 if default and not ui.quiet:
3970 3971 b = ctx.branch()
3971 3972 if b != 'default':
3972 3973 output.append("(%s)" % b)
3973 3974
3974 3975 # multiple tags for a single parent separated by '/'
3975 3976 t = '/'.join(ctx.tags())
3976 3977 if t:
3977 3978 output.append(t)
3978 3979
3979 3980 # multiple bookmarks for a single parent separated by '/'
3980 3981 bm = '/'.join(ctx.bookmarks())
3981 3982 if bm:
3982 3983 output.append(bm)
3983 3984 else:
3984 3985 if branch:
3985 3986 output.append(ctx.branch())
3986 3987
3987 3988 if tags:
3988 3989 output.extend(ctx.tags())
3989 3990
3990 3991 if bookmarks:
3991 3992 output.extend(ctx.bookmarks())
3992 3993
3993 3994 ui.write("%s\n" % ' '.join(output))
3994 3995
3995 3996 @command('import|patch',
3996 3997 [('p', 'strip', 1,
3997 3998 _('directory strip option for patch. This has the same '
3998 3999 'meaning as the corresponding patch option'), _('NUM')),
3999 4000 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4000 4001 ('e', 'edit', False, _('invoke editor on commit messages')),
4001 4002 ('f', 'force', None,
4002 4003 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4003 4004 ('', 'no-commit', None,
4004 4005 _("don't commit, just update the working directory")),
4005 4006 ('', 'bypass', None,
4006 4007 _("apply patch without touching the working directory")),
4007 4008 ('', 'partial', None,
4008 4009 _('commit even if some hunks fail')),
4009 4010 ('', 'exact', None,
4010 4011 _('apply patch to the nodes from which it was generated')),
4011 4012 ('', 'import-branch', None,
4012 4013 _('use any branch information in patch (implied by --exact)'))] +
4013 4014 commitopts + commitopts2 + similarityopts,
4014 4015 _('[OPTION]... PATCH...'))
4015 4016 def import_(ui, repo, patch1=None, *patches, **opts):
4016 4017 """import an ordered set of patches
4017 4018
4018 4019 Import a list of patches and commit them individually (unless
4019 4020 --no-commit is specified).
4020 4021
4021 4022 Because import first applies changes to the working directory,
4022 4023 import will abort if there are outstanding changes.
4023 4024
4024 4025 You can import a patch straight from a mail message. Even patches
4025 4026 as attachments work (to use the body part, it must have type
4026 4027 text/plain or text/x-patch). The From and Subject headers of the email
4027 4028 message are used as the default committer and commit message. All
4028 4029 text/plain body parts before the first diff are added to the commit
4029 4030 message.
4030 4031
4031 4032 If the imported patch was generated by :hg:`export`, user and
4032 4033 description from patch override values from message headers and
4033 4034 body. Values given on command line with -m/--message and -u/--user
4034 4035 override these.
4035 4036
4036 4037 If --exact is specified, import will set the working directory to
4037 4038 the parent of each patch before applying it, and will abort if the
4038 4039 resulting changeset has a different ID than the one recorded in
4039 4040 the patch. This may happen due to character set problems or other
4040 4041 deficiencies in the text patch format.
4041 4042
4042 4043 Use --bypass to apply and commit patches directly to the
4043 4044 repository, not touching the working directory. Without --exact,
4044 4045 patches will be applied on top of the working directory parent
4045 4046 revision.
4046 4047
4047 4048 With -s/--similarity, hg will attempt to discover renames and
4048 4049 copies in the patch in the same way as :hg:`addremove`.
4049 4050
4050 4051 Use --partial to ensure a changeset will be created from the patch
4051 4052 even if some hunks fail to apply. Hunks that fail to apply will be
4052 4053 written to a <target-file>.rej file. Conflicts can then be resolved
4053 4054 by hand before :hg:`commit --amend` is run to update the created
4054 4055 changeset. This flag exists to let people import patches that
4055 4056 partially apply without losing the associated metadata (author,
4056 4057 date, description, ...). Note that when none of the hunks apply
4057 4058 cleanly, :hg:`import --partial` will create an empty changeset,
4058 4059 importing only the patch metadata.
4059 4060
4060 4061 To read a patch from standard input, use "-" as the patch name. If
4061 4062 a URL is specified, the patch will be downloaded from it.
4062 4063 See :hg:`help dates` for a list of formats valid for -d/--date.
4063 4064
4064 4065 .. container:: verbose
4065 4066
4066 4067 Examples:
4067 4068
4068 4069 - import a traditional patch from a website and detect renames::
4069 4070
4070 4071 hg import -s 80 http://example.com/bugfix.patch
4071 4072
4072 4073 - import a changeset from an hgweb server::
4073 4074
4074 4075 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4075 4076
4076 4077 - import all the patches in a Unix-style mbox::
4077 4078
4078 4079 hg import incoming-patches.mbox
4079 4080
4080 4081 - attempt to exactly restore an exported changeset (not always
4081 4082 possible)::
4082 4083
4083 4084 hg import --exact proposed-fix.patch
4084 4085
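- create a changeset from a patch even when some hunks fail to apply,
then fix the rejects and amend (the patch name here is illustrative)::

hg import --partial incomplete.patch
hg commit --amend
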
4085 4086 Returns 0 on success, 1 on partial success (see --partial).
4086 4087 """
4087 4088
4088 4089 if not patch1:
4089 4090 raise util.Abort(_('need at least one patch to import'))
4090 4091
4091 4092 patches = (patch1,) + patches
4092 4093
4093 4094 date = opts.get('date')
4094 4095 if date:
4095 4096 opts['date'] = util.parsedate(date)
4096 4097
4097 4098 update = not opts.get('bypass')
4098 4099 if not update and opts.get('no_commit'):
4099 4100 raise util.Abort(_('cannot use --no-commit with --bypass'))
4100 4101 try:
4101 4102 sim = float(opts.get('similarity') or 0)
4102 4103 except ValueError:
4103 4104 raise util.Abort(_('similarity must be a number'))
4104 4105 if sim < 0 or sim > 100:
4105 4106 raise util.Abort(_('similarity must be between 0 and 100'))
4106 4107 if sim and not update:
4107 4108 raise util.Abort(_('cannot use --similarity with --bypass'))
4108 4109 if opts.get('exact') and opts.get('edit'):
4109 4110 raise util.Abort(_('cannot use --exact with --edit'))
4110 4111
4111 4112 if update:
4112 4113 cmdutil.checkunfinished(repo)
4113 4114 if (opts.get('exact') or not opts.get('force')) and update:
4114 4115 cmdutil.bailifchanged(repo)
4115 4116
4116 4117 base = opts["base"]
4117 4118 wlock = lock = tr = None
4118 4119 msgs = []
4119 4120 ret = 0
4120 4121
4121 4122
4122 4123 try:
4123 4124 try:
4124 4125 wlock = repo.wlock()
4125 4126 repo.dirstate.beginparentchange()
4126 4127 if not opts.get('no_commit'):
4127 4128 lock = repo.lock()
4128 4129 tr = repo.transaction('import')
4129 4130 parents = repo.parents()
4130 4131 for patchurl in patches:
4131 4132 if patchurl == '-':
4132 4133 ui.status(_('applying patch from stdin\n'))
4133 4134 patchfile = ui.fin
4134 4135 patchurl = 'stdin' # for error message
4135 4136 else:
4136 4137 patchurl = os.path.join(base, patchurl)
4137 4138 ui.status(_('applying %s\n') % patchurl)
4138 4139 patchfile = hg.openpath(ui, patchurl)
4139 4140
4140 4141 haspatch = False
4141 4142 for hunk in patch.split(patchfile):
4142 4143 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4143 4144 parents, opts,
4144 4145 msgs, hg.clean)
4145 4146 if msg:
4146 4147 haspatch = True
4147 4148 ui.note(msg + '\n')
4148 4149 if update or opts.get('exact'):
4149 4150 parents = repo.parents()
4150 4151 else:
4151 4152 parents = [repo[node]]
4152 4153 if rej:
4153 4154 ui.write_err(_("patch applied partially\n"))
4154 4155 ui.write_err(_("(fix the .rej files and run "
4155 4156 "`hg commit --amend`)\n"))
4156 4157 ret = 1
4157 4158 break
4158 4159
4159 4160 if not haspatch:
4160 4161 raise util.Abort(_('%s: no diffs found') % patchurl)
4161 4162
4162 4163 if tr:
4163 4164 tr.close()
4164 4165 if msgs:
4165 4166 repo.savecommitmessage('\n* * *\n'.join(msgs))
4166 4167 repo.dirstate.endparentchange()
4167 4168 return ret
4168 4169 except: # re-raises
4169 4170 # wlock.release() indirectly calls dirstate.write(): since
4170 4171 # we're crashing, we do not want to change the working dir
4171 4172 # parent after all, so make sure it writes nothing
4172 4173 repo.dirstate.invalidate()
4173 4174 raise
4174 4175 finally:
4175 4176 if tr:
4176 4177 tr.release()
4177 4178 release(lock, wlock)
4178 4179
4179 4180 @command('incoming|in',
4180 4181 [('f', 'force', None,
4181 4182 _('run even if remote repository is unrelated')),
4182 4183 ('n', 'newest-first', None, _('show newest record first')),
4183 4184 ('', 'bundle', '',
4184 4185 _('file to store the bundles into'), _('FILE')),
4185 4186 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4186 4187 ('B', 'bookmarks', False, _("compare bookmarks")),
4187 4188 ('b', 'branch', [],
4188 4189 _('a specific branch you would like to pull'), _('BRANCH')),
4189 4190 ] + logopts + remoteopts + subrepoopts,
4190 4191 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4191 4192 def incoming(ui, repo, source="default", **opts):
4192 4193 """show new changesets found in source
4193 4194
4194 4195 Show new changesets found in the specified path/URL or the default
4195 4196 pull location. These are the changesets that would have been pulled
4196 4197 if a pull was requested at the time you issued this command.
4197 4198
4198 4199 For a remote repository, using --bundle avoids downloading the
4199 4200 changesets twice if the incoming command is followed by a pull.
4200 4201
4201 4202 See pull for valid source format details.
4202 4203
4203 4204 .. container:: verbose
4204 4205
4205 4206 Examples:
4206 4207
4207 4208 - show incoming changes with patches and full description::
4208 4209
4209 4210 hg incoming -vp
4210 4211
4211 4212 - show incoming changes excluding merges, store a bundle::
4212 4213
4213 4214 hg in -vpM --bundle incoming.hg
4214 4215 hg pull incoming.hg
4215 4216
4216 4217 - briefly list changes inside a bundle::
4217 4218
4218 4219 hg in changes.hg -T "{desc|firstline}\\n"
4219 4220
4220 4221 Returns 0 if there are incoming changes, 1 otherwise.
4221 4222 """
4222 4223 if opts.get('graph'):
4223 4224 cmdutil.checkunsupportedgraphflags([], opts)
4224 4225 def display(other, chlist, displayer):
4225 4226 revdag = cmdutil.graphrevs(other, chlist, opts)
4226 4227 showparents = [ctx.node() for ctx in repo[None].parents()]
4227 4228 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4228 4229 graphmod.asciiedges)
4229 4230
4230 4231 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4231 4232 return 0
4232 4233
4233 4234 if opts.get('bundle') and opts.get('subrepos'):
4234 4235 raise util.Abort(_('cannot combine --bundle and --subrepos'))
4235 4236
4236 4237 if opts.get('bookmarks'):
4237 4238 source, branches = hg.parseurl(ui.expandpath(source),
4238 4239 opts.get('branch'))
4239 4240 other = hg.peer(repo, opts, source)
4240 4241 if 'bookmarks' not in other.listkeys('namespaces'):
4241 4242 ui.warn(_("remote doesn't support bookmarks\n"))
4242 4243 return 0
4243 4244 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4244 4245 return bookmarks.diff(ui, repo, other)
4245 4246
4246 4247 repo._subtoppath = ui.expandpath(source)
4247 4248 try:
4248 4249 return hg.incoming(ui, repo, source, opts)
4249 4250 finally:
4250 4251 del repo._subtoppath
4251 4252
4252 4253
4253 4254 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4254 4255 norepo=True)
4255 4256 def init(ui, dest=".", **opts):
4256 4257 """create a new repository in the given directory
4257 4258
4258 4259 Initialize a new repository in the given directory. If the given
4259 4260 directory does not exist, it will be created.
4260 4261
4261 4262 If no directory is given, the current directory is used.
4262 4263
4263 4264 It is possible to specify an ``ssh://`` URL as the destination.
4264 4265 See :hg:`help urls` for more information.
4265 4266
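For example, to create a repository in a new directory named "project"::

hg init project
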
4266 4267 Returns 0 on success.
4267 4268 """
4268 4269 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4269 4270
4270 4271 @command('locate',
4271 4272 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4272 4273 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4273 4274 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4274 4275 ] + walkopts,
4275 4276 _('[OPTION]... [PATTERN]...'))
4276 4277 def locate(ui, repo, *pats, **opts):
4277 4278 """locate files matching specific patterns (DEPRECATED)
4278 4279
4279 4280 Print files under Mercurial control in the working directory whose
4280 4281 names match the given patterns.
4281 4282
4282 4283 By default, this command searches all directories in the working
4283 4284 directory. To search just the current directory and its
4284 4285 subdirectories, use "--include .".
4285 4286
4286 4287 If no patterns are given to match, this command prints the names
4287 4288 of all files under Mercurial control in the working directory.
4288 4289
4289 4290 If you want to feed the output of this command into the "xargs"
4290 4291 command, use the -0 option to both this command and "xargs". This
4291 4292 will avoid the problem of "xargs" treating single filenames that
4292 4293 contain whitespace as multiple filenames.
4293 4294
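For example, to search the contents of every tracked file, even when
filenames contain whitespace::

hg locate -0 | xargs -0 grep foo
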
4294 4295 See :hg:`help files` for a more versatile command.
4295 4296
4296 4297 Returns 0 if a match is found, 1 otherwise.
4297 4298 """
4298 4299 end = opts.get('print0') and '\0' or '\n'
4299 4300 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4300 4301
4301 4302 ret = 1
4302 4303 ctx = repo[rev]
4303 4304 m = scmutil.match(ctx, pats, opts, default='relglob')
4304 4305 m.bad = lambda x, y: False
4305 4306
4306 4307 for abs in ctx.matches(m):
4307 4308 if opts.get('fullpath'):
4308 4309 ui.write(repo.wjoin(abs), end)
4309 4310 else:
4310 4311 ui.write(((pats and m.rel(abs)) or abs), end)
4311 4312 ret = 0
4312 4313
4313 4314 return ret
4314 4315
4315 4316 @command('^log|history',
4316 4317 [('f', 'follow', None,
4317 4318 _('follow changeset history, or file history across copies and renames')),
4318 4319 ('', 'follow-first', None,
4319 4320 _('only follow the first parent of merge changesets (DEPRECATED)')),
4320 4321 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4321 4322 ('C', 'copies', None, _('show copied files')),
4322 4323 ('k', 'keyword', [],
4323 4324 _('do case-insensitive search for a given text'), _('TEXT')),
4324 4325 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
4325 4326 ('', 'removed', None, _('include revisions where files were removed')),
4326 4327 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4327 4328 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4328 4329 ('', 'only-branch', [],
4329 4330 _('show only changesets within the given named branch (DEPRECATED)'),
4330 4331 _('BRANCH')),
4331 4332 ('b', 'branch', [],
4332 4333 _('show changesets within the given named branch'), _('BRANCH')),
4333 4334 ('P', 'prune', [],
4334 4335 _('do not display revision or any of its ancestors'), _('REV')),
4335 4336 ] + logopts + walkopts,
4336 4337 _('[OPTION]... [FILE]'),
4337 4338 inferrepo=True)
4338 4339 def log(ui, repo, *pats, **opts):
4339 4340 """show revision history of entire repository or files
4340 4341
4341 4342 Print the revision history of the specified files or the entire
4342 4343 project.
4343 4344
4344 4345 If no revision range is specified, the default is ``tip:0`` unless
4345 4346 --follow is set, in which case the working directory parent is
4346 4347 used as the starting revision.
4347 4348
4348 4349 File history is shown without following rename or copy history of
4349 4350 files. Use -f/--follow with a filename to follow history across
4350 4351 renames and copies. --follow without a filename will only show
4351 4352 ancestors or descendants of the starting revision.
4352 4353
4353 4354 By default this command prints revision number and changeset id,
4354 4355 tags, non-trivial parents, user, date and time, and a summary for
4355 4356 each commit. When the -v/--verbose switch is used, the list of
4356 4357 changed files and full commit message are shown.
4357 4358
4358 4359 With --graph the revisions are shown as an ASCII art DAG with the most
4359 4360 recent changeset at the top.
4360 4361 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4361 4362 and '+' represents a fork where the changeset from the lines below is a
4362 4363 parent of the 'o' merge on the same line.
4363 4364
4364 4365 .. note::
4365 4366
4366 4367 log -p/--patch may generate unexpected diff output for merge
4367 4368 changesets, as it will only compare the merge changeset against
4368 4369 its first parent. Also, only files different from BOTH parents
4369 4370 will appear in files:.
4370 4371
4371 4372 .. note::
4372 4373
4373 4374 for performance reasons, log FILE may omit duplicate changes
4374 4375 made on branches and will not show removals or mode changes. To
4375 4376 see all such changes, use the --removed switch.
4376 4377
4377 4378 .. container:: verbose
4378 4379
4379 4380 Some examples:
4380 4381
4381 4382 - changesets with full descriptions and file lists::
4382 4383
4383 4384 hg log -v
4384 4385
4385 4386 - changesets ancestral to the working directory::
4386 4387
4387 4388 hg log -f
4388 4389
4389 4390 - last 10 commits on the current branch::
4390 4391
4391 4392 hg log -l 10 -b .
4392 4393
4393 4394 - changesets showing all modifications of a file, including removals::
4394 4395
4395 4396 hg log --removed file.c
4396 4397
4397 4398 - all changesets that touch a directory, with diffs, excluding merges::
4398 4399
4399 4400 hg log -Mp lib/
4400 4401
4401 4402 - all revision numbers that match a keyword::
4402 4403
4403 4404 hg log -k bug --template "{rev}\\n"
4404 4405
4405 4406 - list available log templates::
4406 4407
4407 4408 hg log -T list
4408 4409
4409 4410 - check if a given changeset is included in a tagged release::
4410 4411
4411 4412 hg log -r "a21ccf and ancestor(1.9)"
4412 4413
4413 4414 - find all changesets by some user in a date range::
4414 4415
4415 4416 hg log -k alice -d "may 2008 to jul 2008"
4416 4417
4417 4418 - summary of all changesets after the last tag::
4418 4419
4419 4420 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4420 4421
4421 4422 See :hg:`help dates` for a list of formats valid for -d/--date.
4422 4423
4423 4424 See :hg:`help revisions` and :hg:`help revsets` for more about
4424 4425 specifying revisions.
4425 4426
4426 4427 See :hg:`help templates` for more about pre-packaged styles and
4427 4428 specifying custom templates.
4428 4429
4429 4430 Returns 0 on success.
4430 4431
4431 4432 """
4432 4433 if opts.get('graph'):
4433 4434 return cmdutil.graphlog(ui, repo, *pats, **opts)
4434 4435
4435 4436 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4436 4437 limit = cmdutil.loglimit(opts)
4437 4438 count = 0
4438 4439
4439 4440 getrenamed = None
4440 4441 if opts.get('copies'):
4441 4442 endrev = None
4442 4443 if opts.get('rev'):
4443 4444 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4444 4445 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4445 4446
4446 4447 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4447 4448 for rev in revs:
4448 4449 if count == limit:
4449 4450 break
4450 4451 ctx = repo[rev]
4451 4452 copies = None
4452 4453 if getrenamed is not None and rev:
4453 4454 copies = []
4454 4455 for fn in ctx.files():
4455 4456 rename = getrenamed(fn, rev)
4456 4457 if rename:
4457 4458 copies.append((fn, rename[0]))
4458 4459 revmatchfn = filematcher and filematcher(ctx.rev()) or None
4459 4460 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4460 4461 if displayer.flush(rev):
4461 4462 count += 1
4462 4463
4463 4464 displayer.close()
4464 4465
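# Editorial sketch (hypothetical helper, not part of the command above): the
# -C/--copies handling in log() reduces to asking templatekw.getrenamedfn()
# where each file touched by a revision came from.  A minimal standalone
# version, assuming `repo` is an open localrepository and `rev` a revision
# number:
def _example_collectcopies(repo, rev):
    """Return [(dest, source), ...] for files copied/renamed in rev."""
    getrenamed = templatekw.getrenamedfn(repo, endrev=rev + 1)
    copies = []
    for fn in repo[rev].files():
        rename = getrenamed(fn, rev)
        if rename:
            copies.append((fn, rename[0]))
    return copies
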
4465 4466 @command('manifest',
4466 4467 [('r', 'rev', '', _('revision to display'), _('REV')),
4467 4468 ('', 'all', False, _("list files from all revisions"))]
4468 4469 + formatteropts,
4469 4470 _('[-r REV]'))
4470 4471 def manifest(ui, repo, node=None, rev=None, **opts):
4471 4472 """output the current or given revision of the project manifest
4472 4473
4473 4474 Print a list of version controlled files for the given revision.
4474 4475 If no revision is given, the first parent of the working directory
4475 4476 is used, or the null revision if no revision is checked out.
4476 4477
4477 4478 With -v, print file permissions, symlink and executable bits.
4478 4479 With --debug, print file revision hashes.
4479 4480
4480 4481 If option --all is specified, the list of all files from all revisions
4481 4482 is printed. This includes deleted and renamed files.
4482 4483
4483 4484 Returns 0 on success.
4484 4485 """
4485 4486
4486 4487 fm = ui.formatter('manifest', opts)
4487 4488
4488 4489 if opts.get('all'):
4489 4490 if rev or node:
4490 4491 raise util.Abort(_("can't specify a revision with --all"))
4491 4492
4492 4493 res = []
4493 4494 prefix = "data/"
4494 4495 suffix = ".i"
4495 4496 plen = len(prefix)
4496 4497 slen = len(suffix)
4497 4498 lock = repo.lock()
4498 4499 try:
4499 4500 for fn, b, size in repo.store.datafiles():
4500 4501 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4501 4502 res.append(fn[plen:-slen])
4502 4503 finally:
4503 4504 lock.release()
4504 4505 for f in res:
4505 4506 fm.startitem()
4506 4507 fm.write("path", '%s\n', f)
4507 4508 fm.end()
4508 4509 return
4509 4510
4510 4511 if rev and node:
4511 4512 raise util.Abort(_("please specify just one revision"))
4512 4513
4513 4514 if not node:
4514 4515 node = rev
4515 4516
4516 4517 char = {'l': '@', 'x': '*', '': ''}
4517 4518 mode = {'l': '644', 'x': '755', '': '644'}
4518 4519 ctx = scmutil.revsingle(repo, node)
4519 4520 mf = ctx.manifest()
4520 4521 for f in ctx:
4521 4522 fm.startitem()
4522 4523 fl = ctx[f].flags()
4523 4524 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4524 4525 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4525 4526 fm.write('path', '%s\n', f)
4526 4527 fm.end()
4527 4528
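# Editorial sketch (hypothetical helper, not part of Mercurial): the --all
# branch of manifest() recovers historical file names by stripping the
# "data/" prefix and ".i" suffix from the store's filelog names.  The same
# transformation, stated on its own (assumes `repo` is an open
# localrepository):
def _example_allfilenames(repo):
    prefix, suffix = "data/", ".i"
    lock = repo.lock()
    try:
        return sorted(fn[len(prefix):-len(suffix)]
                      for fn, b, size in repo.store.datafiles()
                      if size != 0
                      and fn.startswith(prefix) and fn.endswith(suffix))
    finally:
        lock.release()
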
4528 4529 @command('^merge',
4529 4530 [('f', 'force', None,
4530 4531 _('force a merge including outstanding changes (DEPRECATED)')),
4531 4532 ('r', 'rev', '', _('revision to merge'), _('REV')),
4532 4533 ('P', 'preview', None,
4533 4534 _('review revisions to merge (no merge is performed)'))
4534 4535 ] + mergetoolopts,
4535 4536 _('[-P] [-f] [[-r] REV]'))
4536 4537 def merge(ui, repo, node=None, **opts):
4537 4538 """merge another revision into working directory
4538 4539
4539 4540 The current working directory is updated with all changes made in
4540 4541 the requested revision since the last common predecessor revision.
4541 4542
4542 4543 Files that changed between either parent are marked as changed for
4543 4544 the next commit and a commit must be performed before any further
4544 4545 updates to the repository are allowed. The next commit will have
4545 4546 two parents.
4546 4547
4547 4548 ``--tool`` can be used to specify the merge tool used for file
4548 4549 merges. It overrides the HGMERGE environment variable and your
4549 4550 configuration files. See :hg:`help merge-tools` for options.
4550 4551
4551 4552 If no revision is specified, the working directory's parent is a
4552 4553 head revision, and the current branch contains exactly one other
4553 4554 head, that other head is merged by default. Otherwise, an
4554 4555 explicit revision with which to merge must be provided.
4555 4556
4556 4557 :hg:`resolve` must be used to resolve unresolved files.
4557 4558
4558 4559 To undo an uncommitted merge, use :hg:`update --clean .` which
4559 4560 will check out a clean copy of the original merge parent, losing
4560 4561 all changes.
4561 4562
4562 4563 Returns 0 on success, 1 if there are unresolved files.
4563 4564 """
4564 4565
4565 4566 if opts.get('rev') and node:
4566 4567 raise util.Abort(_("please specify just one revision"))
4567 4568 if not node:
4568 4569 node = opts.get('rev')
4569 4570
4570 4571 if node:
4571 4572 node = scmutil.revsingle(repo, node).node()
4572 4573
4573 4574 if not node and repo._bookmarkcurrent:
4574 4575 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4575 4576 curhead = repo[repo._bookmarkcurrent].node()
4576 4577 if len(bmheads) == 2:
4577 4578 if curhead == bmheads[0]:
4578 4579 node = bmheads[1]
4579 4580 else:
4580 4581 node = bmheads[0]
4581 4582 elif len(bmheads) > 2:
4582 4583 raise util.Abort(_("multiple matching bookmarks to merge - "
4583 4584 "please merge with an explicit rev or bookmark"),
4584 4585 hint=_("run 'hg heads' to see all heads"))
4585 4586 elif len(bmheads) <= 1:
4586 4587 raise util.Abort(_("no matching bookmark to merge - "
4587 4588 "please merge with an explicit rev or bookmark"),
4588 4589 hint=_("run 'hg heads' to see all heads"))
4589 4590
4590 4591 if not node and not repo._bookmarkcurrent:
4591 4592 branch = repo[None].branch()
4592 4593 bheads = repo.branchheads(branch)
4593 4594 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4594 4595
4595 4596 if len(nbhs) > 2:
4596 4597 raise util.Abort(_("branch '%s' has %d heads - "
4597 4598 "please merge with an explicit rev")
4598 4599 % (branch, len(bheads)),
4599 4600 hint=_("run 'hg heads .' to see heads"))
4600 4601
4601 4602 parent = repo.dirstate.p1()
4602 4603 if len(nbhs) <= 1:
4603 4604 if len(bheads) > 1:
4604 4605 raise util.Abort(_("heads are bookmarked - "
4605 4606 "please merge with an explicit rev"),
4606 4607 hint=_("run 'hg heads' to see all heads"))
4607 4608 if len(repo.heads()) > 1:
4608 4609 raise util.Abort(_("branch '%s' has one head - "
4609 4610 "please merge with an explicit rev")
4610 4611 % branch,
4611 4612 hint=_("run 'hg heads' to see all heads"))
4612 4613 msg, hint = _('nothing to merge'), None
4613 4614 if parent != repo.lookup(branch):
4614 4615 hint = _("use 'hg update' instead")
4615 4616 raise util.Abort(msg, hint=hint)
4616 4617
4617 4618 if parent not in bheads:
4618 4619 raise util.Abort(_('working directory not at a head revision'),
4619 4620 hint=_("use 'hg update' or merge with an "
4620 4621 "explicit revision"))
4621 4622 if parent == nbhs[0]:
4622 4623 node = nbhs[-1]
4623 4624 else:
4624 4625 node = nbhs[0]
4625 4626
4626 4627 if opts.get('preview'):
4627 4628 # find nodes that are ancestors of p2 but not of p1
4628 4629 p1 = repo.lookup('.')
4629 4630 p2 = repo.lookup(node)
4630 4631 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4631 4632
4632 4633 displayer = cmdutil.show_changeset(ui, repo, opts)
4633 4634 for node in nodes:
4634 4635 displayer.show(repo[node])
4635 4636 displayer.close()
4636 4637 return 0
4637 4638
4638 4639 try:
4639 4640 # ui.forcemerge is an internal variable, do not document
4640 4641 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4641 4642 return hg.merge(repo, node, force=opts.get('force'))
4642 4643 finally:
4643 4644 ui.setconfig('ui', 'forcemerge', '', 'merge')
4644 4645
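# Editorial sketch (hypothetical helper, not part of the command above): with
# an active bookmark and no explicit revision, merge() picks "the other"
# bookmark head -- i.e. when the active bookmark has exactly two heads, the
# one that is not currently checked out.  Standalone form of that selection
# (assumes `repo` has an active bookmark; returns None in the cases where the
# command aborts instead):
def _example_otherbookmarkhead(repo):
    mark = repo._bookmarkcurrent
    bmheads = repo.bookmarkheads(mark)
    if len(bmheads) != 2:
        return None
    curhead = repo[mark].node()
    if curhead == bmheads[0]:
        return bmheads[1]
    return bmheads[0]
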
4645 4646 @command('outgoing|out',
4646 4647 [('f', 'force', None, _('run even when the destination is unrelated')),
4647 4648 ('r', 'rev', [],
4648 4649 _('a changeset intended to be included in the destination'), _('REV')),
4649 4650 ('n', 'newest-first', None, _('show newest record first')),
4650 4651 ('B', 'bookmarks', False, _('compare bookmarks')),
4651 4652 ('b', 'branch', [], _('a specific branch you would like to push'),
4652 4653 _('BRANCH')),
4653 4654 ] + logopts + remoteopts + subrepoopts,
4654 4655 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4655 4656 def outgoing(ui, repo, dest=None, **opts):
4656 4657 """show changesets not found in the destination
4657 4658
4658 4659 Show changesets not found in the specified destination repository
4659 4660 or the default push location. These are the changesets that would
4660 4661 be pushed if a push was requested.
4661 4662
4662 4663 See pull for details of valid destination formats.
4663 4664
4664 4665 Returns 0 if there are outgoing changes, 1 otherwise.
4665 4666 """
4666 4667 if opts.get('graph'):
4667 4668 cmdutil.checkunsupportedgraphflags([], opts)
4668 4669 o, other = hg._outgoing(ui, repo, dest, opts)
4669 4670 if not o:
4670 4671 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4671 4672 return
4672 4673
4673 4674 revdag = cmdutil.graphrevs(repo, o, opts)
4674 4675 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4675 4676 showparents = [ctx.node() for ctx in repo[None].parents()]
4676 4677 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4677 4678 graphmod.asciiedges)
4678 4679 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4679 4680 return 0
4680 4681
4681 4682 if opts.get('bookmarks'):
4682 4683 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4683 4684 dest, branches = hg.parseurl(dest, opts.get('branch'))
4684 4685 other = hg.peer(repo, opts, dest)
4685 4686 if 'bookmarks' not in other.listkeys('namespaces'):
4686 4687 ui.warn(_("remote doesn't support bookmarks\n"))
4687 4688 return 0
4688 4689 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4689 4690 return bookmarks.diff(ui, other, repo)
4690 4691
4691 4692 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4692 4693 try:
4693 4694 return hg.outgoing(ui, repo, dest, opts)
4694 4695 finally:
4695 4696 del repo._subtoppath
4696 4697
4697 4698 @command('parents',
4698 4699 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4699 4700 ] + templateopts,
4700 4701 _('[-r REV] [FILE]'),
4701 4702 inferrepo=True)
4702 4703 def parents(ui, repo, file_=None, **opts):
4703 4704 """show the parents of the working directory or revision (DEPRECATED)
4704 4705
4705 4706 Print the working directory's parent revisions. If a revision is
4706 4707 given via -r/--rev, the parent of that revision will be printed.
4707 4708 If a file argument is given, the revision in which the file was
4708 4709 last changed (before the working directory revision or the
4709 4710 argument to --rev if given) is printed.
4710 4711
4711 4712 See :hg:`summary` and :hg:`help revsets` for related information.
4712 4713
4713 4714 Returns 0 on success.
4714 4715 """
4715 4716
4716 4717 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4717 4718
4718 4719 if file_:
4719 4720 m = scmutil.match(ctx, (file_,), opts)
4720 4721 if m.anypats() or len(m.files()) != 1:
4721 4722 raise util.Abort(_('can only specify an explicit filename'))
4722 4723 file_ = m.files()[0]
4723 4724 filenodes = []
4724 4725 for cp in ctx.parents():
4725 4726 if not cp:
4726 4727 continue
4727 4728 try:
4728 4729 filenodes.append(cp.filenode(file_))
4729 4730 except error.LookupError:
4730 4731 pass
4731 4732 if not filenodes:
4732 4733 raise util.Abort(_("'%s' not found in manifest!") % file_)
4733 4734 p = []
4734 4735 for fn in filenodes:
4735 4736 fctx = repo.filectx(file_, fileid=fn)
4736 4737 p.append(fctx.node())
4737 4738 else:
4738 4739 p = [cp.node() for cp in ctx.parents()]
4739 4740
4740 4741 displayer = cmdutil.show_changeset(ui, repo, opts)
4741 4742 for n in p:
4742 4743 if n != nullid:
4743 4744 displayer.show(repo[n])
4744 4745 displayer.close()
4745 4746
4746 4747 @command('paths', [], _('[NAME]'), optionalrepo=True)
4747 4748 def paths(ui, repo, search=None):
4748 4749 """show aliases for remote repositories
4749 4750
4750 4751 Show definition of symbolic path name NAME. If no name is given,
4751 4752 show definition of all available names.
4752 4753
4753 4754 Option -q/--quiet suppresses all output when searching for NAME
4754 4755 and shows only the path names when listing all definitions.
4755 4756
4756 4757 Path names are defined in the [paths] section of your
4757 4758 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4758 4759 repository, ``.hg/hgrc`` is used, too.
4759 4760
4760 4761 The path names ``default`` and ``default-push`` have a special
4761 4762 meaning. When performing a push or pull operation, they are used
4762 4763 as fallbacks if no location is specified on the command-line.
4763 4764 When ``default-push`` is set, it will be used for push and
4764 4765 ``default`` will be used for pull; otherwise ``default`` is used
4765 4766 as the fallback for both. When cloning a repository, the clone
4766 4767 source is written as ``default`` in ``.hg/hgrc``. Note that
4767 4768 ``default`` and ``default-push`` apply to all inbound (e.g.
4768 4769 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4769 4770 :hg:`bundle`) operations.
4770 4771
4771 4772 See :hg:`help urls` for more information.
4772 4773
4773 4774 Returns 0 on success.
4774 4775 """
4775 4776 if search:
4776 4777 for name, path in ui.configitems("paths"):
4777 4778 if name == search:
4778 4779 ui.status("%s\n" % util.hidepassword(path))
4779 4780 return
4780 4781 if not ui.quiet:
4781 4782 ui.warn(_("not found!\n"))
4782 4783 return 1
4783 4784 else:
4784 4785 for name, path in ui.configitems("paths"):
4785 4786 if ui.quiet:
4786 4787 ui.write("%s\n" % name)
4787 4788 else:
4788 4789 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4789 4790
4790 4791 @command('phase',
4791 4792 [('p', 'public', False, _('set changeset phase to public')),
4792 4793 ('d', 'draft', False, _('set changeset phase to draft')),
4793 4794 ('s', 'secret', False, _('set changeset phase to secret')),
4794 4795 ('f', 'force', False, _('allow to move boundary backward')),
4795 4796 ('r', 'rev', [], _('target revision'), _('REV')),
4796 4797 ],
4797 4798 _('[-p|-d|-s] [-f] [-r] REV...'))
4798 4799 def phase(ui, repo, *revs, **opts):
4799 4800 """set or show the current phase name
4800 4801
4801 4802 With no argument, show the phase name of specified revisions.
4802 4803
4803 4804 With one of -p/--public, -d/--draft or -s/--secret, change the
4804 4805 phase value of the specified revisions.
4805 4806
4806 4807 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4807 4808 lower phase to a higher phase. Phases are ordered as follows::
4808 4809
4809 4810 public < draft < secret
4810 4811
4811 4812 Returns 0 on success, 1 if no phases were changed or some could not
4812 4813 be changed.
4813 4814 """
4814 4815 # search for a unique phase argument
4815 4816 targetphase = None
4816 4817 for idx, name in enumerate(phases.phasenames):
4817 4818 if opts[name]:
4818 4819 if targetphase is not None:
4819 4820 raise util.Abort(_('only one phase can be specified'))
4820 4821 targetphase = idx
4821 4822
4822 4823 # look for specified revision
4823 4824 revs = list(revs)
4824 4825 revs.extend(opts['rev'])
4825 4826 if not revs:
4826 4827 raise util.Abort(_('no revisions specified'))
4827 4828
4828 4829 revs = scmutil.revrange(repo, revs)
4829 4830
4830 4831 lock = None
4831 4832 ret = 0
4832 4833 if targetphase is None:
4833 4834 # display
4834 4835 for r in revs:
4835 4836 ctx = repo[r]
4836 4837 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4837 4838 else:
4838 4839 tr = None
4839 4840 lock = repo.lock()
4840 4841 try:
4841 4842 tr = repo.transaction("phase")
4842 4843 # set phase
4843 4844 if not revs:
4844 4845 raise util.Abort(_('empty revision set'))
4845 4846 nodes = [repo[r].node() for r in revs]
4846 4847 # moving revision from public to draft may hide them
4847 4848 # We have to check result on an unfiltered repository
4848 4849 unfi = repo.unfiltered()
4849 4850 getphase = unfi._phasecache.phase
4850 4851 olddata = [getphase(unfi, r) for r in unfi]
4851 4852 phases.advanceboundary(repo, tr, targetphase, nodes)
4852 4853 if opts['force']:
4853 4854 phases.retractboundary(repo, tr, targetphase, nodes)
4854 4855 tr.close()
4855 4856 finally:
4856 4857 if tr is not None:
4857 4858 tr.release()
4858 4859 lock.release()
4859 4860 getphase = unfi._phasecache.phase
4860 4861 newdata = [getphase(unfi, r) for r in unfi]
4861 4862 changes = sum(newdata[r] != olddata[r] for r in unfi)
4862 4863 cl = unfi.changelog
4863 4864 rejected = [n for n in nodes
4864 4865 if newdata[cl.rev(n)] < targetphase]
4865 4866 if rejected:
4866 4867 ui.warn(_('cannot move %i changesets to a higher '
4867 4868 'phase, use --force\n') % len(rejected))
4868 4869 ret = 1
4869 4870 if changes:
4870 4871 msg = _('phase changed for %i changesets\n') % changes
4871 4872 if ret:
4872 4873 ui.status(msg)
4873 4874 else:
4874 4875 ui.note(msg)
4875 4876 else:
4876 4877 ui.warn(_('no phases changed\n'))
4877 4878 ret = 1
4878 4879 return ret
4879 4880
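# Editorial sketch (hypothetical helper, not part of Mercurial): as the
# comment in phase() notes, results have to be checked on an unfiltered
# repository because a phase move may hide changesets from the filtered view.
# Reading phases that way looks like this (assumes `repo` is a
# localrepository and `revs` an iterable of revision numbers):
def _example_readphases(repo, revs):
    unfi = repo.unfiltered()
    getphase = unfi._phasecache.phase
    return dict((r, phases.phasenames[getphase(unfi, r)]) for r in revs)
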
4880 4881 def postincoming(ui, repo, modheads, optupdate, checkout):
4881 4882 if modheads == 0:
4882 4883 return
4883 4884 if optupdate:
4884 4885 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
4885 4886 try:
4886 4887 ret = hg.update(repo, checkout)
4887 4888 except util.Abort, inst:
4888 4889 ui.warn(_("not updating: %s\n") % str(inst))
4889 4890 if inst.hint:
4890 4891 ui.warn(_("(%s)\n") % inst.hint)
4891 4892 return 0
4892 4893 if not ret and not checkout:
4893 4894 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4894 4895 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4895 4896 return ret
4896 4897 if modheads > 1:
4897 4898 currentbranchheads = len(repo.branchheads())
4898 4899 if currentbranchheads == modheads:
4899 4900 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4900 4901 elif currentbranchheads > 1:
4901 4902 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4902 4903 "merge)\n"))
4903 4904 else:
4904 4905 ui.status(_("(run 'hg heads' to see heads)\n"))
4905 4906 else:
4906 4907 ui.status(_("(run 'hg update' to get a working copy)\n"))
4907 4908
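# Editorial sketch (hypothetical helper, not part of Mercurial): when
# --update is not requested, the hint printed by postincoming() depends only
# on how many heads the incoming changes added and how many heads the current
# branch now has.  The decision table above, written as a pure function:
def _example_postpullhint(modheads, currentbranchheads):
    if modheads == 0:
        return None
    if modheads == 1:
        return "(run 'hg update' to get a working copy)"
    if currentbranchheads == modheads:
        return "(run 'hg heads' to see heads, 'hg merge' to merge)"
    if currentbranchheads > 1:
        return "(run 'hg heads .' to see heads, 'hg merge' to merge)"
    return "(run 'hg heads' to see heads)"
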
4908 4909 @command('^pull',
4909 4910 [('u', 'update', None,
4910 4911 _('update to new branch head if changesets were pulled')),
4911 4912 ('f', 'force', None, _('run even when remote repository is unrelated')),
4912 4913 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4913 4914 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4914 4915 ('b', 'branch', [], _('a specific branch you would like to pull'),
4915 4916 _('BRANCH')),
4916 4917 ] + remoteopts,
4917 4918 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4918 4919 def pull(ui, repo, source="default", **opts):
4919 4920 """pull changes from the specified source
4920 4921
4921 4922 Pull changes from a remote repository to a local one.
4922 4923
4923 4924 This finds all changes from the repository at the specified path
4924 4925 or URL and adds them to a local repository (the current one unless
4925 4926 -R is specified). By default, this does not update the copy of the
4926 4927 project in the working directory.
4927 4928
4928 4929 Use :hg:`incoming` if you want to see what would have been added
4929 4930 by a pull at the time you issued this command. If you then decide
4930 4931 to add those changes to the repository, you should use :hg:`pull
4931 4932 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4932 4933
4933 4934 If SOURCE is omitted, the 'default' path will be used.
4934 4935 See :hg:`help urls` for more information.
4935 4936
4936 4937 Returns 0 on success, 1 if an update had unresolved files.
4937 4938 """
4938 4939 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4939 4940 other = hg.peer(repo, opts, source)
4940 4941 try:
4941 4942 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4942 4943 revs, checkout = hg.addbranchrevs(repo, other, branches,
4943 4944 opts.get('rev'))
4944 4945
4945 4946 remotebookmarks = other.listkeys('bookmarks')
4946 4947
4947 4948 if opts.get('bookmark'):
4948 4949 if not revs:
4949 4950 revs = []
4950 4951 for b in opts['bookmark']:
4951 4952 if b not in remotebookmarks:
4952 4953 raise util.Abort(_('remote bookmark %s not found!') % b)
4953 4954 revs.append(remotebookmarks[b])
4954 4955
4955 4956 if revs:
4956 4957 try:
4957 4958 revs = [other.lookup(rev) for rev in revs]
4958 4959 except error.CapabilityError:
4959 4960 err = _("other repository doesn't support revision lookup, "
4960 4961 "so a rev cannot be specified.")
4961 4962 raise util.Abort(err)
4962 4963
4963 4964 modheads = exchange.pull(repo, other, heads=revs,
4964 4965 force=opts.get('force'),
4965 4966 bookmarks=opts.get('bookmark', ())).cgresult
4966 4967 if checkout:
4967 4968 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4968 4969 repo._subtoppath = source
4969 4970 try:
4970 4971 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4971 4972
4972 4973 finally:
4973 4974 del repo._subtoppath
4974 4975
4975 4976 finally:
4976 4977 other.close()
4977 4978 return ret
4978 4979
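# Editorial sketch (hypothetical helper, not part of Mercurial): stripped of
# bookmark, --rev and --update handling, a pull is a peer lookup followed by
# exchange.pull().  Minimal programmatic form mirroring the calls made above
# (assumes `ui`, `repo` and a `source` path or alias; keyword defaults are an
# assumption of this sketch):
def _example_barepull(ui, repo, source):
    source, branches = hg.parseurl(ui.expandpath(source), None)
    other = hg.peer(repo, {}, source)
    try:
        return exchange.pull(repo, other, heads=None,
                             force=False).cgresult
    finally:
        other.close()
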
4979 4980 @command('^push',
4980 4981 [('f', 'force', None, _('force push')),
4981 4982 ('r', 'rev', [],
4982 4983 _('a changeset intended to be included in the destination'),
4983 4984 _('REV')),
4984 4985 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4985 4986 ('b', 'branch', [],
4986 4987 _('a specific branch you would like to push'), _('BRANCH')),
4987 4988 ('', 'new-branch', False, _('allow pushing a new branch')),
4988 4989 ] + remoteopts,
4989 4990 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4990 4991 def push(ui, repo, dest=None, **opts):
4991 4992 """push changes to the specified destination
4992 4993
4993 4994 Push changesets from the local repository to the specified
4994 4995 destination.
4995 4996
4996 4997 This operation is symmetrical to pull: it is identical to a pull
4997 4998 in the destination repository from the current one.
4998 4999
4999 5000 By default, push will not allow creation of new heads at the
5000 5001 destination, since multiple heads would make it unclear which head
5001 5002 to use. In this situation, it is recommended to pull and merge
5002 5003 before pushing.
5003 5004
5004 5005 Use --new-branch if you want to allow push to create a new named
5005 5006 branch that is not present at the destination. This allows you to
5006 5007 only create a new branch without forcing other changes.
5007 5008
5008 5009 .. note::
5009 5010
5010 5011 Extra care should be taken with the -f/--force option,
5011 5012 which will push all new heads on all branches, an action which will
5012 5013 almost always cause confusion for collaborators.
5013 5014
5014 5015 If -r/--rev is used, the specified revision and all its ancestors
5015 5016 will be pushed to the remote repository.
5016 5017
5017 5018 If -B/--bookmark is used, the specified bookmarked revision, its
5018 5019 ancestors, and the bookmark will be pushed to the remote
5019 5020 repository.
5020 5021
5021 5022 Please see :hg:`help urls` for important details about ``ssh://``
5022 5023 URLs. If DESTINATION is omitted, a default path will be used.
5023 5024
5024 5025 Returns 0 if push was successful, 1 if nothing to push.
5025 5026 """
5026 5027
5027 5028 if opts.get('bookmark'):
5028 5029 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5029 5030 for b in opts['bookmark']:
5030 5031 # translate -B options to -r so changesets get pushed
5031 5032 if b in repo._bookmarks:
5032 5033 opts.setdefault('rev', []).append(b)
5033 5034 else:
5034 5035 # if we try to push a deleted bookmark, translate it to null
5035 5036 # this lets simultaneous -r, -b options continue working
5036 5037 opts.setdefault('rev', []).append("null")
5037 5038
5038 5039 dest = ui.expandpath(dest or 'default-push', dest or 'default')
5039 5040 dest, branches = hg.parseurl(dest, opts.get('branch'))
5040 5041 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5041 5042 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5042 5043 try:
5043 5044 other = hg.peer(repo, opts, dest)
5044 5045 except error.RepoError:
5045 5046 if dest == "default-push":
5046 5047 raise util.Abort(_("default repository not configured!"),
5047 5048 hint=_('see the "path" section in "hg help config"'))
5048 5049 else:
5049 5050 raise
5050 5051
5051 5052 if revs:
5052 5053 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5053 5054
5054 5055 repo._subtoppath = dest
5055 5056 try:
5056 5057 # push subrepos depth-first for coherent ordering
5057 5058 c = repo['']
5058 5059 subs = c.substate # only repos that are committed
5059 5060 for s in sorted(subs):
5060 5061 result = c.sub(s).push(opts)
5061 5062 if result == 0:
5062 5063 return not result
5063 5064 finally:
5064 5065 del repo._subtoppath
5065 5066 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5066 5067 newbranch=opts.get('new_branch'),
5067 5068 bookmarks=opts.get('bookmark', ()))
5068 5069
5069 5070 result = not pushop.cgresult
5070 5071
5071 5072 if pushop.bkresult is not None:
5072 5073 if pushop.bkresult == 2:
5073 5074 result = 2
5074 5075 elif not result and pushop.bkresult:
5075 5076 result = 2
5076 5077
5077 5078 return result
5078 5079
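# Editorial sketch (hypothetical helper, not part of Mercurial): the exit
# code assembled above combines the changegroup result with the bookmark
# result; from the code, a false cgresult maps to 1 ("nothing to push") and a
# failed bookmark push forces 2.  The same mapping as a pure function:
def _example_pushexitcode(cgresult, bkresult):
    result = not cgresult
    if bkresult is not None:
        if bkresult == 2:
            result = 2
        elif not result and bkresult:
            result = 2
    return result
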
5079 5080 @command('recover', [])
5080 5081 def recover(ui, repo):
5081 5082 """roll back an interrupted transaction
5082 5083
5083 5084 Recover from an interrupted commit or pull.
5084 5085
5085 5086 This command tries to fix the repository status after an
5086 5087 interrupted operation. It should only be necessary when Mercurial
5087 5088 suggests it.
5088 5089
5089 5090 Returns 0 if successful, 1 if nothing to recover or verify fails.
5090 5091 """
5091 5092 if repo.recover():
5092 5093 return hg.verify(repo)
5093 5094 return 1
5094 5095
5095 5096 @command('^remove|rm',
5096 5097 [('A', 'after', None, _('record delete for missing files')),
5097 5098 ('f', 'force', None,
5098 5099 _('remove (and delete) file even if added or modified')),
5099 5100 ] + subrepoopts + walkopts,
5100 5101 _('[OPTION]... FILE...'),
5101 5102 inferrepo=True)
5102 5103 def remove(ui, repo, *pats, **opts):
5103 5104 """remove the specified files on the next commit
5104 5105
5105 5106 Schedule the indicated files for removal from the current branch.
5106 5107
5107 5108 This command schedules the files to be removed at the next commit.
5108 5109 To undo a remove before that, see :hg:`revert`. To undo added
5109 5110 files, see :hg:`forget`.
5110 5111
5111 5112 .. container:: verbose
5112 5113
5113 5114 -A/--after can be used to remove only files that have already
5114 5115 been deleted, -f/--force can be used to force deletion, and -Af
5115 5116 can be used to remove files from the next revision without
5116 5117 deleting them from the working directory.
5117 5118
5118 5119 The following table details the behavior of remove for different
5119 5120 file states (columns) and option combinations (rows). The file
5120 5121 states are Added [A], Clean [C], Modified [M] and Missing [!]
5121 5122 (as reported by :hg:`status`). The actions are Warn, Remove
5122 5123 (from branch) and Delete (from disk):
5123 5124
5124 5125 ========= == == == ==
5125 5126 opt/state A C M !
5126 5127 ========= == == == ==
5127 5128 none W RD W R
5128 5129 -f R RD RD R
5129 5130 -A W W W R
5130 5131 -Af R R R R
5131 5132 ========= == == == ==
5132 5133
5133 5134 Note that remove never deletes files in Added [A] state from the
5134 5135 working directory, not even if option --force is specified.
5135 5136
5136 5137 Returns 0 on success, 1 if any warnings encountered.
5137 5138 """
5138 5139
5139 5140 after, force = opts.get('after'), opts.get('force')
5140 5141 if not pats and not after:
5141 5142 raise util.Abort(_('no files specified'))
5142 5143
5143 5144 m = scmutil.match(repo[None], pats, opts)
5144 5145 subrepos = opts.get('subrepos')
5145 5146 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5146 5147
5147 5148 @command('rename|move|mv',
5148 5149 [('A', 'after', None, _('record a rename that has already occurred')),
5149 5150 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5150 5151 ] + walkopts + dryrunopts,
5151 5152 _('[OPTION]... SOURCE... DEST'))
5152 5153 def rename(ui, repo, *pats, **opts):
5153 5154 """rename files; equivalent of copy + remove
5154 5155
5155 5156 Mark dest as copies of sources; mark sources for deletion. If dest
5156 5157 is a directory, copies are put in that directory. If dest is a
5157 5158 file, there can only be one source.
5158 5159
5159 5160 By default, this command copies the contents of files as they
5160 5161 exist in the working directory. If invoked with -A/--after, the
5161 5162 operation is recorded, but no copying is performed.
5162 5163
5163 5164 This command takes effect at the next commit. To undo a rename
5164 5165 before that, see :hg:`revert`.
5165 5166
5166 5167 Returns 0 on success, 1 if errors are encountered.
5167 5168 """
5168 5169 wlock = repo.wlock(False)
5169 5170 try:
5170 5171 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5171 5172 finally:
5172 5173 wlock.release()
5173 5174
5174 5175 @command('resolve',
5175 5176 [('a', 'all', None, _('select all unresolved files')),
5176 5177 ('l', 'list', None, _('list state of files needing merge')),
5177 5178 ('m', 'mark', None, _('mark files as resolved')),
5178 5179 ('u', 'unmark', None, _('mark files as unresolved')),
5179 5180 ('n', 'no-status', None, _('hide status prefix'))]
5180 5181 + mergetoolopts + walkopts,
5181 5182 _('[OPTION]... [FILE]...'),
5182 5183 inferrepo=True)
5183 5184 def resolve(ui, repo, *pats, **opts):
5184 5185 """redo merges or set/view the merge status of files
5185 5186
5186 5187 Merges with unresolved conflicts are often the result of
5187 5188 non-interactive merging using the ``internal:merge`` configuration
5188 5189 setting, or a command-line merge tool like ``diff3``. The resolve
5189 5190 command is used to manage the files involved in a merge, after
5190 5191 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5191 5192 working directory must have two parents). See :hg:`help
5192 5193 merge-tools` for information on configuring merge tools.
5193 5194
5194 5195 The resolve command can be used in the following ways:
5195 5196
5196 5197 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5197 5198 files, discarding any previous merge attempts. Re-merging is not
5198 5199 performed for files already marked as resolved. Use ``--all/-a``
5199 5200 to select all unresolved files. ``--tool`` can be used to specify
5200 5201 the merge tool used for the given files. It overrides the HGMERGE
5201 5202 environment variable and your configuration files. Previous file
5202 5203 contents are saved with a ``.orig`` suffix.
5203 5204
5204 5205 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5205 5206 (e.g. after having manually fixed-up the files). The default is
5206 5207 to mark all unresolved files.
5207 5208
5208 5209 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5209 5210 default is to mark all resolved files.
5210 5211
5211 5212 - :hg:`resolve -l`: list files which had or still have conflicts.
5212 5213 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5213 5214
5214 5215 Note that Mercurial will not let you commit files with unresolved
5215 5216 merge conflicts. You must use :hg:`resolve -m ...` before you can
5216 5217 commit after a conflicting merge.
5217 5218
5218 5219 Returns 0 on success, 1 if any files fail a resolve attempt.
5219 5220 """
5220 5221
5221 5222 all, mark, unmark, show, nostatus = \
5222 5223 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
5223 5224
5224 5225 if (show and (mark or unmark)) or (mark and unmark):
5225 5226 raise util.Abort(_("too many options specified"))
5226 5227 if pats and all:
5227 5228 raise util.Abort(_("can't specify --all and patterns"))
5228 5229 if not (all or pats or show or mark or unmark):
5229 5230 raise util.Abort(_('no files or directories specified'),
5230 5231 hint=('use --all to remerge all files'))
5231 5232
5232 5233 wlock = repo.wlock()
5233 5234 try:
5234 5235 ms = mergemod.mergestate(repo)
5235 5236
5236 5237 if not (ms.active() or repo.dirstate.p2() != nullid) and not show:
5237 5238 raise util.Abort(
5238 5239 _('resolve command not applicable when not merging'))
5239 5240
5240 5241 m = scmutil.match(repo[None], pats, opts)
5241 5242 ret = 0
5242 5243 didwork = False
5243 5244
5244 5245 for f in ms:
5245 5246 if not m(f):
5246 5247 continue
5247 5248
5248 5249 didwork = True
5249 5250
5250 5251 if show:
5251 5252 if nostatus:
5252 5253 ui.write("%s\n" % f)
5253 5254 else:
5254 5255 ui.write("%s %s\n" % (ms[f].upper(), f),
5255 5256 label='resolve.' +
5256 5257 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
5257 5258 elif mark:
5258 5259 ms.mark(f, "r")
5259 5260 elif unmark:
5260 5261 ms.mark(f, "u")
5261 5262 else:
5262 5263 wctx = repo[None]
5263 5264
5264 5265 # backup pre-resolve (merge uses .orig for its own purposes)
5265 5266 a = repo.wjoin(f)
5266 5267 util.copyfile(a, a + ".resolve")
5267 5268
5268 5269 try:
5269 5270 # resolve file
5270 5271 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5271 5272 'resolve')
5272 5273 if ms.resolve(f, wctx):
5273 5274 ret = 1
5274 5275 finally:
5275 5276 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5276 5277 ms.commit()
5277 5278
5278 5279 # replace filemerge's .orig file with our resolve file
5279 5280 util.rename(a + ".resolve", a + ".orig")
5280 5281
5281 5282 ms.commit()
5282 5283
5283 5284 if not didwork and pats:
5284 5285 ui.warn(_("arguments do not match paths that need resolving\n"))
5285 5286
5286 5287 finally:
5287 5288 wlock.release()
5288 5289
5289 5290 # Nudge users into finishing an unfinished operation. We don't print
5290 5291 # this with the list/show operation because we want list/show to remain
5291 5292 # machine readable.
5292 5293 if not list(ms.unresolved()) and not show:
5293 5294 ui.status(_('(no more unresolved files)\n'))
5294 5295
5295 5296 return ret
5296 5297
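# Editorial sketch (hypothetical helper, not part of Mercurial): the merge
# state driven above maps each conflicted file to 'u' (unresolved) or 'r'
# (resolved), so listing the files that still need attention is a one-liner
# (summary() below uses the same expression):
def _example_unresolved(repo):
    ms = mergemod.mergestate(repo)
    return [f for f in ms if ms[f] == 'u']
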
5297 5298 @command('revert',
5298 5299 [('a', 'all', None, _('revert all changes when no arguments given')),
5299 5300 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5300 5301 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5301 5302 ('C', 'no-backup', None, _('do not save backup copies of files')),
5302 5303 ] + walkopts + dryrunopts,
5303 5304 _('[OPTION]... [-r REV] [NAME]...'))
5304 5305 def revert(ui, repo, *pats, **opts):
5305 5306 """restore files to their checkout state
5306 5307
5307 5308 .. note::
5308 5309
5309 5310 To check out earlier revisions, you should use :hg:`update REV`.
5310 5311 To cancel an uncommitted merge (and lose your changes),
5311 5312 use :hg:`update --clean .`.
5312 5313
5313 5314 With no revision specified, revert the specified files or directories
5314 5315 to the contents they had in the parent of the working directory.
5315 5316 This restores the contents of files to an unmodified
5316 5317 state and unschedules adds, removes, copies, and renames. If the
5317 5318 working directory has two parents, you must explicitly specify a
5318 5319 revision.
5319 5320
5320 5321 Using the -r/--rev or -d/--date options, revert the given files or
5321 5322 directories to their states as of a specific revision. Because
5322 5323 revert does not change the working directory parents, this will
5323 5324 cause these files to appear modified. This can be helpful to "back
5324 5325 out" some or all of an earlier change. See :hg:`backout` for a
5325 5326 related method.
5326 5327
5327 5328 Modified files are saved with a .orig suffix before reverting.
5328 5329 To disable these backups, use --no-backup.
5329 5330
5330 5331 See :hg:`help dates` for a list of formats valid for -d/--date.
5331 5332
5332 5333 Returns 0 on success.
5333 5334 """
5334 5335
5335 5336 if opts.get("date"):
5336 5337 if opts.get("rev"):
5337 5338 raise util.Abort(_("you can't specify a revision and a date"))
5338 5339 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5339 5340
5340 5341 parent, p2 = repo.dirstate.parents()
5341 5342 if not opts.get('rev') and p2 != nullid:
5342 5343 # revert after merge is a trap for new users (issue2915)
5343 5344 raise util.Abort(_('uncommitted merge with no revision specified'),
5344 5345 hint=_('use "hg update" or see "hg help revert"'))
5345 5346
5346 5347 ctx = scmutil.revsingle(repo, opts.get('rev'))
5347 5348
5348 5349 if not pats and not opts.get('all'):
5349 5350 msg = _("no files or directories specified")
5350 5351 if p2 != nullid:
5351 5352 hint = _("uncommitted merge, use --all to discard all changes,"
5352 5353 " or 'hg update -C .' to abort the merge")
5353 5354 raise util.Abort(msg, hint=hint)
5354 5355 dirty = util.any(repo.status())
5355 5356 node = ctx.node()
5356 5357 if node != parent:
5357 5358 if dirty:
5358 5359 hint = _("uncommitted changes, use --all to discard all"
5359 5360 " changes, or 'hg update %s' to update") % ctx.rev()
5360 5361 else:
5361 5362 hint = _("use --all to revert all files,"
5362 5363 " or 'hg update %s' to update") % ctx.rev()
5363 5364 elif dirty:
5364 5365 hint = _("uncommitted changes, use --all to discard all changes")
5365 5366 else:
5366 5367 hint = _("use --all to revert all files")
5367 5368 raise util.Abort(msg, hint=hint)
5368 5369
5369 5370 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5370 5371
5371 5372 @command('rollback', dryrunopts +
5372 5373 [('f', 'force', False, _('ignore safety measures'))])
5373 5374 def rollback(ui, repo, **opts):
5374 5375 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5375 5376
5376 5377 Please use :hg:`commit --amend` instead of rollback to correct
5377 5378 mistakes in the last commit.
5378 5379
5379 5380 This command should be used with care. There is only one level of
5380 5381 rollback, and there is no way to undo a rollback. It will also
5381 5382 restore the dirstate at the time of the last transaction, losing
5382 5383 any dirstate changes since that time. This command does not alter
5383 5384 the working directory.
5384 5385
5385 5386 Transactions are used to encapsulate the effects of all commands
5386 5387 that create new changesets or propagate existing changesets into a
5387 5388 repository.
5388 5389
5389 5390 .. container:: verbose
5390 5391
5391 5392 For example, the following commands are transactional, and their
5392 5393 effects can be rolled back:
5393 5394
5394 5395 - commit
5395 5396 - import
5396 5397 - pull
5397 5398 - push (with this repository as the destination)
5398 5399 - unbundle
5399 5400
5400 5401 To avoid permanent data loss, rollback will refuse to roll back a
5401 5402 commit transaction if it isn't checked out. Use --force to
5402 5403 override this protection.
5403 5404
5404 5405 This command is not intended for use on public repositories. Once
5405 5406 changes are visible for pull by other users, rolling a transaction
5406 5407 back locally is ineffective (someone else may already have pulled
5407 5408 the changes). Furthermore, a race is possible with readers of the
5408 5409 repository; for example an in-progress pull from the repository
5409 5410 may fail if a rollback is performed.
5410 5411
5411 5412 Returns 0 on success, 1 if no rollback data is available.
5412 5413 """
5413 5414 return repo.rollback(dryrun=opts.get('dry_run'),
5414 5415 force=opts.get('force'))
5415 5416
5416 5417 @command('root', [])
5417 5418 def root(ui, repo):
5418 5419 """print the root (top) of the current working directory
5419 5420
5420 5421 Print the root directory of the current repository.
5421 5422
5422 5423 Returns 0 on success.
5423 5424 """
5424 5425 ui.write(repo.root + "\n")
5425 5426
5426 5427 @command('^serve',
5427 5428 [('A', 'accesslog', '', _('name of access log file to write to'),
5428 5429 _('FILE')),
5429 5430 ('d', 'daemon', None, _('run server in background')),
5430 5431 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
5431 5432 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5432 5433 # use string type, then we can check if something was passed
5433 5434 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5434 5435 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5435 5436 _('ADDR')),
5436 5437 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5437 5438 _('PREFIX')),
5438 5439 ('n', 'name', '',
5439 5440 _('name to show in web pages (default: working directory)'), _('NAME')),
5440 5441 ('', 'web-conf', '',
5441 5442 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5442 5443 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5443 5444 _('FILE')),
5444 5445 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5445 5446 ('', 'stdio', None, _('for remote clients')),
5446 5447 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5447 5448 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5448 5449 ('', 'style', '', _('template style to use'), _('STYLE')),
5449 5450 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5450 5451 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5451 5452 _('[OPTION]...'),
5452 5453 optionalrepo=True)
5453 5454 def serve(ui, repo, **opts):
5454 5455 """start stand-alone webserver
5455 5456
5456 5457 Start a local HTTP repository browser and pull server. You can use
5457 5458 this for ad-hoc sharing and browsing of repositories. It is
5458 5459 recommended to use a real web server to serve a repository for
5459 5460 longer periods of time.
5460 5461
5461 5462 Please note that the server does not implement access control.
5462 5463 This means that, by default, anybody can read from the server and
5463 5464 nobody can write to it. Set the ``web.allow_push``
5464 5465 option to ``*`` to allow everybody to push to the server. You
5465 5466 should use a real web server if you need to authenticate users.
5466 5467
5467 5468 By default, the server logs accesses to stdout and errors to
5468 5469 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5469 5470 files.
5470 5471
5471 5472 To have the server choose a free port number to listen on, specify
5472 5473 a port number of 0; in this case, the server will print the port
5473 5474 number it uses.
5474 5475
5475 5476 Returns 0 on success.
5476 5477 """
5477 5478
5478 5479 if opts["stdio"] and opts["cmdserver"]:
5479 5480 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5480 5481
5481 5482 if opts["stdio"]:
5482 5483 if repo is None:
5483 5484 raise error.RepoError(_("there is no Mercurial repository here"
5484 5485 " (.hg not found)"))
5485 5486 s = sshserver.sshserver(ui, repo)
5486 5487 s.serve_forever()
5487 5488
5488 5489 if opts["cmdserver"]:
5489 5490 service = commandserver.createservice(ui, repo, opts)
5490 5491 return cmdutil.service(opts, initfn=service.init, runfn=service.run)
5491 5492
5492 5493 # this way we can check if something was given in the command-line
5493 5494 if opts.get('port'):
5494 5495 opts['port'] = util.getport(opts.get('port'))
5495 5496
5496 5497 baseui = repo and repo.baseui or ui
5497 5498 optlist = ("name templates style address port prefix ipv6"
5498 5499 " accesslog errorlog certificate encoding")
5499 5500 for o in optlist.split():
5500 5501 val = opts.get(o, '')
5501 5502 if val in (None, ''): # should check against default options instead
5502 5503 continue
5503 5504 baseui.setconfig("web", o, val, 'serve')
5504 5505 if repo and repo.ui != baseui:
5505 5506 repo.ui.setconfig("web", o, val, 'serve')
5506 5507
5507 5508 o = opts.get('web_conf') or opts.get('webdir_conf')
5508 5509 if not o:
5509 5510 if not repo:
5510 5511 raise error.RepoError(_("there is no Mercurial repository"
5511 5512 " here (.hg not found)"))
5512 5513 o = repo
5513 5514
5514 5515 app = hgweb.hgweb(o, baseui=baseui)
5515 5516 service = httpservice(ui, app, opts)
5516 5517 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5517 5518
5518 5519 class httpservice(object):
5519 5520 def __init__(self, ui, app, opts):
5520 5521 self.ui = ui
5521 5522 self.app = app
5522 5523 self.opts = opts
5523 5524
5524 5525 def init(self):
5525 5526 util.setsignalhandler()
5526 5527 self.httpd = hgweb_server.create_server(self.ui, self.app)
5527 5528
5528 5529 if self.opts['port'] and not self.ui.verbose:
5529 5530 return
5530 5531
5531 5532 if self.httpd.prefix:
5532 5533 prefix = self.httpd.prefix.strip('/') + '/'
5533 5534 else:
5534 5535 prefix = ''
5535 5536
5536 5537 port = ':%d' % self.httpd.port
5537 5538 if port == ':80':
5538 5539 port = ''
5539 5540
5540 5541 bindaddr = self.httpd.addr
5541 5542 if bindaddr == '0.0.0.0':
5542 5543 bindaddr = '*'
5543 5544 elif ':' in bindaddr: # IPv6
5544 5545 bindaddr = '[%s]' % bindaddr
5545 5546
5546 5547 fqaddr = self.httpd.fqaddr
5547 5548 if ':' in fqaddr:
5548 5549 fqaddr = '[%s]' % fqaddr
5549 5550 if self.opts['port']:
5550 5551 write = self.ui.status
5551 5552 else:
5552 5553 write = self.ui.write
5553 5554 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5554 5555 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5555 5556 self.ui.flush() # avoid buffering of status message
5556 5557
5557 5558 def run(self):
5558 5559 self.httpd.serve_forever()
5559 5560
5560 5561
5561 5562 @command('^status|st',
5562 5563 [('A', 'all', None, _('show status of all files')),
5563 5564 ('m', 'modified', None, _('show only modified files')),
5564 5565 ('a', 'added', None, _('show only added files')),
5565 5566 ('r', 'removed', None, _('show only removed files')),
5566 5567 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5567 5568 ('c', 'clean', None, _('show only files without changes')),
5568 5569 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5569 5570 ('i', 'ignored', None, _('show only ignored files')),
5570 5571 ('n', 'no-status', None, _('hide status prefix')),
5571 5572 ('C', 'copies', None, _('show source of copied files')),
5572 5573 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5573 5574 ('', 'rev', [], _('show difference from revision'), _('REV')),
5574 5575 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5575 5576 ] + walkopts + subrepoopts + formatteropts,
5576 5577 _('[OPTION]... [FILE]...'),
5577 5578 inferrepo=True)
5578 5579 def status(ui, repo, *pats, **opts):
5579 5580 """show changed files in the working directory
5580 5581
5581 5582 Show status of files in the repository. If names are given, only
5582 5583 files that match are shown. Files that are clean or ignored or
5583 5584 the source of a copy/move operation are not listed unless
5584 5585 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5585 5586 Unless options described with "show only ..." are given, the
5586 5587 options -mardu are used.
5587 5588
5588 5589 Option -q/--quiet hides untracked (unknown and ignored) files
5589 5590 unless explicitly requested with -u/--unknown or -i/--ignored.
5590 5591
5591 5592 .. note::
5592 5593
5593 5594 status may appear to disagree with diff if permissions have
5594 5595 changed or a merge has occurred. The standard diff format does
5595 5596 not report permission changes and diff only reports changes
5596 5597 relative to one merge parent.
5597 5598
5598 5599 If one revision is given, it is used as the base revision.
5599 5600 If two revisions are given, the differences between them are
5600 5601 shown. The --change option can also be used as a shortcut to list
5601 5602 the changed files of a revision from its first parent.
5602 5603
5603 5604 The codes used to show the status of files are::
5604 5605
5605 5606 M = modified
5606 5607 A = added
5607 5608 R = removed
5608 5609 C = clean
5609 5610 ! = missing (deleted by non-hg command, but still tracked)
5610 5611 ? = not tracked
5611 5612 I = ignored
5612 5613 = origin of the previous file (with --copies)
5613 5614
5614 5615 .. container:: verbose
5615 5616
5616 5617 Examples:
5617 5618
5618 5619 - show changes in the working directory relative to a
5619 5620 changeset::
5620 5621
5621 5622 hg status --rev 9353
5622 5623
5623 5624 - show all changes including copies in an existing changeset::
5624 5625
5625 5626 hg status --copies --change 9353
5626 5627
5627 5628 - get a NUL separated list of added files, suitable for xargs::
5628 5629
5629 5630 hg status -an0
5630 5631
5631 5632 Returns 0 on success.
5632 5633 """
5633 5634
5634 5635 revs = opts.get('rev')
5635 5636 change = opts.get('change')
5636 5637
5637 5638 if revs and change:
5638 5639 msg = _('cannot specify --rev and --change at the same time')
5639 5640 raise util.Abort(msg)
5640 5641 elif change:
5641 5642 node2 = scmutil.revsingle(repo, change, None).node()
5642 5643 node1 = repo[node2].p1().node()
5643 5644 else:
5644 5645 node1, node2 = scmutil.revpair(repo, revs)
5645 5646
5646 5647 cwd = (pats and repo.getcwd()) or ''
5647 5648 end = opts.get('print0') and '\0' or '\n'
5648 5649 copy = {}
5649 5650 states = 'modified added removed deleted unknown ignored clean'.split()
5650 5651 show = [k for k in states if opts.get(k)]
5651 5652 if opts.get('all'):
5652 5653 show += ui.quiet and (states[:4] + ['clean']) or states
5653 5654 if not show:
5654 5655 show = ui.quiet and states[:4] or states[:5]
5655 5656
5656 5657 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5657 5658 'ignored' in show, 'clean' in show, 'unknown' in show,
5658 5659 opts.get('subrepos'))
5659 5660 changestates = zip(states, 'MAR!?IC', stat)
5660 5661
5661 5662 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5662 5663 copy = copies.pathcopies(repo[node1], repo[node2])
5663 5664
5664 5665 fm = ui.formatter('status', opts)
5665 5666 fmt = '%s' + end
5666 5667 showchar = not opts.get('no_status')
5667 5668
5668 5669 for state, char, files in changestates:
5669 5670 if state in show:
5670 5671 label = 'status.' + state
5671 5672 for f in files:
5672 5673 fm.startitem()
5673 5674 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5674 5675 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5675 5676 if f in copy:
5676 5677 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5677 5678 label='status.copied')
5678 5679 fm.end()
5679 5680
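# Editorial sketch (hypothetical helper, not part of Mercurial): the status
# letters pair up positionally with the fields returned by repo.status() --
# M, A, R, !, ?, I, C in that order, exactly as zipped above.  A minimal
# standalone form (assumes `repo` is a localrepository):
def _example_statusbyletter(repo):
    stat = repo.status(ignored=True, clean=True, unknown=True)
    return dict(zip('MAR!?IC', stat))
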
5680 5681 @command('^summary|sum',
5681 5682 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5682 5683 def summary(ui, repo, **opts):
5683 5684 """summarize working directory state
5684 5685
5685 5686 This generates a brief summary of the working directory state,
5686 5687 including parents, branch, commit status, and available updates.
5687 5688
5688 5689 With the --remote option, this will check the default paths for
5689 5690 incoming and outgoing changes. This can be time-consuming.
5690 5691
5691 5692 Returns 0 on success.
5692 5693 """
5693 5694
5694 5695 ctx = repo[None]
5695 5696 parents = ctx.parents()
5696 5697 pnode = parents[0].node()
5697 5698 marks = []
5698 5699
5699 5700 for p in parents:
5700 5701 # label with log.changeset (instead of log.parent) since this
5701 5702 # shows a working directory parent *changeset*:
5702 5703 # i18n: column positioning for "hg summary"
5703 5704 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5704 5705 label='log.changeset changeset.%s' % p.phasestr())
5705 5706 ui.write(' '.join(p.tags()), label='log.tag')
5706 5707 if p.bookmarks():
5707 5708 marks.extend(p.bookmarks())
5708 5709 if p.rev() == -1:
5709 5710 if not len(repo):
5710 5711 ui.write(_(' (empty repository)'))
5711 5712 else:
5712 5713 ui.write(_(' (no revision checked out)'))
5713 5714 ui.write('\n')
5714 5715 if p.description():
5715 5716 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5716 5717 label='log.summary')
5717 5718
5718 5719 branch = ctx.branch()
5719 5720 bheads = repo.branchheads(branch)
5720 5721 # i18n: column positioning for "hg summary"
5721 5722 m = _('branch: %s\n') % branch
5722 5723 if branch != 'default':
5723 5724 ui.write(m, label='log.branch')
5724 5725 else:
5725 5726 ui.status(m, label='log.branch')
5726 5727
5727 5728 if marks:
5728 5729 current = repo._bookmarkcurrent
5729 5730 # i18n: column positioning for "hg summary"
5730 5731 ui.write(_('bookmarks:'), label='log.bookmark')
5731 5732 if current is not None:
5732 5733 if current in marks:
5733 5734 ui.write(' *' + current, label='bookmarks.current')
5734 5735 marks.remove(current)
5735 5736 else:
5736 5737 ui.write(' [%s]' % current, label='bookmarks.current')
5737 5738 for m in marks:
5738 5739 ui.write(' ' + m, label='log.bookmark')
5739 5740 ui.write('\n', label='log.bookmark')
5740 5741
5741 5742 status = repo.status(unknown=True)
5742 5743
5743 5744 c = repo.dirstate.copies()
5744 5745 copied, renamed = [], []
5745 5746 for d, s in c.iteritems():
5746 5747 if s in status.removed:
5747 5748 status.removed.remove(s)
5748 5749 renamed.append(d)
5749 5750 else:
5750 5751 copied.append(d)
5751 5752 if d in status.added:
5752 5753 status.added.remove(d)
5753 5754
5754 5755 ms = mergemod.mergestate(repo)
5755 5756 unresolved = [f for f in ms if ms[f] == 'u']
5756 5757
5757 5758 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5758 5759
5759 5760 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5760 5761 (ui.label(_('%d added'), 'status.added'), status.added),
5761 5762 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5762 5763 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5763 5764 (ui.label(_('%d copied'), 'status.copied'), copied),
5764 5765 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5765 5766 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5766 5767 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5767 5768 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5768 5769 t = []
5769 5770 for l, s in labels:
5770 5771 if s:
5771 5772 t.append(l % len(s))
5772 5773
5773 5774 t = ', '.join(t)
5774 5775 cleanworkdir = False
5775 5776
5776 5777 if repo.vfs.exists('updatestate'):
5777 5778 t += _(' (interrupted update)')
5778 5779 elif len(parents) > 1:
5779 5780 t += _(' (merge)')
5780 5781 elif branch != parents[0].branch():
5781 5782 t += _(' (new branch)')
5782 5783 elif (parents[0].closesbranch() and
5783 5784 pnode in repo.branchheads(branch, closed=True)):
5784 5785 t += _(' (head closed)')
5785 5786 elif not (status.modified or status.added or status.removed or renamed or
5786 5787 copied or subs):
5787 5788 t += _(' (clean)')
5788 5789 cleanworkdir = True
5789 5790 elif pnode not in bheads:
5790 5791 t += _(' (new branch head)')
5791 5792
5792 5793 if cleanworkdir:
5793 5794 # i18n: column positioning for "hg summary"
5794 5795 ui.status(_('commit: %s\n') % t.strip())
5795 5796 else:
5796 5797 # i18n: column positioning for "hg summary"
5797 5798 ui.write(_('commit: %s\n') % t.strip())
5798 5799
5799 5800 # all ancestors of branch heads - all ancestors of parent = new csets
5800 5801 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5801 5802 bheads))
5802 5803
5803 5804 if new == 0:
5804 5805 # i18n: column positioning for "hg summary"
5805 5806 ui.status(_('update: (current)\n'))
5806 5807 elif pnode not in bheads:
5807 5808 # i18n: column positioning for "hg summary"
5808 5809 ui.write(_('update: %d new changesets (update)\n') % new)
5809 5810 else:
5810 5811 # i18n: column positioning for "hg summary"
5811 5812 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5812 5813 (new, len(bheads)))
5813 5814
5814 5815 cmdutil.summaryhooks(ui, repo)
5815 5816
5816 5817 if opts.get('remote'):
5817 5818 needsincoming, needsoutgoing = True, True
5818 5819 else:
5819 5820 needsincoming, needsoutgoing = False, False
5820 5821 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5821 5822 if i:
5822 5823 needsincoming = True
5823 5824 if o:
5824 5825 needsoutgoing = True
5825 5826 if not needsincoming and not needsoutgoing:
5826 5827 return
5827 5828
5828 5829 def getincoming():
5829 5830 source, branches = hg.parseurl(ui.expandpath('default'))
5830 5831 sbranch = branches[0]
5831 5832 try:
5832 5833 other = hg.peer(repo, {}, source)
5833 5834 except error.RepoError:
5834 5835 if opts.get('remote'):
5835 5836 raise
5836 5837 return source, sbranch, None, None, None
5837 5838 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5838 5839 if revs:
5839 5840 revs = [other.lookup(rev) for rev in revs]
5840 5841 ui.debug('comparing with %s\n' % util.hidepassword(source))
5841 5842 repo.ui.pushbuffer()
5842 5843 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5843 5844 repo.ui.popbuffer()
5844 5845 return source, sbranch, other, commoninc, commoninc[1]
5845 5846
5846 5847 if needsincoming:
5847 5848 source, sbranch, sother, commoninc, incoming = getincoming()
5848 5849 else:
5849 5850 source = sbranch = sother = commoninc = incoming = None
5850 5851
5851 5852 def getoutgoing():
5852 5853 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5853 5854 dbranch = branches[0]
5854 5855 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5855 5856 if source != dest:
5856 5857 try:
5857 5858 dother = hg.peer(repo, {}, dest)
5858 5859 except error.RepoError:
5859 5860 if opts.get('remote'):
5860 5861 raise
5861 5862 return dest, dbranch, None, None
5862 5863 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5863 5864 elif sother is None:
5864 5865 # there is no explicit destination peer, but the source one is invalid
5865 5866 return dest, dbranch, None, None
5866 5867 else:
5867 5868 dother = sother
5868 5869 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5869 5870 common = None
5870 5871 else:
5871 5872 common = commoninc
5872 5873 if revs:
5873 5874 revs = [repo.lookup(rev) for rev in revs]
5874 5875 repo.ui.pushbuffer()
5875 5876 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5876 5877 commoninc=common)
5877 5878 repo.ui.popbuffer()
5878 5879 return dest, dbranch, dother, outgoing
5879 5880
5880 5881 if needsoutgoing:
5881 5882 dest, dbranch, dother, outgoing = getoutgoing()
5882 5883 else:
5883 5884 dest = dbranch = dother = outgoing = None
5884 5885
5885 5886 if opts.get('remote'):
5886 5887 t = []
5887 5888 if incoming:
5888 5889 t.append(_('1 or more incoming'))
5889 5890 o = outgoing.missing
5890 5891 if o:
5891 5892 t.append(_('%d outgoing') % len(o))
5892 5893 other = dother or sother
5893 5894 if 'bookmarks' in other.listkeys('namespaces'):
5894 5895 lmarks = repo.listkeys('bookmarks')
5895 5896 rmarks = other.listkeys('bookmarks')
5896 5897 diff = set(rmarks) - set(lmarks)
5897 5898 if len(diff) > 0:
5898 5899 t.append(_('%d incoming bookmarks') % len(diff))
5899 5900 diff = set(lmarks) - set(rmarks)
5900 5901 if len(diff) > 0:
5901 5902 t.append(_('%d outgoing bookmarks') % len(diff))
5902 5903
5903 5904 if t:
5904 5905 # i18n: column positioning for "hg summary"
5905 5906 ui.write(_('remote: %s\n') % (', '.join(t)))
5906 5907 else:
5907 5908 # i18n: column positioning for "hg summary"
5908 5909 ui.status(_('remote: (synced)\n'))
5909 5910
5910 5911 cmdutil.summaryremotehooks(ui, repo, opts,
5911 5912 ((source, sbranch, sother, commoninc),
5912 5913 (dest, dbranch, dother, outgoing)))
5913 5914
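# --- editor's note (not part of the commit) --------------------------------
# The "update:" figure above counts changesets that are ancestors of the
# branch heads but not of the working directory parents (findmissing).
# A roughly equivalent revset, run from the repository root, would be:
#
#   hg log -r "ancestors(head() and branch(.)) - ancestors(parents())" \
#          --template "{rev}\n" | wc -l
#
# i.e. "all ancestors of the branch heads minus all ancestors of the parent".
# ----------------------------------------------------------------------------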
5914 5915 @command('tag',
5915 5916 [('f', 'force', None, _('force tag')),
5916 5917 ('l', 'local', None, _('make the tag local')),
5917 5918 ('r', 'rev', '', _('revision to tag'), _('REV')),
5918 5919 ('', 'remove', None, _('remove a tag')),
5919 5920 # -l/--local is already there, commitopts cannot be used
5920 5921 ('e', 'edit', None, _('invoke editor on commit messages')),
5921 5922 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5922 5923 ] + commitopts2,
5923 5924 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5924 5925 def tag(ui, repo, name1, *names, **opts):
5925 5926 """add one or more tags for the current or given revision
5926 5927
5927 5928 Name a particular revision using <name>.
5928 5929
5929 5930 Tags are used to name particular revisions of the repository and are
5930 5931 very useful to compare different revisions, to go back to significant
5931 5932 earlier versions or to mark branch points as releases, etc. Changing
5932 5933 an existing tag is normally disallowed; use -f/--force to override.
5933 5934
5934 5935 If no revision is given, the parent of the working directory is
5935 5936 used.
5936 5937
5937 5938 To facilitate version control, distribution, and merging of tags,
5938 5939 they are stored as a file named ".hgtags" which is managed similarly
5939 5940 to other project files and can be hand-edited if necessary. This
5940 5941 also means that tagging creates a new commit. The file
5941 5942 ".hg/localtags" is used for local tags (not shared among
5942 5943 repositories).
5943 5944
5944 5945 Tag commits are usually made at the head of a branch. If the parent
5945 5946 of the working directory is not a branch head, :hg:`tag` aborts; use
5946 5947 -f/--force to force the tag commit to be based on a non-head
5947 5948 changeset.
5948 5949
5949 5950 See :hg:`help dates` for a list of formats valid for -d/--date.
5950 5951
5951 5952 Since tag names have priority over branch names during revision
5952 5953 lookup, using an existing branch name as a tag name is discouraged.
5953 5954
5954 5955 Returns 0 on success.
5955 5956 """
5956 5957 wlock = lock = None
5957 5958 try:
5958 5959 wlock = repo.wlock()
5959 5960 lock = repo.lock()
5960 5961 rev_ = "."
5961 5962 names = [t.strip() for t in (name1,) + names]
5962 5963 if len(names) != len(set(names)):
5963 5964 raise util.Abort(_('tag names must be unique'))
5964 5965 for n in names:
5965 5966 scmutil.checknewlabel(repo, n, 'tag')
5966 5967 if not n:
5967 5968 raise util.Abort(_('tag names cannot consist entirely of '
5968 5969 'whitespace'))
5969 5970 if opts.get('rev') and opts.get('remove'):
5970 5971 raise util.Abort(_("--rev and --remove are incompatible"))
5971 5972 if opts.get('rev'):
5972 5973 rev_ = opts['rev']
5973 5974 message = opts.get('message')
5974 5975 if opts.get('remove'):
5975 5976 expectedtype = opts.get('local') and 'local' or 'global'
5976 5977 for n in names:
5977 5978 if not repo.tagtype(n):
5978 5979 raise util.Abort(_("tag '%s' does not exist") % n)
5979 5980 if repo.tagtype(n) != expectedtype:
5980 5981 if expectedtype == 'global':
5981 5982 raise util.Abort(_("tag '%s' is not a global tag") % n)
5982 5983 else:
5983 5984 raise util.Abort(_("tag '%s' is not a local tag") % n)
5984 5985 rev_ = nullid
5985 5986 if not message:
5986 5987 # we don't translate commit messages
5987 5988 message = 'Removed tag %s' % ', '.join(names)
5988 5989 elif not opts.get('force'):
5989 5990 for n in names:
5990 5991 if n in repo.tags():
5991 5992 raise util.Abort(_("tag '%s' already exists "
5992 5993 "(use -f to force)") % n)
5993 5994 if not opts.get('local'):
5994 5995 p1, p2 = repo.dirstate.parents()
5995 5996 if p2 != nullid:
5996 5997 raise util.Abort(_('uncommitted merge'))
5997 5998 bheads = repo.branchheads()
5998 5999 if not opts.get('force') and bheads and p1 not in bheads:
5999 6000 raise util.Abort(_('not at a branch head (use -f to force)'))
6000 6001 r = scmutil.revsingle(repo, rev_).node()
6001 6002
6002 6003 if not message:
6003 6004 # we don't translate commit messages
6004 6005 message = ('Added tag %s for changeset %s' %
6005 6006 (', '.join(names), short(r)))
6006 6007
6007 6008 date = opts.get('date')
6008 6009 if date:
6009 6010 date = util.parsedate(date)
6010 6011
6011 6012 if opts.get('remove'):
6012 6013 editform = 'tag.remove'
6013 6014 else:
6014 6015 editform = 'tag.add'
6015 6016 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6016 6017
6017 6018 # don't allow tagging the null rev
6018 6019 if (not opts.get('remove') and
6019 6020 scmutil.revsingle(repo, rev_).rev() == nullrev):
6020 6021 raise util.Abort(_("cannot tag null revision"))
6021 6022
6022 6023 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6023 6024 editor=editor)
6024 6025 finally:
6025 6026 release(lock, wlock)
6026 6027
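# --- editor's note (not part of the commit) --------------------------------
# An illustration of the ".hgtags" storage described in the help text above:
# each line pairs a full 40-character changeset hash with the tag name, e.g.
#
#   0a046b0cf9d2b1e2304e1cbd5b1d4cf0a39a2e6c 1.0
#   9b2a9894ce0ae8534a1dbf9c2b3e8d6c1f0a7b21 1.1
#
# (the hashes above are made up). Removing a tag with --remove appends a new
# line mapping the name to the null hash (40 zeros) rather than rewriting
# history, so .hgtags merges like any other tracked file.
# ----------------------------------------------------------------------------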
6027 6028 @command('tags', formatteropts, '')
6028 6029 def tags(ui, repo, **opts):
6029 6030 """list repository tags
6030 6031
6031 6032 This lists both regular and local tags. When the -v/--verbose
6032 6033 switch is used, a third column "local" is printed for local tags.
6033 6034
6034 6035 Returns 0 on success.
6035 6036 """
6036 6037
6037 6038 fm = ui.formatter('tags', opts)
6038 6039 hexfunc = fm.hexfunc
6039 6040 tagtype = ""
6040 6041
6041 6042 for t, n in reversed(repo.tagslist()):
6042 6043 hn = hexfunc(n)
6043 6044 label = 'tags.normal'
6044 6045 tagtype = ''
6045 6046 if repo.tagtype(t) == 'local':
6046 6047 label = 'tags.local'
6047 6048 tagtype = 'local'
6048 6049
6049 6050 fm.startitem()
6050 6051 fm.write('tag', '%s', t, label=label)
6051 6052 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6052 6053 fm.condwrite(not ui.quiet, 'rev node', fmt,
6053 6054 repo.changelog.rev(n), hn, label=label)
6054 6055 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6055 6056 tagtype, label=label)
6056 6057 fm.plain('\n')
6057 6058 fm.end()
6058 6059
6059 6060 @command('tip',
6060 6061 [('p', 'patch', None, _('show patch')),
6061 6062 ('g', 'git', None, _('use git extended diff format')),
6062 6063 ] + templateopts,
6063 6064 _('[-p] [-g]'))
6064 6065 def tip(ui, repo, **opts):
6065 6066 """show the tip revision (DEPRECATED)
6066 6067
6067 6068 The tip revision (usually just called the tip) is the changeset
6068 6069 most recently added to the repository (and therefore the most
6069 6070 recently changed head).
6070 6071
6071 6072 If you have just made a commit, that commit will be the tip. If
6072 6073 you have just pulled changes from another repository, the tip of
6073 6074 that repository becomes the current tip. The "tip" tag is special
6074 6075 and cannot be renamed or assigned to a different changeset.
6075 6076
6076 6077 This command is deprecated, please use :hg:`heads` instead.
6077 6078
6078 6079 Returns 0 on success.
6079 6080 """
6080 6081 displayer = cmdutil.show_changeset(ui, repo, opts)
6081 6082 displayer.show(repo['tip'])
6082 6083 displayer.close()
6083 6084
6084 6085 @command('unbundle',
6085 6086 [('u', 'update', None,
6086 6087 _('update to new branch head if changesets were unbundled'))],
6087 6088 _('[-u] FILE...'))
6088 6089 def unbundle(ui, repo, fname1, *fnames, **opts):
6089 6090 """apply one or more changegroup files
6090 6091
6091 6092 Apply one or more compressed changegroup files generated by the
6092 6093 bundle command.
6093 6094
6094 6095 Returns 0 on success, 1 if an update has unresolved files.
6095 6096 """
6096 6097 fnames = (fname1,) + fnames
6097 6098
6098 6099 lock = repo.lock()
6099 6100 try:
6100 6101 for fname in fnames:
6101 6102 f = hg.openpath(ui, fname)
6102 6103 gen = exchange.readbundle(ui, f, fname)
6103 6104 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
6104 6105 'bundle:' + fname)
6105 6106 finally:
6106 6107 lock.release()
6107 6108
6108 6109 return postincoming(ui, repo, modheads, opts.get('update'), None)
6109 6110
6110 6111 @command('^update|up|checkout|co',
6111 6112 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6112 6113 ('c', 'check', None,
6113 6114 _('update across branches if no uncommitted changes')),
6114 6115 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6115 6116 ('r', 'rev', '', _('revision'), _('REV'))
6116 6117 ] + mergetoolopts,
6117 6118 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6118 6119 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6119 6120 tool=None):
6120 6121 """update working directory (or switch revisions)
6121 6122
6122 6123 Update the repository's working directory to the specified
6123 6124 changeset. If no changeset is specified, update to the tip of the
6124 6125 current named branch and move the current bookmark (see :hg:`help
6125 6126 bookmarks`).
6126 6127
6127 6128 Update sets the working directory's parent revision to the specified
6128 6129 changeset (see :hg:`help parents`).
6129 6130
6130 6131 If the changeset is not a descendant or ancestor of the working
6131 6132 directory's parent, the update is aborted. With the -c/--check
6132 6133 option, the working directory is checked for uncommitted changes; if
6133 6134 none are found, the working directory is updated to the specified
6134 6135 changeset.
6135 6136
6136 6137 .. container:: verbose
6137 6138
6138 6139 The following rules apply when the working directory contains
6139 6140 uncommitted changes:
6140 6141
6141 6142 1. If neither -c/--check nor -C/--clean is specified, and if
6142 6143 the requested changeset is an ancestor or descendant of
6143 6144 the working directory's parent, the uncommitted changes
6144 6145 are merged into the requested changeset and the merged
6145 6146 result is left uncommitted. If the requested changeset is
6146 6147 not an ancestor or descendant (that is, it is on another
6147 6148 branch), the update is aborted and the uncommitted changes
6148 6149 are preserved.
6149 6150
6150 6151 2. With the -c/--check option, the update is aborted and the
6151 6152 uncommitted changes are preserved.
6152 6153
6153 6154 3. With the -C/--clean option, uncommitted changes are discarded and
6154 6155 the working directory is updated to the requested changeset.
6155 6156
6156 6157 To cancel an uncommitted merge (and lose your changes), use
6157 6158 :hg:`update --clean .`.
6158 6159
6159 6160 Use null as the changeset to remove the working directory (like
6160 6161 :hg:`clone -U`).
6161 6162
6162 6163 If you want to revert just one file to an older revision, use
6163 6164 :hg:`revert [-r REV] NAME`.
6164 6165
6165 6166 See :hg:`help dates` for a list of formats valid for -d/--date.
6166 6167
6167 6168 Returns 0 on success, 1 if there are unresolved files.
6168 6169 """
6169 6170 if rev and node:
6170 6171 raise util.Abort(_("please specify just one revision"))
6171 6172
6172 6173 if rev is None or rev == '':
6173 6174 rev = node
6174 6175
6175 6176 cmdutil.clearunfinished(repo)
6176 6177
6177 6178 # with no argument, we also move the current bookmark, if any
6178 6179 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
6179 6180
6180 6181 # if we defined a bookmark, we have to remember the original bookmark name
6181 6182 brev = rev
6182 6183 rev = scmutil.revsingle(repo, rev, rev).rev()
6183 6184
6184 6185 if check and clean:
6185 6186 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
6186 6187
6187 6188 if date:
6188 6189 if rev is not None:
6189 6190 raise util.Abort(_("you can't specify a revision and a date"))
6190 6191 rev = cmdutil.finddate(ui, repo, date)
6191 6192
6192 6193 if check:
6193 6194 c = repo[None]
6194 6195 if c.dirty(merge=False, branch=False, missing=True):
6195 6196 raise util.Abort(_("uncommitted changes"))
6196 6197 if rev is None:
6197 6198 rev = repo[repo[None].branch()].rev()
6198 6199
6199 6200 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6200 6201
6201 6202 if clean:
6202 6203 ret = hg.clean(repo, rev)
6203 6204 else:
6204 6205 ret = hg.update(repo, rev)
6205 6206
6206 6207 if not ret and movemarkfrom:
6207 6208 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6208 6209 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
6209 6210 elif brev in repo._bookmarks:
6210 6211 bookmarks.setcurrent(repo, brev)
6211 6212 ui.status(_("(activating bookmark %s)\n") % brev)
6212 6213 elif brev:
6213 6214 if repo._bookmarkcurrent:
6214 6215 ui.status(_("(leaving bookmark %s)\n") %
6215 6216 repo._bookmarkcurrent)
6216 6217 bookmarks.unsetcurrent(repo)
6217 6218
6218 6219 return ret
6219 6220
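# --- editor's note (not part of the commit) --------------------------------
# Shorthand examples for the three rules spelled out in the help text above,
# assuming a working directory with uncommitted changes:
#
#   hg update            # rule 1: carry the changes over to the new parent
#   hg update --check    # rule 2: abort instead of touching a dirty workdir
#   hg update --clean .  # rule 3: discard the changes, stay on "."
# ----------------------------------------------------------------------------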
6220 6221 @command('verify', [])
6221 6222 def verify(ui, repo):
6222 6223 """verify the integrity of the repository
6223 6224
6224 6225 Verify the integrity of the current repository.
6225 6226
6226 6227 This will perform an extensive check of the repository's
6227 6228 integrity, validating the hashes and checksums of each entry in
6228 6229 the changelog, manifest, and tracked files, as well as the
6229 6230 integrity of their crosslinks and indices.
6230 6231
6231 6232 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
6232 6233 for more information about recovery from corruption of the
6233 6234 repository.
6234 6235
6235 6236 Returns 0 on success, 1 if errors are encountered.
6236 6237 """
6237 6238 return hg.verify(repo)
6238 6239
6239 6240 @command('version', [], norepo=True)
6240 6241 def version_(ui):
6241 6242 """output version and copyright information"""
6242 6243 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6243 6244 % util.version())
6244 6245 ui.status(_(
6245 6246 "(see http://mercurial.selenic.com for more information)\n"
6246 6247 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
6247 6248 "This is free software; see the source for copying conditions. "
6248 6249 "There is NO\nwarranty; "
6249 6250 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6250 6251 ))
6251 6252
6252 6253 ui.note(_("\nEnabled extensions:\n\n"))
6253 6254 if ui.verbose:
6254 6255 # format names and versions into columns
6255 6256 names = []
6256 6257 vers = []
6257 6258 for name, module in extensions.extensions():
6258 6259 names.append(name)
6259 6260 vers.append(extensions.moduleversion(module))
6260 6261 if names:
6261 6262 maxnamelen = max(len(n) for n in names)
6262 6263 for i, name in enumerate(names):
6263 6264 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
@@ -1,1066 +1,1066
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from mercurial.node import nullrev
10 10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 11 import pathutil
12 12 import match as matchmod
13 13 import os, errno, re, glob, tempfile
14 14
15 15 if os.name == 'nt':
16 16 import scmwindows as scmplatform
17 17 else:
18 18 import scmposix as scmplatform
19 19
20 20 systemrcpath = scmplatform.systemrcpath
21 21 userrcpath = scmplatform.userrcpath
22 22
23 23 class status(tuple):
24 24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
25 25 and 'ignored' properties are only relevant to the working copy.
26 26 '''
27 27
28 28 __slots__ = ()
29 29
30 30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
31 31 clean):
32 32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
33 33 ignored, clean))
34 34
35 35 @property
36 36 def modified(self):
37 37 '''files that have been modified'''
38 38 return self[0]
39 39
40 40 @property
41 41 def added(self):
42 42 '''files that have been added'''
43 43 return self[1]
44 44
45 45 @property
46 46 def removed(self):
47 47 '''files that have been removed'''
48 48 return self[2]
49 49
50 50 @property
51 51 def deleted(self):
52 52 '''files that are in the dirstate, but have been deleted from the
53 53 working copy (aka "missing")
54 54 '''
55 55 return self[3]
56 56
57 57 @property
58 58 def unknown(self):
59 59 '''files not in the dirstate that are not ignored'''
60 60 return self[4]
61 61
62 62 @property
63 63 def ignored(self):
64 64 '''files not in the dirstate that are ignored (by _dirignore())'''
65 65 return self[5]
66 66
67 67 @property
68 68 def clean(self):
69 69 '''files that have not been modified'''
70 70 return self[6]
71 71
72 72 def __repr__(self, *args, **kwargs):
73 73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
74 74 'unknown=%r, ignored=%r, clean=%r>') % self)
75 75
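# --- editor's illustration (not part of the commit) ------------------------
# The class above is both a plain 7-tuple and an object with named fields,
# so existing tuple-unpacking callers keep working:
st = status(['a.txt'], ['b.txt'], [], [], [], [], [])
assert st.modified == ['a.txt']      # attribute access ...
assert st[1] == ['b.txt']            # ... and positional access
modified, added = st[:2]             # unpacks like the old bare tuple did
# ----------------------------------------------------------------------------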
76 76 def itersubrepos(ctx1, ctx2):
77 77 """find subrepos in ctx1 or ctx2"""
78 78 # Create a (subpath, ctx) mapping where we prefer subpaths from
79 79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
80 80 # has been modified (in ctx2) but not yet committed (in ctx1).
81 81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
82 82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
83 83 for subpath, ctx in sorted(subpaths.iteritems()):
84 84 yield subpath, ctx.sub(subpath)
85 85
86 86 def nochangesfound(ui, repo, excluded=None):
87 87 '''Report no changes for push/pull, excluded is None or a list of
88 88 nodes excluded from the push/pull.
89 89 '''
90 90 secretlist = []
91 91 if excluded:
92 92 for n in excluded:
93 93 if n not in repo:
94 94 # discovery should not have included the filtered revision,
95 95 # we have to explicitly exclude it until discovery is cleaned up.
96 96 continue
97 97 ctx = repo[n]
98 98 if ctx.phase() >= phases.secret and not ctx.extinct():
99 99 secretlist.append(n)
100 100
101 101 if secretlist:
102 102 ui.status(_("no changes found (ignored %d secret changesets)\n")
103 103 % len(secretlist))
104 104 else:
105 105 ui.status(_("no changes found\n"))
106 106
107 107 def checknewlabel(repo, lbl, kind):
108 108 # Do not use the "kind" parameter in ui output.
109 109 # It makes strings difficult to translate.
110 110 if lbl in ['tip', '.', 'null']:
111 111 raise util.Abort(_("the name '%s' is reserved") % lbl)
112 112 for c in (':', '\0', '\n', '\r'):
113 113 if c in lbl:
114 114 raise util.Abort(_("%r cannot be used in a name") % c)
115 115 try:
116 116 int(lbl)
117 117 raise util.Abort(_("cannot use an integer as a name"))
118 118 except ValueError:
119 119 pass
120 120
121 121 def checkfilename(f):
122 122 '''Check that the filename f is an acceptable filename for a tracked file'''
123 123 if '\r' in f or '\n' in f:
124 124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125 125
126 126 def checkportable(ui, f):
127 127 '''Check if filename f is portable and warn or abort depending on config'''
128 128 checkfilename(f)
129 129 abort, warn = checkportabilityalert(ui)
130 130 if abort or warn:
131 131 msg = util.checkwinfilename(f)
132 132 if msg:
133 133 msg = "%s: %r" % (msg, f)
134 134 if abort:
135 135 raise util.Abort(msg)
136 136 ui.warn(_("warning: %s\n") % msg)
137 137
138 138 def checkportabilityalert(ui):
139 139 '''check if the user's config requests nothing, a warning, or abort for
140 140 non-portable filenames'''
141 141 val = ui.config('ui', 'portablefilenames', 'warn')
142 142 lval = val.lower()
143 143 bval = util.parsebool(val)
144 144 abort = os.name == 'nt' or lval == 'abort'
145 145 warn = bval or lval == 'warn'
146 146 if bval is None and not (warn or abort or lval == 'ignore'):
147 147 raise error.ConfigError(
148 148 _("ui.portablefilenames value is invalid ('%s')") % val)
149 149 return abort, warn
150 150
151 151 class casecollisionauditor(object):
152 152 def __init__(self, ui, abort, dirstate):
153 153 self._ui = ui
154 154 self._abort = abort
155 155 allfiles = '\0'.join(dirstate._map)
156 156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
157 157 self._dirstate = dirstate
158 158 # The purpose of _newfiles is so that we don't complain about
159 159 # case collisions if someone were to call this object with the
160 160 # same filename twice.
161 161 self._newfiles = set()
162 162
163 163 def __call__(self, f):
164 164 if f in self._newfiles:
165 165 return
166 166 fl = encoding.lower(f)
167 167 if fl in self._loweredfiles and f not in self._dirstate:
168 168 msg = _('possible case-folding collision for %s') % f
169 169 if self._abort:
170 170 raise util.Abort(msg)
171 171 self._ui.warn(_("warning: %s\n") % msg)
172 172 self._loweredfiles.add(fl)
173 173 self._newfiles.add(f)
174 174
175 175 class abstractvfs(object):
176 176 """Abstract base class; cannot be instantiated"""
177 177
178 178 def __init__(self, *args, **kwargs):
179 179 '''Prevent instantiation; don't call this from subclasses.'''
180 180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
181 181
182 182 def tryread(self, path):
183 183 '''gracefully return an empty string for missing files'''
184 184 try:
185 185 return self.read(path)
186 186 except IOError, inst:
187 187 if inst.errno != errno.ENOENT:
188 188 raise
189 189 return ""
190 190
191 191 def tryreadlines(self, path, mode='rb'):
192 192 '''gracefully return an empty array for missing files'''
193 193 try:
194 194 return self.readlines(path, mode=mode)
195 195 except IOError, inst:
196 196 if inst.errno != errno.ENOENT:
197 197 raise
198 198 return []
199 199
200 200 def open(self, path, mode="r", text=False, atomictemp=False,
201 201 notindexed=False):
202 202 '''Open ``path`` file, which is relative to vfs root.
203 203
204 204 Newly created directories are marked as "not to be indexed by
205 205 the content indexing service", if ``notindexed`` is specified
206 206 for "write" mode access.
207 207 '''
208 208 self.open = self.__call__
209 209 return self.__call__(path, mode, text, atomictemp, notindexed)
210 210
211 211 def read(self, path):
212 212 fp = self(path, 'rb')
213 213 try:
214 214 return fp.read()
215 215 finally:
216 216 fp.close()
217 217
218 218 def readlines(self, path, mode='rb'):
219 219 fp = self(path, mode=mode)
220 220 try:
221 221 return fp.readlines()
222 222 finally:
223 223 fp.close()
224 224
225 225 def write(self, path, data):
226 226 fp = self(path, 'wb')
227 227 try:
228 228 return fp.write(data)
229 229 finally:
230 230 fp.close()
231 231
232 232 def writelines(self, path, data, mode='wb', notindexed=False):
233 233 fp = self(path, mode=mode, notindexed=notindexed)
234 234 try:
235 235 return fp.writelines(data)
236 236 finally:
237 237 fp.close()
238 238
239 239 def append(self, path, data):
240 240 fp = self(path, 'ab')
241 241 try:
242 242 return fp.write(data)
243 243 finally:
244 244 fp.close()
245 245
246 246 def chmod(self, path, mode):
247 247 return os.chmod(self.join(path), mode)
248 248
249 249 def exists(self, path=None):
250 250 return os.path.exists(self.join(path))
251 251
252 252 def fstat(self, fp):
253 253 return util.fstat(fp)
254 254
255 255 def isdir(self, path=None):
256 256 return os.path.isdir(self.join(path))
257 257
258 258 def isfile(self, path=None):
259 259 return os.path.isfile(self.join(path))
260 260
261 261 def islink(self, path=None):
262 262 return os.path.islink(self.join(path))
263 263
264 264 def lexists(self, path=None):
265 265 return os.path.lexists(self.join(path))
266 266
267 267 def lstat(self, path=None):
268 268 return os.lstat(self.join(path))
269 269
270 270 def listdir(self, path=None):
271 271 return os.listdir(self.join(path))
272 272
273 273 def makedir(self, path=None, notindexed=True):
274 274 return util.makedir(self.join(path), notindexed)
275 275
276 276 def makedirs(self, path=None, mode=None):
277 277 return util.makedirs(self.join(path), mode)
278 278
279 279 def makelock(self, info, path):
280 280 return util.makelock(info, self.join(path))
281 281
282 282 def mkdir(self, path=None):
283 283 return os.mkdir(self.join(path))
284 284
285 285 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
286 286 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
287 287 dir=self.join(dir), text=text)
288 288 dname, fname = util.split(name)
289 289 if dir:
290 290 return fd, os.path.join(dir, fname)
291 291 else:
292 292 return fd, fname
293 293
294 294 def readdir(self, path=None, stat=None, skip=None):
295 295 return osutil.listdir(self.join(path), stat, skip)
296 296
297 297 def readlock(self, path):
298 298 return util.readlock(self.join(path))
299 299
300 300 def rename(self, src, dst):
301 301 return util.rename(self.join(src), self.join(dst))
302 302
303 303 def readlink(self, path):
304 304 return os.readlink(self.join(path))
305 305
306 306 def setflags(self, path, l, x):
307 307 return util.setflags(self.join(path), l, x)
308 308
309 309 def stat(self, path=None):
310 310 return os.stat(self.join(path))
311 311
312 312 def unlink(self, path=None):
313 313 return util.unlink(self.join(path))
314 314
315 315 def unlinkpath(self, path=None, ignoremissing=False):
316 316 return util.unlinkpath(self.join(path), ignoremissing)
317 317
318 318 def utime(self, path=None, t=None):
319 319 return os.utime(self.join(path), t)
320 320
321 321 class vfs(abstractvfs):
322 322 '''Operate files relative to a base directory
323 323
324 324 This class is used to hide the details of COW semantics and
325 325 remote file access from higher level code.
326 326 '''
327 327 def __init__(self, base, audit=True, expandpath=False, realpath=False):
328 328 if expandpath:
329 329 base = util.expandpath(base)
330 330 if realpath:
331 331 base = os.path.realpath(base)
332 332 self.base = base
333 333 self._setmustaudit(audit)
334 334 self.createmode = None
335 335 self._trustnlink = None
336 336
337 337 def _getmustaudit(self):
338 338 return self._audit
339 339
340 340 def _setmustaudit(self, onoff):
341 341 self._audit = onoff
342 342 if onoff:
343 343 self.audit = pathutil.pathauditor(self.base)
344 344 else:
345 345 self.audit = util.always
346 346
347 347 mustaudit = property(_getmustaudit, _setmustaudit)
348 348
349 349 @util.propertycache
350 350 def _cansymlink(self):
351 351 return util.checklink(self.base)
352 352
353 353 @util.propertycache
354 354 def _chmod(self):
355 355 return util.checkexec(self.base)
356 356
357 357 def _fixfilemode(self, name):
358 358 if self.createmode is None or not self._chmod:
359 359 return
360 360 os.chmod(name, self.createmode & 0666)
361 361
362 362 def __call__(self, path, mode="r", text=False, atomictemp=False,
363 363 notindexed=False):
364 364 '''Open ``path`` file, which is relative to vfs root.
365 365
366 366 Newly created directories are marked as "not to be indexed by
367 367 the content indexing service", if ``notindexed`` is specified
368 368 for "write" mode access.
369 369 '''
370 370 if self._audit:
371 371 r = util.checkosfilename(path)
372 372 if r:
373 373 raise util.Abort("%s: %r" % (r, path))
374 374 self.audit(path)
375 375 f = self.join(path)
376 376
377 377 if not text and "b" not in mode:
378 378 mode += "b" # for that other OS
379 379
380 380 nlink = -1
381 381 if mode not in ('r', 'rb'):
382 382 dirname, basename = util.split(f)
383 383 # If basename is empty, then the path is malformed because it points
384 384 # to a directory. Let the posixfile() call below raise IOError.
385 385 if basename:
386 386 if atomictemp:
387 387 util.ensuredirs(dirname, self.createmode, notindexed)
388 388 return util.atomictempfile(f, mode, self.createmode)
389 389 try:
390 390 if 'w' in mode:
391 391 util.unlink(f)
392 392 nlink = 0
393 393 else:
394 394 # nlinks() may behave differently for files on Windows
395 395 # shares if the file is open.
396 396 fd = util.posixfile(f)
397 397 nlink = util.nlinks(f)
398 398 if nlink < 1:
399 399 nlink = 2 # force mktempcopy (issue1922)
400 400 fd.close()
401 401 except (OSError, IOError), e:
402 402 if e.errno != errno.ENOENT:
403 403 raise
404 404 nlink = 0
405 405 util.ensuredirs(dirname, self.createmode, notindexed)
406 406 if nlink > 0:
407 407 if self._trustnlink is None:
408 408 self._trustnlink = nlink > 1 or util.checknlink(f)
409 409 if nlink > 1 or not self._trustnlink:
410 410 util.rename(util.mktempcopy(f), f)
411 411 fp = util.posixfile(f, mode)
412 412 if nlink == 0:
413 413 self._fixfilemode(f)
414 414 return fp
415 415
416 416 def symlink(self, src, dst):
417 417 self.audit(dst)
418 418 linkname = self.join(dst)
419 419 try:
420 420 os.unlink(linkname)
421 421 except OSError:
422 422 pass
423 423
424 424 util.ensuredirs(os.path.dirname(linkname), self.createmode)
425 425
426 426 if self._cansymlink:
427 427 try:
428 428 os.symlink(src, linkname)
429 429 except OSError, err:
430 430 raise OSError(err.errno, _('could not symlink to %r: %s') %
431 431 (src, err.strerror), linkname)
432 432 else:
433 433 self.write(dst, src)
434 434
435 435 def join(self, path):
436 436 if path:
437 437 return os.path.join(self.base, path)
438 438 else:
439 439 return self.base
440 440
441 441 opener = vfs
442 442
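# --- editor's usage sketch (not part of the commit) ------------------------
# Exercises only methods defined above; relies on the os/tempfile imports at
# the top of this module.
tmpdir = tempfile.mkdtemp()
v = vfs(tmpdir, audit=False)             # paths below are relative to tmpdir
v.write('notes/todo.txt', 'hello\n')     # missing directories are created
assert v.read('notes/todo.txt') == 'hello\n'
assert v.exists('notes/todo.txt') and v.isdir('notes')
print v.join('notes/todo.txt')           # absolute path under tmpdir
# ----------------------------------------------------------------------------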
443 443 class auditvfs(object):
444 444 def __init__(self, vfs):
445 445 self.vfs = vfs
446 446
447 447 def _getmustaudit(self):
448 448 return self.vfs.mustaudit
449 449
450 450 def _setmustaudit(self, onoff):
451 451 self.vfs.mustaudit = onoff
452 452
453 453 mustaudit = property(_getmustaudit, _setmustaudit)
454 454
455 455 class filtervfs(abstractvfs, auditvfs):
456 456 '''Wrapper vfs for filtering filenames with a function.'''
457 457
458 458 def __init__(self, vfs, filter):
459 459 auditvfs.__init__(self, vfs)
460 460 self._filter = filter
461 461
462 462 def __call__(self, path, *args, **kwargs):
463 463 return self.vfs(self._filter(path), *args, **kwargs)
464 464
465 465 def join(self, path):
466 466 if path:
467 467 return self.vfs.join(self._filter(path))
468 468 else:
469 469 return self.vfs.join(path)
470 470
471 471 filteropener = filtervfs
472 472
473 473 class readonlyvfs(abstractvfs, auditvfs):
474 474 '''Wrapper vfs preventing any writing.'''
475 475
476 476 def __init__(self, vfs):
477 477 auditvfs.__init__(self, vfs)
478 478
479 479 def __call__(self, path, mode='r', *args, **kw):
480 480 if mode not in ('r', 'rb'):
481 481 raise util.Abort('this vfs is read only')
482 482 return self.vfs(path, mode, *args, **kw)
483 483
484 484
485 485 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
486 486 '''yield every hg repository under path, always recursively.
487 487 The recurse flag will only control recursion into repo working dirs'''
488 488 def errhandler(err):
489 489 if err.filename == path:
490 490 raise err
491 491 samestat = getattr(os.path, 'samestat', None)
492 492 if followsym and samestat is not None:
493 493 def adddir(dirlst, dirname):
494 494 match = False
495 495 dirstat = os.stat(dirname)
496 496 for lstdirstat in dirlst:
497 497 if samestat(dirstat, lstdirstat):
498 498 match = True
499 499 break
500 500 if not match:
501 501 dirlst.append(dirstat)
502 502 return not match
503 503 else:
504 504 followsym = False
505 505
506 506 if (seen_dirs is None) and followsym:
507 507 seen_dirs = []
508 508 adddir(seen_dirs, path)
509 509 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
510 510 dirs.sort()
511 511 if '.hg' in dirs:
512 512 yield root # found a repository
513 513 qroot = os.path.join(root, '.hg', 'patches')
514 514 if os.path.isdir(os.path.join(qroot, '.hg')):
515 515 yield qroot # we have a patch queue repo here
516 516 if recurse:
517 517 # avoid recursing inside the .hg directory
518 518 dirs.remove('.hg')
519 519 else:
520 520 dirs[:] = [] # don't descend further
521 521 elif followsym:
522 522 newdirs = []
523 523 for d in dirs:
524 524 fname = os.path.join(root, d)
525 525 if adddir(seen_dirs, fname):
526 526 if os.path.islink(fname):
527 527 for hgname in walkrepos(fname, True, seen_dirs):
528 528 yield hgname
529 529 else:
530 530 newdirs.append(d)
531 531 dirs[:] = newdirs
532 532
533 533 def osrcpath():
534 534 '''return default os-specific hgrc search path'''
535 535 path = []
536 536 defaultpath = os.path.join(util.datapath, 'default.d')
537 537 if os.path.isdir(defaultpath):
538 538 for f, kind in osutil.listdir(defaultpath):
539 539 if f.endswith('.rc'):
540 540 path.append(os.path.join(defaultpath, f))
541 541 path.extend(systemrcpath())
542 542 path.extend(userrcpath())
543 543 path = [os.path.normpath(f) for f in path]
544 544 return path
545 545
546 546 _rcpath = None
547 547
548 548 def rcpath():
549 549 '''return hgrc search path. if env var HGRCPATH is set, use it.
550 550 for each item in path, if directory, use files ending in .rc,
551 551 else use item.
552 552 make HGRCPATH empty to only look in .hg/hgrc of current repo.
553 553 if no HGRCPATH, use default os-specific path.'''
554 554 global _rcpath
555 555 if _rcpath is None:
556 556 if 'HGRCPATH' in os.environ:
557 557 _rcpath = []
558 558 for p in os.environ['HGRCPATH'].split(os.pathsep):
559 559 if not p:
560 560 continue
561 561 p = util.expandpath(p)
562 562 if os.path.isdir(p):
563 563 for f, kind in osutil.listdir(p):
564 564 if f.endswith('.rc'):
565 565 _rcpath.append(os.path.join(p, f))
566 566 else:
567 567 _rcpath.append(p)
568 568 else:
569 569 _rcpath = osrcpath()
570 570 return _rcpath
571 571
572 572 def revsingle(repo, revspec, default='.'):
573 573 if not revspec and revspec != 0:
574 574 return repo[default]
575 575
576 576 l = revrange(repo, [revspec])
577 577 if not l:
578 578 raise util.Abort(_('empty revision set'))
579 579 return repo[l.last()]
580 580
581 581 def revpair(repo, revs):
582 582 if not revs:
583 583 return repo.dirstate.p1(), None
584 584
585 585 l = revrange(repo, revs)
586 586
587 587 if not l:
588 588 first = second = None
589 589 elif l.isascending():
590 590 first = l.min()
591 591 second = l.max()
592 592 elif l.isdescending():
593 593 first = l.max()
594 594 second = l.min()
595 595 else:
596 596 first = l.first()
597 597 second = l.last()
598 598
599 599 if first is None:
600 600 raise util.Abort(_('empty revision range'))
601 601
602 602 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
603 603 return repo.lookup(first), None
604 604
605 605 return repo.lookup(first), repo.lookup(second)
606 606
607 607 _revrangesep = ':'
608 608
609 609 def revrange(repo, revs):
610 610 """Yield revision as strings from a list of revision specifications."""
611 611
612 612 def revfix(repo, val, defval):
613 613 if not val and val != 0 and defval is not None:
614 614 return defval
615 615 return repo[val].rev()
616 616
617 617 seen, l = set(), revset.baseset([])
618 618 for spec in revs:
619 619 if l and not seen:
620 620 seen = set(l)
621 621 # attempt to parse old-style ranges first to deal with
622 622 # things like old-tag which contain query metacharacters
623 623 try:
624 624 if isinstance(spec, int):
625 625 seen.add(spec)
626 626 l = l + revset.baseset([spec])
627 627 continue
628 628
629 629 if _revrangesep in spec:
630 630 start, end = spec.split(_revrangesep, 1)
631 631 start = revfix(repo, start, 0)
632 632 end = revfix(repo, end, len(repo) - 1)
633 633 if end == nullrev and start < 0:
634 634 start = nullrev
635 635 rangeiter = repo.changelog.revs(start, end)
636 636 if not seen and not l:
637 637 # by far the most common case: revs = ["-1:0"]
638 638 l = revset.baseset(rangeiter)
639 639 # defer syncing seen until next iteration
640 640 continue
641 641 newrevs = set(rangeiter)
642 642 if seen:
643 643 newrevs.difference_update(seen)
644 644 seen.update(newrevs)
645 645 else:
646 646 seen = newrevs
647 647 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
648 648 continue
649 649 elif spec and spec in repo: # single unquoted rev
650 650 rev = revfix(repo, spec, None)
651 651 if rev in seen:
652 652 continue
653 653 seen.add(rev)
654 654 l = l + revset.baseset([rev])
655 655 continue
656 656 except error.RepoLookupError:
657 657 pass
658 658
659 659 # fall through to new-style queries if old-style fails
660 660 m = revset.match(repo.ui, spec, repo)
661 661 if seen or l:
662 662 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
663 663 l = l + revset.baseset(dl)
664 664 seen.update(dl)
665 665 else:
666 666 l = m(repo, revset.spanset(repo))
667 667
668 668 return l
669 669
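# --- editor's sketch (not part of the commit) ------------------------------
# Typical use of the three helpers above from command code; `repo` is assumed
# to be an existing localrepository object.
ctx = revsingle(repo, 'tip')                # one changectx ('.' when spec is empty)
node1, node2 = revpair(repo, ['1:3'])       # endpoint nodes of a range
for rev in revrange(repo, ['2:5', 'tip']):  # de-duplicated revision numbers
    print repo[rev].hex()
# ----------------------------------------------------------------------------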
670 670 def expandpats(pats):
671 671 '''Expand bare globs when running on windows.
672 672 On posix we assume it has already been done by sh.'''
673 673 if not util.expandglobs:
674 674 return list(pats)
675 675 ret = []
676 676 for kindpat in pats:
677 677 kind, pat = matchmod._patsplit(kindpat, None)
678 678 if kind is None:
679 679 try:
680 680 globbed = glob.glob(pat)
681 681 except re.error:
682 682 globbed = [pat]
683 683 if globbed:
684 684 ret.extend(globbed)
685 685 continue
686 686 ret.append(kindpat)
687 687 return ret
688 688
689 689 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
690 690 '''Return a matcher and the patterns that were used.
691 691 The matcher will warn about bad matches.'''
692 692 if pats == ("",):
693 693 pats = []
694 694 if not globbed and default == 'relpath':
695 695 pats = expandpats(pats or [])
696 696
697 697 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
698 698 default)
699 699 def badfn(f, msg):
700 700 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
701 701 m.bad = badfn
702 702 return m, pats
703 703
704 704 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
705 705 '''Return a matcher that will warn about bad matches.'''
706 706 return matchandpats(ctx, pats, opts, globbed, default)[0]
707 707
708 708 def matchall(repo):
709 709 '''Return a matcher that will efficiently match everything.'''
710 710 return matchmod.always(repo.root, repo.getcwd())
711 711
712 712 def matchfiles(repo, files):
713 713 '''Return a matcher that will efficiently match exactly these files.'''
714 714 return matchmod.exact(repo.root, repo.getcwd(), files)
715 715
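# --- editor's sketch (not part of the commit) ------------------------------
# The matcher helpers above are what callers now build before invoking
# addremove() below; `repo` is assumed to exist.
m = match(repo[None], ['glob:*.py'], {})   # patterns plus include/exclude opts
print m('setup.py'), m('README')           # True, False (matchers are callable)
m2 = matchfiles(repo, ['a.txt', 'b.txt'])  # exact file list
m3 = matchall(repo)                        # matches every file
# ----------------------------------------------------------------------------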
716 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
716 def addremove(repo, matcher, opts={}, dry_run=None, similarity=None):
717 m = matcher
717 718 if dry_run is None:
718 719 dry_run = opts.get('dry_run')
719 720 if similarity is None:
720 721 similarity = float(opts.get('similarity') or 0)
721 # we'd use status here, except handling of symlinks and ignore is tricky
722 m = match(repo[None], pats, opts)
722
723 723 rejected = []
724 724 m.bad = lambda x, y: rejected.append(x)
725 725
726 726 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
727 727
728 728 unknownset = set(unknown + forgotten)
729 729 toprint = unknownset.copy()
730 730 toprint.update(deleted)
731 731 for abs in sorted(toprint):
732 732 if repo.ui.verbose or not m.exact(abs):
733 733 if abs in unknownset:
734 734 status = _('adding %s\n') % m.uipath(abs)
735 735 else:
736 736 status = _('removing %s\n') % m.uipath(abs)
737 737 repo.ui.status(status)
738 738
739 739 renames = _findrenames(repo, m, added + unknown, removed + deleted,
740 740 similarity)
741 741
742 742 if not dry_run:
743 743 _markchanges(repo, unknown + forgotten, deleted, renames)
744 744
745 745 for f in rejected:
746 746 if f in m.files():
747 747 return 1
748 748 return 0
749 749
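# --- editor's note (not part of the commit) --------------------------------
# Calling convention after this change: the caller builds the matcher and
# passes it in, instead of handing addremove() a list of patterns.
#
#   before:  scmutil.addremove(repo, pats, opts, dry_run=True)
#   after:   m = scmutil.match(repo[None], pats, opts)
#            scmutil.addremove(repo, m, opts, dry_run=True)
#
# With no patterns, scmutil.match(repo[None]) produces an "always" matcher,
# so the old match-everything default is preserved.
# ----------------------------------------------------------------------------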
750 750 def marktouched(repo, files, similarity=0.0):
751 751 '''Assert that files have somehow been operated upon. files are relative to
752 752 the repo root.'''
753 753 m = matchfiles(repo, files)
754 754 rejected = []
755 755 m.bad = lambda x, y: rejected.append(x)
756 756
757 757 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
758 758
759 759 if repo.ui.verbose:
760 760 unknownset = set(unknown + forgotten)
761 761 toprint = unknownset.copy()
762 762 toprint.update(deleted)
763 763 for abs in sorted(toprint):
764 764 if abs in unknownset:
765 765 status = _('adding %s\n') % abs
766 766 else:
767 767 status = _('removing %s\n') % abs
768 768 repo.ui.status(status)
769 769
770 770 renames = _findrenames(repo, m, added + unknown, removed + deleted,
771 771 similarity)
772 772
773 773 _markchanges(repo, unknown + forgotten, deleted, renames)
774 774
775 775 for f in rejected:
776 776 if f in m.files():
777 777 return 1
778 778 return 0
779 779
780 780 def _interestingfiles(repo, matcher):
781 781 '''Walk dirstate with matcher, looking for files that addremove would care
782 782 about.
783 783
784 784 This is different from dirstate.status because it doesn't care about
785 785 whether files are modified or clean.'''
786 786 added, unknown, deleted, removed, forgotten = [], [], [], [], []
787 787 audit_path = pathutil.pathauditor(repo.root)
788 788
789 789 ctx = repo[None]
790 790 dirstate = repo.dirstate
791 791 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
792 792 full=False)
793 793 for abs, st in walkresults.iteritems():
794 794 dstate = dirstate[abs]
795 795 if dstate == '?' and audit_path.check(abs):
796 796 unknown.append(abs)
797 797 elif dstate != 'r' and not st:
798 798 deleted.append(abs)
799 799 elif dstate == 'r' and st:
800 800 forgotten.append(abs)
801 801 # for finding renames
802 802 elif dstate == 'r' and not st:
803 803 removed.append(abs)
804 804 elif dstate == 'a':
805 805 added.append(abs)
806 806
807 807 return added, unknown, deleted, removed, forgotten
808 808
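# --- editor's note (not part of the commit) --------------------------------
# A rough legend for the one-letter dirstate states tested above; the name
# below is made up and only for illustration.
DIRSTATE_LEGEND = {
    'n': 'normal (tracked)',
    'a': 'added',
    'r': 'removed',
    'm': 'merged from the other parent',
    '?': 'not tracked',
}
# ----------------------------------------------------------------------------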
809 809 def _findrenames(repo, matcher, added, removed, similarity):
810 810 '''Find renames from removed files to added ones.'''
811 811 renames = {}
812 812 if similarity > 0:
813 813 for old, new, score in similar.findrenames(repo, added, removed,
814 814 similarity):
815 815 if (repo.ui.verbose or not matcher.exact(old)
816 816 or not matcher.exact(new)):
817 817 repo.ui.status(_('recording removal of %s as rename to %s '
818 818 '(%d%% similar)\n') %
819 819 (matcher.rel(old), matcher.rel(new),
820 820 score * 100))
821 821 renames[new] = old
822 822 return renames
823 823
824 824 def _markchanges(repo, unknown, deleted, renames):
825 825 '''Marks the files in unknown as added, the files in deleted as removed,
826 826 and the files in renames as copied.'''
827 827 wctx = repo[None]
828 828 wlock = repo.wlock()
829 829 try:
830 830 wctx.forget(deleted)
831 831 wctx.add(unknown)
832 832 for new, old in renames.iteritems():
833 833 wctx.copy(old, new)
834 834 finally:
835 835 wlock.release()
836 836
837 837 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
838 838 """Update the dirstate to reflect the intent of copying src to dst. For
839 839 different reasons it might not end with dst being marked as copied from src.
840 840 """
841 841 origsrc = repo.dirstate.copied(src) or src
842 842 if dst == origsrc: # copying back a copy?
843 843 if repo.dirstate[dst] not in 'mn' and not dryrun:
844 844 repo.dirstate.normallookup(dst)
845 845 else:
846 846 if repo.dirstate[origsrc] == 'a' and origsrc == src:
847 847 if not ui.quiet:
848 848 ui.warn(_("%s has not been committed yet, so no copy "
849 849 "data will be stored for %s.\n")
850 850 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
851 851 if repo.dirstate[dst] in '?r' and not dryrun:
852 852 wctx.add([dst])
853 853 elif not dryrun:
854 854 wctx.copy(origsrc, dst)
855 855
856 856 def readrequires(opener, supported):
857 857 '''Reads and parses .hg/requires and checks if all entries found
858 858 are in the list of supported features.'''
859 859 requirements = set(opener.read("requires").splitlines())
860 860 missings = []
861 861 for r in requirements:
862 862 if r not in supported:
863 863 if not r or not r[0].isalnum():
864 864 raise error.RequirementError(_(".hg/requires file is corrupt"))
865 865 missings.append(r)
866 866 missings.sort()
867 867 if missings:
868 868 raise error.RequirementError(
869 869 _("repository requires features unknown to this Mercurial: %s")
870 870 % " ".join(missings),
871 871 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
872 872 " for more information"))
873 873 return requirements
874 874
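# --- editor's note (not part of the commit) --------------------------------
# A typical .hg/requires file of this era lists one feature per line, e.g.:
#
#   revlogv1
#   store
#   fncache
#   dotencode
#
# readrequires() raises RequirementError if any listed feature is missing
# from `supported`.
# ----------------------------------------------------------------------------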
875 875 class filecachesubentry(object):
876 876 def __init__(self, path, stat):
877 877 self.path = path
878 878 self.cachestat = None
879 879 self._cacheable = None
880 880
881 881 if stat:
882 882 self.cachestat = filecachesubentry.stat(self.path)
883 883
884 884 if self.cachestat:
885 885 self._cacheable = self.cachestat.cacheable()
886 886 else:
887 887 # None means we don't know yet
888 888 self._cacheable = None
889 889
890 890 def refresh(self):
891 891 if self.cacheable():
892 892 self.cachestat = filecachesubentry.stat(self.path)
893 893
894 894 def cacheable(self):
895 895 if self._cacheable is not None:
896 896 return self._cacheable
897 897
898 898 # we don't know yet, assume it is for now
899 899 return True
900 900
901 901 def changed(self):
902 902 # no point in going further if we can't cache it
903 903 if not self.cacheable():
904 904 return True
905 905
906 906 newstat = filecachesubentry.stat(self.path)
907 907
908 908 # we may not know if it's cacheable yet, check again now
909 909 if newstat and self._cacheable is None:
910 910 self._cacheable = newstat.cacheable()
911 911
912 912 # check again
913 913 if not self._cacheable:
914 914 return True
915 915
916 916 if self.cachestat != newstat:
917 917 self.cachestat = newstat
918 918 return True
919 919 else:
920 920 return False
921 921
922 922 @staticmethod
923 923 def stat(path):
924 924 try:
925 925 return util.cachestat(path)
926 926 except OSError, e:
927 927 if e.errno != errno.ENOENT:
928 928 raise
929 929
930 930 class filecacheentry(object):
931 931 def __init__(self, paths, stat=True):
932 932 self._entries = []
933 933 for path in paths:
934 934 self._entries.append(filecachesubentry(path, stat))
935 935
936 936 def changed(self):
937 937 '''true if any entry has changed'''
938 938 for entry in self._entries:
939 939 if entry.changed():
940 940 return True
941 941 return False
942 942
943 943 def refresh(self):
944 944 for entry in self._entries:
945 945 entry.refresh()
946 946
947 947 class filecache(object):
948 948 '''A property like decorator that tracks files under .hg/ for updates.
949 949
950 950 Records stat info when called in _filecache.
951 951
952 952 On subsequent calls, compares old stat info with new info, and recreates the
953 953 object when any of the files changes, updating the new stat info in
954 954 _filecache.
955 955
956 956 Mercurial either atomically renames or appends to files under .hg,
957 957 so to ensure the cache is reliable we need the filesystem to be able
958 958 to tell us if a file has been replaced. If it can't, we fall back to
959 959 recreating the object on every call (essentially the same behaviour as
960 960 propertycache).
961 961
962 962 '''
963 963 def __init__(self, *paths):
964 964 self.paths = paths
965 965
966 966 def join(self, obj, fname):
967 967 """Used to compute the runtime path of a cached file.
968 968
969 969 Users should subclass filecache and provide their own version of this
970 970 function to call the appropriate join function on 'obj' (an instance
971 971 of the class that its member function was decorated).
972 972 """
973 973 return obj.join(fname)
974 974
975 975 def __call__(self, func):
976 976 self.func = func
977 977 self.name = func.__name__
978 978 return self
979 979
980 980 def __get__(self, obj, type=None):
981 981 # do we need to check if the file changed?
982 982 if self.name in obj.__dict__:
983 983 assert self.name in obj._filecache, self.name
984 984 return obj.__dict__[self.name]
985 985
986 986 entry = obj._filecache.get(self.name)
987 987
988 988 if entry:
989 989 if entry.changed():
990 990 entry.obj = self.func(obj)
991 991 else:
992 992 paths = [self.join(obj, path) for path in self.paths]
993 993
994 994 # We stat -before- creating the object so our cache doesn't lie if
995 995 # a writer modified the file between the time we read and stat it
996 996 entry = filecacheentry(paths, True)
997 997 entry.obj = self.func(obj)
998 998
999 999 obj._filecache[self.name] = entry
1000 1000
1001 1001 obj.__dict__[self.name] = entry.obj
1002 1002 return entry.obj
1003 1003
1004 1004 def __set__(self, obj, value):
1005 1005 if self.name not in obj._filecache:
1006 1006 # we add an entry for the missing value because X in __dict__
1007 1007 # implies X in _filecache
1008 1008 paths = [self.join(obj, path) for path in self.paths]
1009 1009 ce = filecacheentry(paths, False)
1010 1010 obj._filecache[self.name] = ce
1011 1011 else:
1012 1012 ce = obj._filecache[self.name]
1013 1013
1014 1014 ce.obj = value # update cached copy
1015 1015 obj.__dict__[self.name] = value # update copy returned by obj.x
1016 1016
1017 1017 def __delete__(self, obj):
1018 1018 try:
1019 1019 del obj.__dict__[self.name]
1020 1020 except KeyError:
1021 1021 raise AttributeError(self.name)
1022 1022
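# --- editor's usage sketch (not part of the commit) ------------------------
# The decorated holder must provide a _filecache dict and a join() method
# (or subclass filecache and override join()); the class below is made up
# purely for illustration.
class fakerepo(object):
    def __init__(self, root):
        self.root = root
        self._filecache = {}             # required by filecache.__get__/__set__
    def join(self, fname):
        return os.path.join(self.root, fname)
    @filecache('bookmarks')
    def bookmarks(self):
        # recomputed only when <root>/bookmarks changes on disk
        return open(self.join('bookmarks')).read().splitlines()
# ----------------------------------------------------------------------------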
1023 1023 class dirs(object):
1024 1024 '''a multiset of directory names from a dirstate or manifest'''
1025 1025
1026 1026 def __init__(self, map, skip=None):
1027 1027 self._dirs = {}
1028 1028 addpath = self.addpath
1029 1029 if util.safehasattr(map, 'iteritems') and skip is not None:
1030 1030 for f, s in map.iteritems():
1031 1031 if s[0] != skip:
1032 1032 addpath(f)
1033 1033 else:
1034 1034 for f in map:
1035 1035 addpath(f)
1036 1036
1037 1037 def addpath(self, path):
1038 1038 dirs = self._dirs
1039 1039 for base in finddirs(path):
1040 1040 if base in dirs:
1041 1041 dirs[base] += 1
1042 1042 return
1043 1043 dirs[base] = 1
1044 1044
1045 1045 def delpath(self, path):
1046 1046 dirs = self._dirs
1047 1047 for base in finddirs(path):
1048 1048 if dirs[base] > 1:
1049 1049 dirs[base] -= 1
1050 1050 return
1051 1051 del dirs[base]
1052 1052
1053 1053 def __iter__(self):
1054 1054 return self._dirs.iterkeys()
1055 1055
1056 1056 def __contains__(self, d):
1057 1057 return d in self._dirs
1058 1058
1059 1059 if util.safehasattr(parsers, 'dirs'):
1060 1060 dirs = parsers.dirs
1061 1061
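# --- editor's illustration (not part of the commit) ------------------------
# The dirs multiset (pure-Python above, C version when available) keeps
# reference counts so a directory stays in the set as long as at least one
# path remains under it:
d = dirs(['a/b/c.txt', 'a/d.txt'])
assert 'a' in d and 'a/b' in d and 'a/b/c.txt' not in d   # only directories
d.delpath('a/b/c.txt')
assert 'a' in d and 'a/b' not in d    # 'a/d.txt' still keeps 'a' in the set
# ----------------------------------------------------------------------------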
1062 1062 def finddirs(path):
1063 1063 pos = path.rfind('/')
1064 1064 while pos != -1:
1065 1065 yield path[:pos]
1066 1066 pos = path.rfind('/', 0, pos)