addremove: remove dry_run, similarity from scmutil.addremove (API)...
Author: Sushil khanchi
Changeset: r37286:14cd5290 (branch: default)
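The only functional change in this hunk is in perfaddremove: with this changeset, scmutil.addremove() no longer takes dry_run (or, per the description, similarity) as keyword arguments; callers pass them through the opts dict supplied as the fourth argument instead. Below is a minimal caller-side sketch of the new calling convention; the helper name addremove_dry_run and its pats/opts parameters are illustrative assumptions, not part of the changeset.

# Minimal sketch of the post-change calling convention (assumed helper, not
# part of this changeset): dry_run travels inside the opts dict rather than
# being passed as a keyword argument to scmutil.addremove().
from mercurial import scmutil

def addremove_dry_run(repo, pats=(), opts=None):
    opts = dict(opts or {})
    # old style: scmutil.addremove(repo, matcher, "", dry_run=True)
    # new style: set the flag in opts and pass the whole dict through
    opts['dry_run'] = True
    matcher = scmutil.match(repo[None], pats, {})
    return scmutil.addremove(repo, matcher, "", opts)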
@@ -1,1763 +1,1764 b''
1 1 # perf.py - performance test routines
2 2 '''helper extension to measure performance'''
3 3
4 4 # "historical portability" policy of perf.py:
5 5 #
6 6 # We have to do:
7 7 # - make perf.py "loadable" with as wide Mercurial version as possible
8 8 # This doesn't mean that perf commands work correctly with that Mercurial.
9 9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
10 10 # - make historical perf command work correctly with as wide Mercurial
11 11 # version as possible
12 12 #
13 13 # We have to do, if possible with reasonable cost:
14 14 # - make recent perf command for historical feature work correctly
15 15 # with early Mercurial
16 16 #
17 17 # We don't have to do:
18 18 # - make perf command for recent feature work correctly with early
19 19 # Mercurial
20 20
21 21 from __future__ import absolute_import
22 22 import functools
23 23 import gc
24 24 import os
25 25 import random
26 26 import struct
27 27 import sys
28 28 import threading
29 29 import time
30 30 from mercurial import (
31 31 changegroup,
32 32 cmdutil,
33 33 commands,
34 34 copies,
35 35 error,
36 36 extensions,
37 37 mdiff,
38 38 merge,
39 39 revlog,
40 40 util,
41 41 )
42 42
43 43 # for "historical portability":
44 44 # try to import modules separately (in dict order), and ignore
45 45 # failure, because these aren't available with early Mercurial
46 46 try:
47 47 from mercurial import branchmap # since 2.5 (or bcee63733aad)
48 48 except ImportError:
49 49 pass
50 50 try:
51 51 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
52 52 except ImportError:
53 53 pass
54 54 try:
55 55 from mercurial import registrar # since 3.7 (or 37d50250b696)
56 56 dir(registrar) # forcibly load it
57 57 except ImportError:
58 58 registrar = None
59 59 try:
60 60 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
61 61 except ImportError:
62 62 pass
63 63 try:
64 64 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
65 65 except ImportError:
66 66 pass
67 67 try:
68 68 from mercurial import pycompat
69 69 getargspec = pycompat.getargspec # added to module after 4.5
70 70 except (ImportError, AttributeError):
71 71 import inspect
72 72 getargspec = inspect.getargspec
73 73
74 74 # for "historical portability":
75 75 # define util.safehasattr forcibly, because util.safehasattr has been
76 76 # available since 1.9.3 (or 94b200a11cf7)
77 77 _undefined = object()
78 78 def safehasattr(thing, attr):
79 79 return getattr(thing, attr, _undefined) is not _undefined
80 80 setattr(util, 'safehasattr', safehasattr)
81 81
82 82 # for "historical portability":
83 83 # define util.timer forcibly, because util.timer has been available
84 84 # since ae5d60bb70c9
85 85 if safehasattr(time, 'perf_counter'):
86 86 util.timer = time.perf_counter
87 87 elif os.name == 'nt':
88 88 util.timer = time.clock
89 89 else:
90 90 util.timer = time.time
91 91
92 92 # for "historical portability":
93 93 # use locally defined empty option list, if formatteropts isn't
94 94 # available, because commands.formatteropts has been available since
95 95 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
96 96 # available since 2.2 (or ae5f92e154d3)
97 97 formatteropts = getattr(cmdutil, "formatteropts",
98 98 getattr(commands, "formatteropts", []))
99 99
100 100 # for "historical portability":
101 101 # use locally defined option list, if debugrevlogopts isn't available,
102 102 # because commands.debugrevlogopts has been available since 3.7 (or
103 103 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
104 104 # since 1.9 (or a79fea6b3e77).
105 105 revlogopts = getattr(cmdutil, "debugrevlogopts",
106 106 getattr(commands, "debugrevlogopts", [
107 107 ('c', 'changelog', False, ('open changelog')),
108 108 ('m', 'manifest', False, ('open manifest')),
109 109 ('', 'dir', False, ('open directory manifest')),
110 110 ]))
111 111
112 112 cmdtable = {}
113 113
114 114 # for "historical portability":
115 115 # define parsealiases locally, because cmdutil.parsealiases has been
116 116 # available since 1.5 (or 6252852b4332)
117 117 def parsealiases(cmd):
118 118 return cmd.lstrip("^").split("|")
119 119
120 120 if safehasattr(registrar, 'command'):
121 121 command = registrar.command(cmdtable)
122 122 elif safehasattr(cmdutil, 'command'):
123 123 command = cmdutil.command(cmdtable)
124 124 if 'norepo' not in getargspec(command).args:
125 125 # for "historical portability":
126 126 # wrap original cmdutil.command, because "norepo" option has
127 127 # been available since 3.1 (or 75a96326cecb)
128 128 _command = command
129 129 def command(name, options=(), synopsis=None, norepo=False):
130 130 if norepo:
131 131 commands.norepo += ' %s' % ' '.join(parsealiases(name))
132 132 return _command(name, list(options), synopsis)
133 133 else:
134 134 # for "historical portability":
135 135 # define "@command" annotation locally, because cmdutil.command
136 136 # has been available since 1.9 (or 2daa5179e73f)
137 137 def command(name, options=(), synopsis=None, norepo=False):
138 138 def decorator(func):
139 139 if synopsis:
140 140 cmdtable[name] = func, list(options), synopsis
141 141 else:
142 142 cmdtable[name] = func, list(options)
143 143 if norepo:
144 144 commands.norepo += ' %s' % ' '.join(parsealiases(name))
145 145 return func
146 146 return decorator
147 147
148 148 try:
149 149 import mercurial.registrar
150 150 import mercurial.configitems
151 151 configtable = {}
152 152 configitem = mercurial.registrar.configitem(configtable)
153 153 configitem('perf', 'presleep',
154 154 default=mercurial.configitems.dynamicdefault,
155 155 )
156 156 configitem('perf', 'stub',
157 157 default=mercurial.configitems.dynamicdefault,
158 158 )
159 159 configitem('perf', 'parentscount',
160 160 default=mercurial.configitems.dynamicdefault,
161 161 )
162 162 except (ImportError, AttributeError):
163 163 pass
164 164
165 165 def getlen(ui):
166 166 if ui.configbool("perf", "stub", False):
167 167 return lambda x: 1
168 168 return len
169 169
170 170 def gettimer(ui, opts=None):
171 171 """return a timer function and formatter: (timer, formatter)
172 172
173 173 This function exists to gather the creation of formatter in a single
174 174 place instead of duplicating it in all performance commands."""
175 175
176 176 # enforce an idle period before execution to counteract power management
177 177 # experimental config: perf.presleep
178 178 time.sleep(getint(ui, "perf", "presleep", 1))
179 179
180 180 if opts is None:
181 181 opts = {}
182 182 # redirect all to stderr unless buffer api is in use
183 183 if not ui._buffers:
184 184 ui = ui.copy()
185 185 uifout = safeattrsetter(ui, 'fout', ignoremissing=True)
186 186 if uifout:
187 187 # for "historical portability":
188 188 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
189 189 uifout.set(ui.ferr)
190 190
191 191 # get a formatter
192 192 uiformatter = getattr(ui, 'formatter', None)
193 193 if uiformatter:
194 194 fm = uiformatter('perf', opts)
195 195 else:
196 196 # for "historical portability":
197 197 # define formatter locally, because ui.formatter has been
198 198 # available since 2.2 (or ae5f92e154d3)
199 199 from mercurial import node
200 200 class defaultformatter(object):
201 201 """Minimized composition of baseformatter and plainformatter
202 202 """
203 203 def __init__(self, ui, topic, opts):
204 204 self._ui = ui
205 205 if ui.debugflag:
206 206 self.hexfunc = node.hex
207 207 else:
208 208 self.hexfunc = node.short
209 209 def __nonzero__(self):
210 210 return False
211 211 __bool__ = __nonzero__
212 212 def startitem(self):
213 213 pass
214 214 def data(self, **data):
215 215 pass
216 216 def write(self, fields, deftext, *fielddata, **opts):
217 217 self._ui.write(deftext % fielddata, **opts)
218 218 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
219 219 if cond:
220 220 self._ui.write(deftext % fielddata, **opts)
221 221 def plain(self, text, **opts):
222 222 self._ui.write(text, **opts)
223 223 def end(self):
224 224 pass
225 225 fm = defaultformatter(ui, 'perf', opts)
226 226
227 227 # stub function, runs code only once instead of in a loop
228 228 # experimental config: perf.stub
229 229 if ui.configbool("perf", "stub", False):
230 230 return functools.partial(stub_timer, fm), fm
231 231 return functools.partial(_timer, fm), fm
232 232
233 233 def stub_timer(fm, func, title=None):
234 234 func()
235 235
236 236 def _timer(fm, func, title=None):
237 237 gc.collect()
238 238 results = []
239 239 begin = util.timer()
240 240 count = 0
241 241 while True:
242 242 ostart = os.times()
243 243 cstart = util.timer()
244 244 r = func()
245 245 cstop = util.timer()
246 246 ostop = os.times()
247 247 count += 1
248 248 a, b = ostart, ostop
249 249 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
250 250 if cstop - begin > 3 and count >= 100:
251 251 break
252 252 if cstop - begin > 10 and count >= 3:
253 253 break
254 254
255 255 fm.startitem()
256 256
257 257 if title:
258 258 fm.write('title', '! %s\n', title)
259 259 if r:
260 260 fm.write('result', '! result: %s\n', r)
261 261 m = min(results)
262 262 fm.plain('!')
263 263 fm.write('wall', ' wall %f', m[0])
264 264 fm.write('comb', ' comb %f', m[1] + m[2])
265 265 fm.write('user', ' user %f', m[1])
266 266 fm.write('sys', ' sys %f', m[2])
267 267 fm.write('count', ' (best of %d)', count)
268 268 fm.plain('\n')
269 269
270 270 # utilities for historical portability
271 271
272 272 def getint(ui, section, name, default):
273 273 # for "historical portability":
274 274 # ui.configint has been available since 1.9 (or fa2b596db182)
275 275 v = ui.config(section, name, None)
276 276 if v is None:
277 277 return default
278 278 try:
279 279 return int(v)
280 280 except ValueError:
281 281 raise error.ConfigError(("%s.%s is not an integer ('%s')")
282 282 % (section, name, v))
283 283
284 284 def safeattrsetter(obj, name, ignoremissing=False):
285 285 """Ensure that 'obj' has 'name' attribute before subsequent setattr
286 286
287 287 This function is aborted, if 'obj' doesn't have 'name' attribute
288 288 at runtime. This avoids overlooking removal of an attribute, which
289 289 breaks assumption of performance measurement, in the future.
290 290
291 291 This function returns the object to (1) assign a new value, and
292 292 (2) restore an original value to the attribute.
293 293
294 294 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
295 295 abortion, and this function returns None. This is useful to
296 296 examine an attribute, which isn't ensured in all Mercurial
297 297 versions.
298 298 """
299 299 if not util.safehasattr(obj, name):
300 300 if ignoremissing:
301 301 return None
302 302 raise error.Abort(("missing attribute %s of %s might break assumption"
303 303 " of performance measurement") % (name, obj))
304 304
305 305 origvalue = getattr(obj, name)
306 306 class attrutil(object):
307 307 def set(self, newvalue):
308 308 setattr(obj, name, newvalue)
309 309 def restore(self):
310 310 setattr(obj, name, origvalue)
311 311
312 312 return attrutil()
313 313
314 314 # utilities to examine each internal API changes
315 315
316 316 def getbranchmapsubsettable():
317 317 # for "historical portability":
318 318 # subsettable is defined in:
319 319 # - branchmap since 2.9 (or 175c6fd8cacc)
320 320 # - repoview since 2.5 (or 59a9f18d4587)
321 321 for mod in (branchmap, repoview):
322 322 subsettable = getattr(mod, 'subsettable', None)
323 323 if subsettable:
324 324 return subsettable
325 325
326 326 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
327 327 # branchmap and repoview modules exist, but subsettable attribute
328 328 # doesn't)
329 329 raise error.Abort(("perfbranchmap not available with this Mercurial"),
330 330 hint="use 2.5 or later")
331 331
332 332 def getsvfs(repo):
333 333 """Return appropriate object to access files under .hg/store
334 334 """
335 335 # for "historical portability":
336 336 # repo.svfs has been available since 2.3 (or 7034365089bf)
337 337 svfs = getattr(repo, 'svfs', None)
338 338 if svfs:
339 339 return svfs
340 340 else:
341 341 return getattr(repo, 'sopener')
342 342
343 343 def getvfs(repo):
344 344 """Return appropriate object to access files under .hg
345 345 """
346 346 # for "historical portability":
347 347 # repo.vfs has been available since 2.3 (or 7034365089bf)
348 348 vfs = getattr(repo, 'vfs', None)
349 349 if vfs:
350 350 return vfs
351 351 else:
352 352 return getattr(repo, 'opener')
353 353
354 354 def repocleartagscachefunc(repo):
355 355 """Return the function to clear tags cache according to repo internal API
356 356 """
357 357 if util.safehasattr(repo, '_tagscache'): # since 2.0 (or 9dca7653b525)
358 358 # in this case, setattr(repo, '_tagscache', None) or so isn't
359 359 # correct way to clear tags cache, because existing code paths
360 360 # expect _tagscache to be a structured object.
361 361 def clearcache():
362 362 # _tagscache has been filteredpropertycache since 2.5 (or
363 363 # 98c867ac1330), and delattr() can't work in such case
364 364 if '_tagscache' in vars(repo):
365 365 del repo.__dict__['_tagscache']
366 366 return clearcache
367 367
368 368 repotags = safeattrsetter(repo, '_tags', ignoremissing=True)
369 369 if repotags: # since 1.4 (or 5614a628d173)
370 370 return lambda : repotags.set(None)
371 371
372 372 repotagscache = safeattrsetter(repo, 'tagscache', ignoremissing=True)
373 373 if repotagscache: # since 0.6 (or d7df759d0e97)
374 374 return lambda : repotagscache.set(None)
375 375
376 376 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
377 377 # this point, but it isn't so problematic, because:
378 378 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
379 379 # in perftags() causes failure soon
380 380 # - perf.py itself has been available since 1.1 (or eb240755386d)
381 381 raise error.Abort(("tags API of this hg command is unknown"))
382 382
383 383 # utilities to clear cache
384 384
385 385 def clearfilecache(repo, attrname):
386 386 unfi = repo.unfiltered()
387 387 if attrname in vars(unfi):
388 388 delattr(unfi, attrname)
389 389 unfi._filecache.pop(attrname, None)
390 390
391 391 # perf commands
392 392
393 393 @command('perfwalk', formatteropts)
394 394 def perfwalk(ui, repo, *pats, **opts):
395 395 timer, fm = gettimer(ui, opts)
396 396 m = scmutil.match(repo[None], pats, {})
397 397 timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
398 398 ignored=False))))
399 399 fm.end()
400 400
401 401 @command('perfannotate', formatteropts)
402 402 def perfannotate(ui, repo, f, **opts):
403 403 timer, fm = gettimer(ui, opts)
404 404 fc = repo['.'][f]
405 405 timer(lambda: len(fc.annotate(True)))
406 406 fm.end()
407 407
408 408 @command('perfstatus',
409 409 [('u', 'unknown', False,
410 410 'ask status to look for unknown files')] + formatteropts)
411 411 def perfstatus(ui, repo, **opts):
412 412 #m = match.always(repo.root, repo.getcwd())
413 413 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
414 414 # False))))
415 415 timer, fm = gettimer(ui, opts)
416 416 timer(lambda: sum(map(len, repo.status(unknown=opts['unknown']))))
417 417 fm.end()
418 418
419 419 @command('perfaddremove', formatteropts)
420 420 def perfaddremove(ui, repo, **opts):
421 421 timer, fm = gettimer(ui, opts)
422 422 try:
423 423 oldquiet = repo.ui.quiet
424 424 repo.ui.quiet = True
425 425 matcher = scmutil.match(repo[None])
426      -     timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
     426 +     opts['dry_run'] = True
     427 +     timer(lambda: scmutil.addremove(repo, matcher, "", opts))
427 428 finally:
428 429 repo.ui.quiet = oldquiet
429 430 fm.end()
430 431
431 432 def clearcaches(cl):
432 433 # behave somewhat consistently across internal API changes
433 434 if util.safehasattr(cl, 'clearcaches'):
434 435 cl.clearcaches()
435 436 elif util.safehasattr(cl, '_nodecache'):
436 437 from mercurial.node import nullid, nullrev
437 438 cl._nodecache = {nullid: nullrev}
438 439 cl._nodepos = None
439 440
440 441 @command('perfheads', formatteropts)
441 442 def perfheads(ui, repo, **opts):
442 443 timer, fm = gettimer(ui, opts)
443 444 cl = repo.changelog
444 445 def d():
445 446 len(cl.headrevs())
446 447 clearcaches(cl)
447 448 timer(d)
448 449 fm.end()
449 450
450 451 @command('perftags', formatteropts)
451 452 def perftags(ui, repo, **opts):
452 453 import mercurial.changelog
453 454 import mercurial.manifest
454 455 timer, fm = gettimer(ui, opts)
455 456 svfs = getsvfs(repo)
456 457 repocleartagscache = repocleartagscachefunc(repo)
457 458 def t():
458 459 repo.changelog = mercurial.changelog.changelog(svfs)
459 460 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo)
460 461 repocleartagscache()
461 462 return len(repo.tags())
462 463 timer(t)
463 464 fm.end()
464 465
465 466 @command('perfancestors', formatteropts)
466 467 def perfancestors(ui, repo, **opts):
467 468 timer, fm = gettimer(ui, opts)
468 469 heads = repo.changelog.headrevs()
469 470 def d():
470 471 for a in repo.changelog.ancestors(heads):
471 472 pass
472 473 timer(d)
473 474 fm.end()
474 475
475 476 @command('perfancestorset', formatteropts)
476 477 def perfancestorset(ui, repo, revset, **opts):
477 478 timer, fm = gettimer(ui, opts)
478 479 revs = repo.revs(revset)
479 480 heads = repo.changelog.headrevs()
480 481 def d():
481 482 s = repo.changelog.ancestors(heads)
482 483 for rev in revs:
483 484 rev in s
484 485 timer(d)
485 486 fm.end()
486 487
487 488 @command('perfbookmarks', formatteropts)
488 489 def perfbookmarks(ui, repo, **opts):
489 490 """benchmark parsing bookmarks from disk to memory"""
490 491 timer, fm = gettimer(ui, opts)
491 492 def d():
492 493 clearfilecache(repo, '_bookmarks')
493 494 repo._bookmarks
494 495 timer(d)
495 496 fm.end()
496 497
497 498 @command('perfbundleread', formatteropts, 'BUNDLE')
498 499 def perfbundleread(ui, repo, bundlepath, **opts):
499 500 """Benchmark reading of bundle files.
500 501
501 502 This command is meant to isolate the I/O part of bundle reading as
502 503 much as possible.
503 504 """
504 505 from mercurial import (
505 506 bundle2,
506 507 exchange,
507 508 streamclone,
508 509 )
509 510
510 511 def makebench(fn):
511 512 def run():
512 513 with open(bundlepath, 'rb') as fh:
513 514 bundle = exchange.readbundle(ui, fh, bundlepath)
514 515 fn(bundle)
515 516
516 517 return run
517 518
518 519 def makereadnbytes(size):
519 520 def run():
520 521 with open(bundlepath, 'rb') as fh:
521 522 bundle = exchange.readbundle(ui, fh, bundlepath)
522 523 while bundle.read(size):
523 524 pass
524 525
525 526 return run
526 527
527 528 def makestdioread(size):
528 529 def run():
529 530 with open(bundlepath, 'rb') as fh:
530 531 while fh.read(size):
531 532 pass
532 533
533 534 return run
534 535
535 536 # bundle1
536 537
537 538 def deltaiter(bundle):
538 539 for delta in bundle.deltaiter():
539 540 pass
540 541
541 542 def iterchunks(bundle):
542 543 for chunk in bundle.getchunks():
543 544 pass
544 545
545 546 # bundle2
546 547
547 548 def forwardchunks(bundle):
548 549 for chunk in bundle._forwardchunks():
549 550 pass
550 551
551 552 def iterparts(bundle):
552 553 for part in bundle.iterparts():
553 554 pass
554 555
555 556 def iterpartsseekable(bundle):
556 557 for part in bundle.iterparts(seekable=True):
557 558 pass
558 559
559 560 def seek(bundle):
560 561 for part in bundle.iterparts(seekable=True):
561 562 part.seek(0, os.SEEK_END)
562 563
563 564 def makepartreadnbytes(size):
564 565 def run():
565 566 with open(bundlepath, 'rb') as fh:
566 567 bundle = exchange.readbundle(ui, fh, bundlepath)
567 568 for part in bundle.iterparts():
568 569 while part.read(size):
569 570 pass
570 571
571 572 return run
572 573
573 574 benches = [
574 575 (makestdioread(8192), 'read(8k)'),
575 576 (makestdioread(16384), 'read(16k)'),
576 577 (makestdioread(32768), 'read(32k)'),
577 578 (makestdioread(131072), 'read(128k)'),
578 579 ]
579 580
580 581 with open(bundlepath, 'rb') as fh:
581 582 bundle = exchange.readbundle(ui, fh, bundlepath)
582 583
583 584 if isinstance(bundle, changegroup.cg1unpacker):
584 585 benches.extend([
585 586 (makebench(deltaiter), 'cg1 deltaiter()'),
586 587 (makebench(iterchunks), 'cg1 getchunks()'),
587 588 (makereadnbytes(8192), 'cg1 read(8k)'),
588 589 (makereadnbytes(16384), 'cg1 read(16k)'),
589 590 (makereadnbytes(32768), 'cg1 read(32k)'),
590 591 (makereadnbytes(131072), 'cg1 read(128k)'),
591 592 ])
592 593 elif isinstance(bundle, bundle2.unbundle20):
593 594 benches.extend([
594 595 (makebench(forwardchunks), 'bundle2 forwardchunks()'),
595 596 (makebench(iterparts), 'bundle2 iterparts()'),
596 597 (makebench(iterpartsseekable), 'bundle2 iterparts() seekable'),
597 598 (makebench(seek), 'bundle2 part seek()'),
598 599 (makepartreadnbytes(8192), 'bundle2 part read(8k)'),
599 600 (makepartreadnbytes(16384), 'bundle2 part read(16k)'),
600 601 (makepartreadnbytes(32768), 'bundle2 part read(32k)'),
601 602 (makepartreadnbytes(131072), 'bundle2 part read(128k)'),
602 603 ])
603 604 elif isinstance(bundle, streamclone.streamcloneapplier):
604 605 raise error.Abort('stream clone bundles not supported')
605 606 else:
606 607 raise error.Abort('unhandled bundle type: %s' % type(bundle))
607 608
608 609 for fn, title in benches:
609 610 timer, fm = gettimer(ui, opts)
610 611 timer(fn, title=title)
611 612 fm.end()
612 613
613 614 @command('perfchangegroupchangelog', formatteropts +
614 615 [('', 'version', '02', 'changegroup version'),
615 616 ('r', 'rev', '', 'revisions to add to changegroup')])
616 617 def perfchangegroupchangelog(ui, repo, version='02', rev=None, **opts):
617 618 """Benchmark producing a changelog group for a changegroup.
618 619
619 620 This measures the time spent processing the changelog during a
620 621 bundle operation. This occurs during `hg bundle` and on a server
621 622 processing a `getbundle` wire protocol request (handles clones
622 623 and pull requests).
623 624
624 625 By default, all revisions are added to the changegroup.
625 626 """
626 627 cl = repo.changelog
627 628 revs = [cl.lookup(r) for r in repo.revs(rev or 'all()')]
628 629 bundler = changegroup.getbundler(version, repo)
629 630
630 631 def lookup(node):
631 632 # The real bundler reads the revision in order to access the
632 633 # manifest node and files list. Do that here.
633 634 cl.read(node)
634 635 return node
635 636
636 637 def d():
637 638 for chunk in bundler.group(revs, cl, lookup):
638 639 pass
639 640
640 641 timer, fm = gettimer(ui, opts)
641 642 timer(d)
642 643 fm.end()
643 644
644 645 @command('perfdirs', formatteropts)
645 646 def perfdirs(ui, repo, **opts):
646 647 timer, fm = gettimer(ui, opts)
647 648 dirstate = repo.dirstate
648 649 'a' in dirstate
649 650 def d():
650 651 dirstate.hasdir('a')
651 652 del dirstate._map._dirs
652 653 timer(d)
653 654 fm.end()
654 655
655 656 @command('perfdirstate', formatteropts)
656 657 def perfdirstate(ui, repo, **opts):
657 658 timer, fm = gettimer(ui, opts)
658 659 "a" in repo.dirstate
659 660 def d():
660 661 repo.dirstate.invalidate()
661 662 "a" in repo.dirstate
662 663 timer(d)
663 664 fm.end()
664 665
665 666 @command('perfdirstatedirs', formatteropts)
666 667 def perfdirstatedirs(ui, repo, **opts):
667 668 timer, fm = gettimer(ui, opts)
668 669 "a" in repo.dirstate
669 670 def d():
670 671 repo.dirstate.hasdir("a")
671 672 del repo.dirstate._map._dirs
672 673 timer(d)
673 674 fm.end()
674 675
675 676 @command('perfdirstatefoldmap', formatteropts)
676 677 def perfdirstatefoldmap(ui, repo, **opts):
677 678 timer, fm = gettimer(ui, opts)
678 679 dirstate = repo.dirstate
679 680 'a' in dirstate
680 681 def d():
681 682 dirstate._map.filefoldmap.get('a')
682 683 del dirstate._map.filefoldmap
683 684 timer(d)
684 685 fm.end()
685 686
686 687 @command('perfdirfoldmap', formatteropts)
687 688 def perfdirfoldmap(ui, repo, **opts):
688 689 timer, fm = gettimer(ui, opts)
689 690 dirstate = repo.dirstate
690 691 'a' in dirstate
691 692 def d():
692 693 dirstate._map.dirfoldmap.get('a')
693 694 del dirstate._map.dirfoldmap
694 695 del dirstate._map._dirs
695 696 timer(d)
696 697 fm.end()
697 698
698 699 @command('perfdirstatewrite', formatteropts)
699 700 def perfdirstatewrite(ui, repo, **opts):
700 701 timer, fm = gettimer(ui, opts)
701 702 ds = repo.dirstate
702 703 "a" in ds
703 704 def d():
704 705 ds._dirty = True
705 706 ds.write(repo.currenttransaction())
706 707 timer(d)
707 708 fm.end()
708 709
709 710 @command('perfmergecalculate',
710 711 [('r', 'rev', '.', 'rev to merge against')] + formatteropts)
711 712 def perfmergecalculate(ui, repo, rev, **opts):
712 713 timer, fm = gettimer(ui, opts)
713 714 wctx = repo[None]
714 715 rctx = scmutil.revsingle(repo, rev, rev)
715 716 ancestor = wctx.ancestor(rctx)
716 717 # we don't want working dir files to be stat'd in the benchmark, so prime
717 718 # that cache
718 719 wctx.dirty()
719 720 def d():
720 721 # acceptremote is True because we don't want prompts in the middle of
721 722 # our benchmark
722 723 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
723 724 acceptremote=True, followcopies=True)
724 725 timer(d)
725 726 fm.end()
726 727
727 728 @command('perfpathcopies', [], "REV REV")
728 729 def perfpathcopies(ui, repo, rev1, rev2, **opts):
729 730 timer, fm = gettimer(ui, opts)
730 731 ctx1 = scmutil.revsingle(repo, rev1, rev1)
731 732 ctx2 = scmutil.revsingle(repo, rev2, rev2)
732 733 def d():
733 734 copies.pathcopies(ctx1, ctx2)
734 735 timer(d)
735 736 fm.end()
736 737
737 738 @command('perfphases',
738 739 [('', 'full', False, 'include file reading time too'),
739 740 ], "")
740 741 def perfphases(ui, repo, **opts):
741 742 """benchmark phasesets computation"""
742 743 timer, fm = gettimer(ui, opts)
743 744 _phases = repo._phasecache
744 745 full = opts.get('full')
745 746 def d():
746 747 phases = _phases
747 748 if full:
748 749 clearfilecache(repo, '_phasecache')
749 750 phases = repo._phasecache
750 751 phases.invalidate()
751 752 phases.loadphaserevs(repo)
752 753 timer(d)
753 754 fm.end()
754 755
755 756 @command('perfmanifest', [], 'REV')
756 757 def perfmanifest(ui, repo, rev, **opts):
757 758 timer, fm = gettimer(ui, opts)
758 759 ctx = scmutil.revsingle(repo, rev, rev)
759 760 t = ctx.manifestnode()
760 761 def d():
761 762 repo.manifestlog.clearcaches()
762 763 repo.manifestlog[t].read()
763 764 timer(d)
764 765 fm.end()
765 766
766 767 @command('perfchangeset', formatteropts)
767 768 def perfchangeset(ui, repo, rev, **opts):
768 769 timer, fm = gettimer(ui, opts)
769 770 n = repo[rev].node()
770 771 def d():
771 772 repo.changelog.read(n)
772 773 #repo.changelog._cache = None
773 774 timer(d)
774 775 fm.end()
775 776
776 777 @command('perfindex', formatteropts)
777 778 def perfindex(ui, repo, **opts):
778 779 import mercurial.revlog
779 780 timer, fm = gettimer(ui, opts)
780 781 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
781 782 n = repo["tip"].node()
782 783 svfs = getsvfs(repo)
783 784 def d():
784 785 cl = mercurial.revlog.revlog(svfs, "00changelog.i")
785 786 cl.rev(n)
786 787 timer(d)
787 788 fm.end()
788 789
789 790 @command('perfstartup', formatteropts)
790 791 def perfstartup(ui, repo, **opts):
791 792 timer, fm = gettimer(ui, opts)
792 793 cmd = sys.argv[0]
793 794 def d():
794 795 if os.name != 'nt':
795 796 os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
796 797 else:
797 798 os.environ['HGRCPATH'] = ' '
798 799 os.system("%s version -q > NUL" % cmd)
799 800 timer(d)
800 801 fm.end()
801 802
802 803 @command('perfparents', formatteropts)
803 804 def perfparents(ui, repo, **opts):
804 805 timer, fm = gettimer(ui, opts)
805 806 # control the number of commits perfparents iterates over
806 807 # experimental config: perf.parentscount
807 808 count = getint(ui, "perf", "parentscount", 1000)
808 809 if len(repo.changelog) < count:
809 810 raise error.Abort("repo needs %d commits for this test" % count)
810 811 repo = repo.unfiltered()
811 812 nl = [repo.changelog.node(i) for i in xrange(count)]
812 813 def d():
813 814 for n in nl:
814 815 repo.changelog.parents(n)
815 816 timer(d)
816 817 fm.end()
817 818
818 819 @command('perfctxfiles', formatteropts)
819 820 def perfctxfiles(ui, repo, x, **opts):
820 821 x = int(x)
821 822 timer, fm = gettimer(ui, opts)
822 823 def d():
823 824 len(repo[x].files())
824 825 timer(d)
825 826 fm.end()
826 827
827 828 @command('perfrawfiles', formatteropts)
828 829 def perfrawfiles(ui, repo, x, **opts):
829 830 x = int(x)
830 831 timer, fm = gettimer(ui, opts)
831 832 cl = repo.changelog
832 833 def d():
833 834 len(cl.read(x)[3])
834 835 timer(d)
835 836 fm.end()
836 837
837 838 @command('perflookup', formatteropts)
838 839 def perflookup(ui, repo, rev, **opts):
839 840 timer, fm = gettimer(ui, opts)
840 841 timer(lambda: len(repo.lookup(rev)))
841 842 fm.end()
842 843
843 844 @command('perfrevrange', formatteropts)
844 845 def perfrevrange(ui, repo, *specs, **opts):
845 846 timer, fm = gettimer(ui, opts)
846 847 revrange = scmutil.revrange
847 848 timer(lambda: len(revrange(repo, specs)))
848 849 fm.end()
849 850
850 851 @command('perfnodelookup', formatteropts)
851 852 def perfnodelookup(ui, repo, rev, **opts):
852 853 timer, fm = gettimer(ui, opts)
853 854 import mercurial.revlog
854 855 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
855 856 n = repo[rev].node()
856 857 cl = mercurial.revlog.revlog(getsvfs(repo), "00changelog.i")
857 858 def d():
858 859 cl.rev(n)
859 860 clearcaches(cl)
860 861 timer(d)
861 862 fm.end()
862 863
863 864 @command('perflog',
864 865 [('', 'rename', False, 'ask log to follow renames')] + formatteropts)
865 866 def perflog(ui, repo, rev=None, **opts):
866 867 if rev is None:
867 868 rev=[]
868 869 timer, fm = gettimer(ui, opts)
869 870 ui.pushbuffer()
870 871 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
871 872 copies=opts.get('rename')))
872 873 ui.popbuffer()
873 874 fm.end()
874 875
875 876 @command('perfmoonwalk', formatteropts)
876 877 def perfmoonwalk(ui, repo, **opts):
877 878 """benchmark walking the changelog backwards
878 879
879 880 This also loads the changelog data for each revision in the changelog.
880 881 """
881 882 timer, fm = gettimer(ui, opts)
882 883 def moonwalk():
883 884 for i in xrange(len(repo), -1, -1):
884 885 ctx = repo[i]
885 886 ctx.branch() # read changelog data (in addition to the index)
886 887 timer(moonwalk)
887 888 fm.end()
888 889
889 890 @command('perftemplating', formatteropts)
890 891 def perftemplating(ui, repo, rev=None, **opts):
891 892 if rev is None:
892 893 rev=[]
893 894 timer, fm = gettimer(ui, opts)
894 895 ui.pushbuffer()
895 896 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
896 897 template='{date|shortdate} [{rev}:{node|short}]'
897 898 ' {author|person}: {desc|firstline}\n'))
898 899 ui.popbuffer()
899 900 fm.end()
900 901
901 902 @command('perfcca', formatteropts)
902 903 def perfcca(ui, repo, **opts):
903 904 timer, fm = gettimer(ui, opts)
904 905 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
905 906 fm.end()
906 907
907 908 @command('perffncacheload', formatteropts)
908 909 def perffncacheload(ui, repo, **opts):
909 910 timer, fm = gettimer(ui, opts)
910 911 s = repo.store
911 912 def d():
912 913 s.fncache._load()
913 914 timer(d)
914 915 fm.end()
915 916
916 917 @command('perffncachewrite', formatteropts)
917 918 def perffncachewrite(ui, repo, **opts):
918 919 timer, fm = gettimer(ui, opts)
919 920 s = repo.store
920 921 s.fncache._load()
921 922 lock = repo.lock()
922 923 tr = repo.transaction('perffncachewrite')
923 924 def d():
924 925 s.fncache._dirty = True
925 926 s.fncache.write(tr)
926 927 timer(d)
927 928 tr.close()
928 929 lock.release()
929 930 fm.end()
930 931
931 932 @command('perffncacheencode', formatteropts)
932 933 def perffncacheencode(ui, repo, **opts):
933 934 timer, fm = gettimer(ui, opts)
934 935 s = repo.store
935 936 s.fncache._load()
936 937 def d():
937 938 for p in s.fncache.entries:
938 939 s.encode(p)
939 940 timer(d)
940 941 fm.end()
941 942
942 943 def _bdiffworker(q, blocks, xdiff, ready, done):
943 944 while not done.is_set():
944 945 pair = q.get()
945 946 while pair is not None:
946 947 if xdiff:
947 948 mdiff.bdiff.xdiffblocks(*pair)
948 949 elif blocks:
949 950 mdiff.bdiff.blocks(*pair)
950 951 else:
951 952 mdiff.textdiff(*pair)
952 953 q.task_done()
953 954 pair = q.get()
954 955 q.task_done() # for the None one
955 956 with ready:
956 957 ready.wait()
957 958
958 959 @command('perfbdiff', revlogopts + formatteropts + [
959 960 ('', 'count', 1, 'number of revisions to test (when using --startrev)'),
960 961 ('', 'alldata', False, 'test bdiffs for all associated revisions'),
961 962 ('', 'threads', 0, 'number of thread to use (disable with 0)'),
962 963 ('', 'blocks', False, 'test computing diffs into blocks'),
963 964 ('', 'xdiff', False, 'use xdiff algorithm'),
964 965 ],
965 966
966 967 '-c|-m|FILE REV')
967 968 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
968 969 """benchmark a bdiff between revisions
969 970
970 971 By default, benchmark a bdiff between its delta parent and itself.
971 972
972 973 With ``--count``, benchmark bdiffs between delta parents and self for N
973 974 revisions starting at the specified revision.
974 975
975 976 With ``--alldata``, assume the requested revision is a changeset and
976 977 measure bdiffs for all changes related to that changeset (manifest
977 978 and filelogs).
978 979 """
979 980 opts = pycompat.byteskwargs(opts)
980 981
981 982 if opts['xdiff'] and not opts['blocks']:
982 983 raise error.CommandError('perfbdiff', '--xdiff requires --blocks')
983 984
984 985 if opts['alldata']:
985 986 opts['changelog'] = True
986 987
987 988 if opts.get('changelog') or opts.get('manifest'):
988 989 file_, rev = None, file_
989 990 elif rev is None:
990 991 raise error.CommandError('perfbdiff', 'invalid arguments')
991 992
992 993 blocks = opts['blocks']
993 994 xdiff = opts['xdiff']
994 995 textpairs = []
995 996
996 997 r = cmdutil.openrevlog(repo, 'perfbdiff', file_, opts)
997 998
998 999 startrev = r.rev(r.lookup(rev))
999 1000 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1000 1001 if opts['alldata']:
1001 1002 # Load revisions associated with changeset.
1002 1003 ctx = repo[rev]
1003 1004 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1004 1005 for pctx in ctx.parents():
1005 1006 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1006 1007 textpairs.append((pman, mtext))
1007 1008
1008 1009 # Load filelog revisions by iterating manifest delta.
1009 1010 man = ctx.manifest()
1010 1011 pman = ctx.p1().manifest()
1011 1012 for filename, change in pman.diff(man).items():
1012 1013 fctx = repo.file(filename)
1013 1014 f1 = fctx.revision(change[0][0] or -1)
1014 1015 f2 = fctx.revision(change[1][0] or -1)
1015 1016 textpairs.append((f1, f2))
1016 1017 else:
1017 1018 dp = r.deltaparent(rev)
1018 1019 textpairs.append((r.revision(dp), r.revision(rev)))
1019 1020
1020 1021 withthreads = threads > 0
1021 1022 if not withthreads:
1022 1023 def d():
1023 1024 for pair in textpairs:
1024 1025 if xdiff:
1025 1026 mdiff.bdiff.xdiffblocks(*pair)
1026 1027 elif blocks:
1027 1028 mdiff.bdiff.blocks(*pair)
1028 1029 else:
1029 1030 mdiff.textdiff(*pair)
1030 1031 else:
1031 1032 q = util.queue()
1032 1033 for i in xrange(threads):
1033 1034 q.put(None)
1034 1035 ready = threading.Condition()
1035 1036 done = threading.Event()
1036 1037 for i in xrange(threads):
1037 1038 threading.Thread(target=_bdiffworker,
1038 1039 args=(q, blocks, xdiff, ready, done)).start()
1039 1040 q.join()
1040 1041 def d():
1041 1042 for pair in textpairs:
1042 1043 q.put(pair)
1043 1044 for i in xrange(threads):
1044 1045 q.put(None)
1045 1046 with ready:
1046 1047 ready.notify_all()
1047 1048 q.join()
1048 1049 timer, fm = gettimer(ui, opts)
1049 1050 timer(d)
1050 1051 fm.end()
1051 1052
1052 1053 if withthreads:
1053 1054 done.set()
1054 1055 for i in xrange(threads):
1055 1056 q.put(None)
1056 1057 with ready:
1057 1058 ready.notify_all()
1058 1059
1059 1060 @command('perfunidiff', revlogopts + formatteropts + [
1060 1061 ('', 'count', 1, 'number of revisions to test (when using --startrev)'),
1061 1062 ('', 'alldata', False, 'test unidiffs for all associated revisions'),
1062 1063 ], '-c|-m|FILE REV')
1063 1064 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
1064 1065 """benchmark a unified diff between revisions
1065 1066
1066 1067 This doesn't include any copy tracing - it's just a unified diff
1067 1068 of the texts.
1068 1069
1069 1070 By default, benchmark a diff between its delta parent and itself.
1070 1071
1071 1072 With ``--count``, benchmark diffs between delta parents and self for N
1072 1073 revisions starting at the specified revision.
1073 1074
1074 1075 With ``--alldata``, assume the requested revision is a changeset and
1075 1076 measure diffs for all changes related to that changeset (manifest
1076 1077 and filelogs).
1077 1078 """
1078 1079 if opts['alldata']:
1079 1080 opts['changelog'] = True
1080 1081
1081 1082 if opts.get('changelog') or opts.get('manifest'):
1082 1083 file_, rev = None, file_
1083 1084 elif rev is None:
1084 1085 raise error.CommandError('perfunidiff', 'invalid arguments')
1085 1086
1086 1087 textpairs = []
1087 1088
1088 1089 r = cmdutil.openrevlog(repo, 'perfunidiff', file_, opts)
1089 1090
1090 1091 startrev = r.rev(r.lookup(rev))
1091 1092 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1092 1093 if opts['alldata']:
1093 1094 # Load revisions associated with changeset.
1094 1095 ctx = repo[rev]
1095 1096 mtext = repo.manifestlog._revlog.revision(ctx.manifestnode())
1096 1097 for pctx in ctx.parents():
1097 1098 pman = repo.manifestlog._revlog.revision(pctx.manifestnode())
1098 1099 textpairs.append((pman, mtext))
1099 1100
1100 1101 # Load filelog revisions by iterating manifest delta.
1101 1102 man = ctx.manifest()
1102 1103 pman = ctx.p1().manifest()
1103 1104 for filename, change in pman.diff(man).items():
1104 1105 fctx = repo.file(filename)
1105 1106 f1 = fctx.revision(change[0][0] or -1)
1106 1107 f2 = fctx.revision(change[1][0] or -1)
1107 1108 textpairs.append((f1, f2))
1108 1109 else:
1109 1110 dp = r.deltaparent(rev)
1110 1111 textpairs.append((r.revision(dp), r.revision(rev)))
1111 1112
1112 1113 def d():
1113 1114 for left, right in textpairs:
1114 1115 # The date strings don't matter, so we pass empty strings.
1115 1116 headerlines, hunks = mdiff.unidiff(
1116 1117 left, '', right, '', 'left', 'right', binary=False)
1117 1118 # consume iterators in roughly the way patch.py does
1118 1119 b'\n'.join(headerlines)
1119 1120 b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
1120 1121 timer, fm = gettimer(ui, opts)
1121 1122 timer(d)
1122 1123 fm.end()
1123 1124
1124 1125 @command('perfdiffwd', formatteropts)
1125 1126 def perfdiffwd(ui, repo, **opts):
1126 1127 """Profile diff of working directory changes"""
1127 1128 timer, fm = gettimer(ui, opts)
1128 1129 options = {
1129 1130 'w': 'ignore_all_space',
1130 1131 'b': 'ignore_space_change',
1131 1132 'B': 'ignore_blank_lines',
1132 1133 }
1133 1134
1134 1135 for diffopt in ('', 'w', 'b', 'B', 'wB'):
1135 1136 opts = dict((options[c], '1') for c in diffopt)
1136 1137 def d():
1137 1138 ui.pushbuffer()
1138 1139 commands.diff(ui, repo, **opts)
1139 1140 ui.popbuffer()
1140 1141 title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
1141 1142 timer(d, title)
1142 1143 fm.end()
1143 1144
1144 1145 @command('perfrevlogindex', revlogopts + formatteropts,
1145 1146 '-c|-m|FILE')
1146 1147 def perfrevlogindex(ui, repo, file_=None, **opts):
1147 1148 """Benchmark operations against a revlog index.
1148 1149
1149 1150 This tests constructing a revlog instance, reading index data,
1150 1151 parsing index data, and performing various operations related to
1151 1152 index data.
1152 1153 """
1153 1154
1154 1155 rl = cmdutil.openrevlog(repo, 'perfrevlogindex', file_, opts)
1155 1156
1156 1157 opener = getattr(rl, 'opener') # trick linter
1157 1158 indexfile = rl.indexfile
1158 1159 data = opener.read(indexfile)
1159 1160
1160 1161 header = struct.unpack('>I', data[0:4])[0]
1161 1162 version = header & 0xFFFF
1162 1163 if version == 1:
1163 1164 revlogio = revlog.revlogio()
1164 1165 inline = header & (1 << 16)
1165 1166 else:
1166 1167 raise error.Abort(('unsupported revlog version: %d') % version)
1167 1168
1168 1169 rllen = len(rl)
1169 1170
1170 1171 node0 = rl.node(0)
1171 1172 node25 = rl.node(rllen // 4)
1172 1173 node50 = rl.node(rllen // 2)
1173 1174 node75 = rl.node(rllen // 4 * 3)
1174 1175 node100 = rl.node(rllen - 1)
1175 1176
1176 1177 allrevs = range(rllen)
1177 1178 allrevsrev = list(reversed(allrevs))
1178 1179 allnodes = [rl.node(rev) for rev in range(rllen)]
1179 1180 allnodesrev = list(reversed(allnodes))
1180 1181
1181 1182 def constructor():
1182 1183 revlog.revlog(opener, indexfile)
1183 1184
1184 1185 def read():
1185 1186 with opener(indexfile) as fh:
1186 1187 fh.read()
1187 1188
1188 1189 def parseindex():
1189 1190 revlogio.parseindex(data, inline)
1190 1191
1191 1192 def getentry(revornode):
1192 1193 index = revlogio.parseindex(data, inline)[0]
1193 1194 index[revornode]
1194 1195
1195 1196 def getentries(revs, count=1):
1196 1197 index = revlogio.parseindex(data, inline)[0]
1197 1198
1198 1199 for i in range(count):
1199 1200 for rev in revs:
1200 1201 index[rev]
1201 1202
1202 1203 def resolvenode(node):
1203 1204 nodemap = revlogio.parseindex(data, inline)[1]
1204 1205 # This only works for the C code.
1205 1206 if nodemap is None:
1206 1207 return
1207 1208
1208 1209 try:
1209 1210 nodemap[node]
1210 1211 except error.RevlogError:
1211 1212 pass
1212 1213
1213 1214 def resolvenodes(nodes, count=1):
1214 1215 nodemap = revlogio.parseindex(data, inline)[1]
1215 1216 if nodemap is None:
1216 1217 return
1217 1218
1218 1219 for i in range(count):
1219 1220 for node in nodes:
1220 1221 try:
1221 1222 nodemap[node]
1222 1223 except error.RevlogError:
1223 1224 pass
1224 1225
1225 1226 benches = [
1226 1227 (constructor, 'revlog constructor'),
1227 1228 (read, 'read'),
1228 1229 (parseindex, 'create index object'),
1229 1230 (lambda: getentry(0), 'retrieve index entry for rev 0'),
1230 1231 (lambda: resolvenode('a' * 20), 'look up missing node'),
1231 1232 (lambda: resolvenode(node0), 'look up node at rev 0'),
1232 1233 (lambda: resolvenode(node25), 'look up node at 1/4 len'),
1233 1234 (lambda: resolvenode(node50), 'look up node at 1/2 len'),
1234 1235 (lambda: resolvenode(node75), 'look up node at 3/4 len'),
1235 1236 (lambda: resolvenode(node100), 'look up node at tip'),
1236 1237 # 2x variation is to measure caching impact.
1237 1238 (lambda: resolvenodes(allnodes),
1238 1239 'look up all nodes (forward)'),
1239 1240 (lambda: resolvenodes(allnodes, 2),
1240 1241 'look up all nodes 2x (forward)'),
1241 1242 (lambda: resolvenodes(allnodesrev),
1242 1243 'look up all nodes (reverse)'),
1243 1244 (lambda: resolvenodes(allnodesrev, 2),
1244 1245 'look up all nodes 2x (reverse)'),
1245 1246 (lambda: getentries(allrevs),
1246 1247 'retrieve all index entries (forward)'),
1247 1248 (lambda: getentries(allrevs, 2),
1248 1249 'retrieve all index entries 2x (forward)'),
1249 1250 (lambda: getentries(allrevsrev),
1250 1251 'retrieve all index entries (reverse)'),
1251 1252 (lambda: getentries(allrevsrev, 2),
1252 1253 'retrieve all index entries 2x (reverse)'),
1253 1254 ]
1254 1255
1255 1256 for fn, title in benches:
1256 1257 timer, fm = gettimer(ui, opts)
1257 1258 timer(fn, title=title)
1258 1259 fm.end()
1259 1260
1260 1261 @command('perfrevlogrevisions', revlogopts + formatteropts +
1261 1262 [('d', 'dist', 100, 'distance between the revisions'),
1262 1263 ('s', 'startrev', 0, 'revision to start reading at'),
1263 1264 ('', 'reverse', False, 'read in reverse')],
1264 1265 '-c|-m|FILE')
1265 1266 def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
1266 1267 **opts):
1267 1268 """Benchmark reading a series of revisions from a revlog.
1268 1269
1269 1270 By default, we read every ``-d/--dist`` revision from 0 to tip of
1270 1271 the specified revlog.
1271 1272
1272 1273 The start revision can be defined via ``-s/--startrev``.
1273 1274 """
1274 1275 rl = cmdutil.openrevlog(repo, 'perfrevlogrevisions', file_, opts)
1275 1276 rllen = getlen(ui)(rl)
1276 1277
1277 1278 def d():
1278 1279 rl.clearcaches()
1279 1280
1280 1281 beginrev = startrev
1281 1282 endrev = rllen
1282 1283 dist = opts['dist']
1283 1284
1284 1285 if reverse:
1285 1286 beginrev, endrev = endrev, beginrev
1286 1287 dist = -1 * dist
1287 1288
1288 1289 for x in xrange(beginrev, endrev, dist):
1289 1290 # Old revisions don't support passing int.
1290 1291 n = rl.node(x)
1291 1292 rl.revision(n)
1292 1293
1293 1294 timer, fm = gettimer(ui, opts)
1294 1295 timer(d)
1295 1296 fm.end()
1296 1297
1297 1298 @command('perfrevlogchunks', revlogopts + formatteropts +
1298 1299 [('e', 'engines', '', 'compression engines to use'),
1299 1300 ('s', 'startrev', 0, 'revision to start at')],
1300 1301 '-c|-m|FILE')
1301 1302 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
1302 1303 """Benchmark operations on revlog chunks.
1303 1304
1304 1305 Logically, each revlog is a collection of fulltext revisions. However,
1305 1306 stored within each revlog are "chunks" of possibly compressed data. This
1306 1307 data needs to be read and decompressed or compressed and written.
1307 1308
1308 1309 This command measures the time it takes to read+decompress and recompress
1309 1310 chunks in a revlog. It effectively isolates I/O and compression performance.
1310 1311 For measurements of higher-level operations like resolving revisions,
1311 1312 see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
1312 1313 """
1313 1314 rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
1314 1315
1315 1316 # _chunkraw was renamed to _getsegmentforrevs.
1316 1317 try:
1317 1318 segmentforrevs = rl._getsegmentforrevs
1318 1319 except AttributeError:
1319 1320 segmentforrevs = rl._chunkraw
1320 1321
1321 1322 # Verify engines argument.
1322 1323 if engines:
1323 1324 engines = set(e.strip() for e in engines.split(','))
1324 1325 for engine in engines:
1325 1326 try:
1326 1327 util.compressionengines[engine]
1327 1328 except KeyError:
1328 1329 raise error.Abort('unknown compression engine: %s' % engine)
1329 1330 else:
1330 1331 engines = []
1331 1332 for e in util.compengines:
1332 1333 engine = util.compengines[e]
1333 1334 try:
1334 1335 if engine.available():
1335 1336 engine.revlogcompressor().compress('dummy')
1336 1337 engines.append(e)
1337 1338 except NotImplementedError:
1338 1339 pass
1339 1340
1340 1341 revs = list(rl.revs(startrev, len(rl) - 1))
1341 1342
1342 1343 def rlfh(rl):
1343 1344 if rl._inline:
1344 1345 return getsvfs(repo)(rl.indexfile)
1345 1346 else:
1346 1347 return getsvfs(repo)(rl.datafile)
1347 1348
1348 1349 def doread():
1349 1350 rl.clearcaches()
1350 1351 for rev in revs:
1351 1352 segmentforrevs(rev, rev)
1352 1353
1353 1354 def doreadcachedfh():
1354 1355 rl.clearcaches()
1355 1356 fh = rlfh(rl)
1356 1357 for rev in revs:
1357 1358 segmentforrevs(rev, rev, df=fh)
1358 1359
1359 1360 def doreadbatch():
1360 1361 rl.clearcaches()
1361 1362 segmentforrevs(revs[0], revs[-1])
1362 1363
1363 1364 def doreadbatchcachedfh():
1364 1365 rl.clearcaches()
1365 1366 fh = rlfh(rl)
1366 1367 segmentforrevs(revs[0], revs[-1], df=fh)
1367 1368
1368 1369 def dochunk():
1369 1370 rl.clearcaches()
1370 1371 fh = rlfh(rl)
1371 1372 for rev in revs:
1372 1373 rl._chunk(rev, df=fh)
1373 1374
1374 1375 chunks = [None]
1375 1376
1376 1377 def dochunkbatch():
1377 1378 rl.clearcaches()
1378 1379 fh = rlfh(rl)
1379 1380 # Save chunks as a side-effect.
1380 1381 chunks[0] = rl._chunks(revs, df=fh)
1381 1382
1382 1383 def docompress(compressor):
1383 1384 rl.clearcaches()
1384 1385
1385 1386 try:
1386 1387 # Swap in the requested compression engine.
1387 1388 oldcompressor = rl._compressor
1388 1389 rl._compressor = compressor
1389 1390 for chunk in chunks[0]:
1390 1391 rl.compress(chunk)
1391 1392 finally:
1392 1393 rl._compressor = oldcompressor
1393 1394
1394 1395 benches = [
1395 1396 (lambda: doread(), 'read'),
1396 1397 (lambda: doreadcachedfh(), 'read w/ reused fd'),
1397 1398 (lambda: doreadbatch(), 'read batch'),
1398 1399 (lambda: doreadbatchcachedfh(), 'read batch w/ reused fd'),
1399 1400 (lambda: dochunk(), 'chunk'),
1400 1401 (lambda: dochunkbatch(), 'chunk batch'),
1401 1402 ]
1402 1403
1403 1404 for engine in sorted(engines):
1404 1405 compressor = util.compengines[engine].revlogcompressor()
1405 1406 benches.append((functools.partial(docompress, compressor),
1406 1407 'compress w/ %s' % engine))
1407 1408
1408 1409 for fn, title in benches:
1409 1410 timer, fm = gettimer(ui, opts)
1410 1411 timer(fn, title=title)
1411 1412 fm.end()
1412 1413
1413 1414 @command('perfrevlogrevision', revlogopts + formatteropts +
1414 1415 [('', 'cache', False, 'use caches instead of clearing')],
1415 1416 '-c|-m|FILE REV')
1416 1417 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1417 1418 """Benchmark obtaining a revlog revision.
1418 1419
1419 1420 Obtaining a revlog revision consists of roughly the following steps:
1420 1421
1421 1422 1. Compute the delta chain
1422 1423 2. Obtain the raw chunks for that delta chain
1423 1424 3. Decompress each raw chunk
1424 1425 4. Apply binary patches to obtain fulltext
1425 1426 5. Verify hash of fulltext
1426 1427
1427 1428 This command measures the time spent in each of these phases.
1428 1429 """
1429 1430 if opts.get('changelog') or opts.get('manifest'):
1430 1431 file_, rev = None, file_
1431 1432 elif rev is None:
1432 1433 raise error.CommandError('perfrevlogrevision', 'invalid arguments')
1433 1434
1434 1435 r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
1435 1436
1436 1437 # _chunkraw was renamed to _getsegmentforrevs.
1437 1438 try:
1438 1439 segmentforrevs = r._getsegmentforrevs
1439 1440 except AttributeError:
1440 1441 segmentforrevs = r._chunkraw
1441 1442
1442 1443 node = r.lookup(rev)
1443 1444 rev = r.rev(node)
1444 1445
1445 1446 def getrawchunks(data, chain):
1446 1447 start = r.start
1447 1448 length = r.length
1448 1449 inline = r._inline
1449 1450 iosize = r._io.size
1450 1451 buffer = util.buffer
1451 1452 offset = start(chain[0])
1452 1453
1453 1454 chunks = []
1454 1455 ladd = chunks.append
1455 1456
1456 1457 for rev in chain:
1457 1458 chunkstart = start(rev)
1458 1459 if inline:
1459 1460 chunkstart += (rev + 1) * iosize
1460 1461 chunklength = length(rev)
1461 1462 ladd(buffer(data, chunkstart - offset, chunklength))
1462 1463
1463 1464 return chunks
1464 1465
1465 1466 def dodeltachain(rev):
1466 1467 if not cache:
1467 1468 r.clearcaches()
1468 1469 r._deltachain(rev)
1469 1470
1470 1471 def doread(chain):
1471 1472 if not cache:
1472 1473 r.clearcaches()
1473 1474 segmentforrevs(chain[0], chain[-1])
1474 1475
1475 1476 def dorawchunks(data, chain):
1476 1477 if not cache:
1477 1478 r.clearcaches()
1478 1479 getrawchunks(data, chain)
1479 1480
1480 1481 def dodecompress(chunks):
1481 1482 decomp = r.decompress
1482 1483 for chunk in chunks:
1483 1484 decomp(chunk)
1484 1485
1485 1486 def dopatch(text, bins):
1486 1487 if not cache:
1487 1488 r.clearcaches()
1488 1489 mdiff.patches(text, bins)
1489 1490
1490 1491 def dohash(text):
1491 1492 if not cache:
1492 1493 r.clearcaches()
1493 1494 r.checkhash(text, node, rev=rev)
1494 1495
1495 1496 def dorevision():
1496 1497 if not cache:
1497 1498 r.clearcaches()
1498 1499 r.revision(node)
1499 1500
1500 1501 chain = r._deltachain(rev)[0]
1501 1502 data = segmentforrevs(chain[0], chain[-1])[1]
1502 1503 rawchunks = getrawchunks(data, chain)
1503 1504 bins = r._chunks(chain)
1504 1505 text = str(bins[0])
1505 1506 bins = bins[1:]
1506 1507 text = mdiff.patches(text, bins)
1507 1508
1508 1509 benches = [
1509 1510 (lambda: dorevision(), 'full'),
1510 1511 (lambda: dodeltachain(rev), 'deltachain'),
1511 1512 (lambda: doread(chain), 'read'),
1512 1513 (lambda: dorawchunks(data, chain), 'rawchunks'),
1513 1514 (lambda: dodecompress(rawchunks), 'decompress'),
1514 1515 (lambda: dopatch(text, bins), 'patch'),
1515 1516 (lambda: dohash(text), 'hash'),
1516 1517 ]
1517 1518
1518 1519 for fn, title in benches:
1519 1520 timer, fm = gettimer(ui, opts)
1520 1521 timer(fn, title=title)
1521 1522 fm.end()
1522 1523
1523 1524 @command('perfrevset',
1524 1525 [('C', 'clear', False, 'clear volatile cache between each call.'),
1525 1526 ('', 'contexts', False, 'obtain changectx for each revision')]
1526 1527 + formatteropts, "REVSET")
1527 1528 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1528 1529 """benchmark the execution time of a revset
1529 1530
1530 1531 Use the --clean option if need to evaluate the impact of build volatile
1531 1532 revisions set cache on the revset execution. Volatile cache hold filtered
1532 1533 and obsolete related cache."""
1533 1534 timer, fm = gettimer(ui, opts)
1534 1535 def d():
1535 1536 if clear:
1536 1537 repo.invalidatevolatilesets()
1537 1538 if contexts:
1538 1539 for ctx in repo.set(expr): pass
1539 1540 else:
1540 1541 for r in repo.revs(expr): pass
1541 1542 timer(d)
1542 1543 fm.end()
1543 1544
1544 1545 @command('perfvolatilesets',
1545 1546 [('', 'clear-obsstore', False, 'drop obsstore between each call.'),
1546 1547 ] + formatteropts)
1547 1548 def perfvolatilesets(ui, repo, *names, **opts):
1548 1549 """benchmark the computation of various volatile set
1549 1550
1550 1551 Volatile set computes element related to filtering and obsolescence."""
1551 1552 timer, fm = gettimer(ui, opts)
1552 1553 repo = repo.unfiltered()
1553 1554
1554 1555 def getobs(name):
1555 1556 def d():
1556 1557 repo.invalidatevolatilesets()
1557 1558 if opts['clear_obsstore']:
1558 1559 clearfilecache(repo, 'obsstore')
1559 1560 obsolete.getrevs(repo, name)
1560 1561 return d
1561 1562
1562 1563 allobs = sorted(obsolete.cachefuncs)
1563 1564 if names:
1564 1565 allobs = [n for n in allobs if n in names]
1565 1566
1566 1567 for name in allobs:
1567 1568 timer(getobs(name), title=name)
1568 1569
1569 1570 def getfiltered(name):
1570 1571 def d():
1571 1572 repo.invalidatevolatilesets()
1572 1573 if opts['clear_obsstore']:
1573 1574 clearfilecache(repo, 'obsstore')
1574 1575 repoview.filterrevs(repo, name)
1575 1576 return d
1576 1577
1577 1578 allfilter = sorted(repoview.filtertable)
1578 1579 if names:
1579 1580 allfilter = [n for n in allfilter if n in names]
1580 1581
1581 1582 for name in allfilter:
1582 1583 timer(getfiltered(name), title=name)
1583 1584 fm.end()
1584 1585
1585 1586 @command('perfbranchmap',
1586 1587 [('f', 'full', False,
1587 1588 'Includes build time of subset'),
1588 1589 ('', 'clear-revbranch', False,
1589 1590 'purge the revbranch cache between computation'),
1590 1591 ] + formatteropts)
1591 1592 def perfbranchmap(ui, repo, *filternames, **opts):
1592 1593 """benchmark the update of a branchmap
1593 1594
1594 1595 This benchmarks the full repo.branchmap() call with read and write disabled
1595 1596 """
1596 1597 full = opts.get("full", False)
1597 1598 clear_revbranch = opts.get("clear_revbranch", False)
1598 1599 timer, fm = gettimer(ui, opts)
1599 1600 def getbranchmap(filtername):
1600 1601 """generate a benchmark function for the filtername"""
1601 1602 if filtername is None:
1602 1603 view = repo
1603 1604 else:
1604 1605 view = repo.filtered(filtername)
1605 1606 def d():
1606 1607 if clear_revbranch:
1607 1608 repo.revbranchcache()._clear()
1608 1609 if full:
1609 1610 view._branchcaches.clear()
1610 1611 else:
1611 1612 view._branchcaches.pop(filtername, None)
1612 1613 view.branchmap()
1613 1614 return d
1614 1615 # add filter in smaller subset to bigger subset
1615 1616 possiblefilters = set(repoview.filtertable)
1616 1617 if filternames:
1617 1618 possiblefilters &= set(filternames)
1618 1619 subsettable = getbranchmapsubsettable()
1619 1620 allfilters = []
1620 1621 while possiblefilters:
1621 1622 for name in possiblefilters:
1622 1623 subset = subsettable.get(name)
1623 1624 if subset not in possiblefilters:
1624 1625 break
1625 1626 else:
1626 1627 assert False, 'subset cycle %s!' % possiblefilters
1627 1628 allfilters.append(name)
1628 1629 possiblefilters.remove(name)
1629 1630
1630 1631 # warm the cache
1631 1632 if not full:
1632 1633 for name in allfilters:
1633 1634 repo.filtered(name).branchmap()
1634 1635 if not filternames or 'unfiltered' in filternames:
1635 1636 # add unfiltered
1636 1637 allfilters.append(None)
1637 1638
1638 1639 branchcacheread = safeattrsetter(branchmap, 'read')
1639 1640 branchcachewrite = safeattrsetter(branchmap.branchcache, 'write')
1640 1641 branchcacheread.set(lambda repo: None)
1641 1642 branchcachewrite.set(lambda bc, repo: None)
1642 1643 try:
1643 1644 for name in allfilters:
1644 1645 printname = name
1645 1646 if name is None:
1646 1647 printname = 'unfiltered'
1647 1648 timer(getbranchmap(name), title=str(printname))
1648 1649 finally:
1649 1650 branchcacheread.restore()
1650 1651 branchcachewrite.restore()
1651 1652 fm.end()
1652 1653
1653 1654 @command('perfloadmarkers')
1654 1655 def perfloadmarkers(ui, repo):
1655 1656 """benchmark the time to parse the on-disk markers for a repo
1656 1657
1657 1658 Result is the number of markers in the repo."""
1658 1659 timer, fm = gettimer(ui)
1659 1660 svfs = getsvfs(repo)
1660 1661 timer(lambda: len(obsolete.obsstore(svfs)))
1661 1662 fm.end()
1662 1663
1663 1664 @command('perflrucachedict', formatteropts +
1664 1665 [('', 'size', 4, 'size of cache'),
1665 1666 ('', 'gets', 10000, 'number of key lookups'),
1666 1667 ('', 'sets', 10000, 'number of key sets'),
1667 1668 ('', 'mixed', 10000, 'number of mixed mode operations'),
1668 1669 ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')],
1669 1670 norepo=True)
1670 1671 def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000,
1671 1672 mixedgetfreq=50, **opts):
1672 1673 def doinit():
1673 1674 for i in xrange(10000):
1674 1675 util.lrucachedict(size)
1675 1676
1676 1677 values = []
1677 1678 for i in xrange(size):
1678 1679 values.append(random.randint(0, sys.maxint))
1679 1680
1680 1681 # Get mode fills the cache and tests raw lookup performance with no
1681 1682 # eviction.
1682 1683 getseq = []
1683 1684 for i in xrange(gets):
1684 1685 getseq.append(random.choice(values))
1685 1686
1686 1687 def dogets():
1687 1688 d = util.lrucachedict(size)
1688 1689 for v in values:
1689 1690 d[v] = v
1690 1691 for key in getseq:
1691 1692 value = d[key]
1692 1693 value # silence pyflakes warning
1693 1694
1694 1695 # Set mode tests insertion speed with cache eviction.
1695 1696 setseq = []
1696 1697 for i in xrange(sets):
1697 1698 setseq.append(random.randint(0, sys.maxint))
1698 1699
1699 1700 def dosets():
1700 1701 d = util.lrucachedict(size)
1701 1702 for v in setseq:
1702 1703 d[v] = v
1703 1704
1704 1705 # Mixed mode randomly performs gets and sets with eviction.
1705 1706 mixedops = []
1706 1707 for i in xrange(mixed):
1707 1708 r = random.randint(0, 100)
1708 1709 if r < mixedgetfreq:
1709 1710 op = 0
1710 1711 else:
1711 1712 op = 1
1712 1713
1713 1714 mixedops.append((op, random.randint(0, size * 2)))
1714 1715
1715 1716 def domixed():
1716 1717 d = util.lrucachedict(size)
1717 1718
1718 1719 for op, v in mixedops:
1719 1720 if op == 0:
1720 1721 try:
1721 1722 d[v]
1722 1723 except KeyError:
1723 1724 pass
1724 1725 else:
1725 1726 d[v] = v
1726 1727
1727 1728 benches = [
1728 1729 (doinit, 'init'),
1729 1730 (dogets, 'gets'),
1730 1731 (dosets, 'sets'),
1731 1732 (domixed, 'mixed')
1732 1733 ]
1733 1734
1734 1735 for fn, title in benches:
1735 1736 timer, fm = gettimer(ui, opts)
1736 1737 timer(fn, title=title)
1737 1738 fm.end()
1738 1739
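The benchmarks above exercise util.lrucachedict purely through its mapping interface. As a minimal sketch of that interface (assuming only that mercurial.util is importable; the key values are arbitrary):

    from mercurial import util

    d = util.lrucachedict(4)      # capacity of four keys
    for v in (1, 2, 3, 4, 5):
        d[v] = v                  # the fifth insert evicts the least recently used key
    try:
        d[1]                      # the evicted key raises KeyError, mirroring domixed() above
    except KeyError:
        pass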
1739 1740 @command('perfwrite', formatteropts)
1740 1741 def perfwrite(ui, repo, **opts):
1741 1742 """microbenchmark ui.write
1742 1743 """
1743 1744 timer, fm = gettimer(ui, opts)
1744 1745 def write():
1745 1746 for i in range(100000):
1746 1747 ui.write(('Testing write performance\n'))
1747 1748 timer(write)
1748 1749 fm.end()
1749 1750
1750 1751 def uisetup(ui):
1751 1752 if (util.safehasattr(cmdutil, 'openrevlog') and
1752 1753 not util.safehasattr(commands, 'debugrevlogopts')):
1753 1754 # for "historical portability":
1754 1755 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
1755 1756 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
1756 1757 # openrevlog() should cause failure, because it has been
1757 1758 # available since 3.5 (or 49c583ca48c4).
1758 1759 def openrevlog(orig, repo, cmd, file_, opts):
1759 1760 if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
1760 1761 raise error.Abort("This version doesn't support --dir option",
1761 1762 hint="use 3.5 or later")
1762 1763 return orig(repo, cmd, file_, opts)
1763 1764 extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
@@ -1,1496 +1,1495 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13 import os
14 14
15 15 from mercurial.i18n import _
16 16
17 17 from mercurial import (
18 18 archival,
19 19 cmdutil,
20 20 error,
21 21 hg,
22 22 logcmdutil,
23 23 match as matchmod,
24 24 pathutil,
25 25 pycompat,
26 26 registrar,
27 27 scmutil,
28 28 smartset,
29 29 util,
30 30 )
31 31
32 32 from . import (
33 33 lfcommands,
34 34 lfutil,
35 35 storefactory,
36 36 )
37 37
38 38 # -- Utility functions: commonly/repeatedly needed functionality ---------------
39 39
40 40 def composelargefilematcher(match, manifest):
41 41 '''create a matcher that matches only the largefiles in the original
42 42 matcher'''
43 43 m = copy.copy(match)
44 44 lfile = lambda f: lfutil.standin(f) in manifest
45 45 m._files = [lf for lf in m._files if lfile(lf)]
46 46 m._fileset = set(m._files)
47 47 m.always = lambda: False
48 48 origmatchfn = m.matchfn
49 49 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
50 50 return m
51 51
52 52 def composenormalfilematcher(match, manifest, exclude=None):
53 53 excluded = set()
54 54 if exclude is not None:
55 55 excluded.update(exclude)
56 56
57 57 m = copy.copy(match)
58 58 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
59 59 manifest or f in excluded)
60 60 m._files = [lf for lf in m._files if notlfile(lf)]
61 61 m._fileset = set(m._files)
62 62 m.always = lambda: False
63 63 origmatchfn = m.matchfn
64 64 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
65 65 return m
66 66
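To illustrate the intent of the two helpers above: from one user-supplied matcher they produce complementary views, one restricted to largefiles and one to the remaining files. A hedged sketch, where `m` and `manifest` are hypothetical placeholders for a real matcher and a working-context manifest:

    def splitmatcher(m, manifest):
        # every file is accepted by at most one of the two composed matchers
        lfm = composelargefilematcher(m, manifest)    # only files whose standin is tracked
        nfm = composenormalfilematcher(m, manifest)   # only the remaining files
        return lfm, nfm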
67 67 def installnormalfilesmatchfn(manifest):
68 68 '''installmatchfn with a matchfn that ignores all largefiles'''
69 69 def overridematch(ctx, pats=(), opts=None, globbed=False,
70 70 default='relpath', badfn=None):
71 71 if opts is None:
72 72 opts = {}
73 73 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
74 74 return composenormalfilematcher(match, manifest)
75 75 oldmatch = installmatchfn(overridematch)
76 76
77 77 def installmatchfn(f):
78 78 '''monkey patch the scmutil module with a custom match function.
79 79 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
80 80 oldmatch = scmutil.match
81 81 setattr(f, 'oldmatch', oldmatch)
82 82 scmutil.match = f
83 83 return oldmatch
84 84
85 85 def restorematchfn():
86 86 '''restores scmutil.match to what it was before installmatchfn
87 87 was called. No-op if scmutil.match is its original function.
88 88
89 89 Note that n calls to installmatchfn will require n calls to
90 90 restore the original matchfn.'''
91 91 scmutil.match = getattr(scmutil.match, 'oldmatch')
92 92
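A minimal sketch of the install/restore contract documented above; the custom matcher here is a hypothetical pass-through that simply delegates to whatever scmutil.match was before installation:

    def passthroughmatch(ctx, pats=(), opts=None, globbed=False,
                         default='relpath', badfn=None):
        # delegate to the previously installed scmutil.match
        return passthroughmatch.oldmatch(ctx, pats, opts or {}, globbed,
                                         default, badfn=badfn)

    installmatchfn(passthroughmatch)   # scmutil.match is now passthroughmatch
    try:
        pass                           # run code that calls scmutil.match here
    finally:
        restorematchfn()               # one restore per install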
93 93 def installmatchandpatsfn(f):
94 94 oldmatchandpats = scmutil.matchandpats
95 95 setattr(f, 'oldmatchandpats', oldmatchandpats)
96 96 scmutil.matchandpats = f
97 97 return oldmatchandpats
98 98
99 99 def restorematchandpatsfn():
100 100 '''restores scmutil.matchandpats to what it was before
101 101 installmatchandpatsfn was called. No-op if scmutil.matchandpats
102 102 is its original function.
103 103
104 104 Note that n calls to installmatchandpatsfn will require n calls
105 105 to restore the original matchandpats function.'''
106 106 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
107 107 scmutil.matchandpats)
108 108
109 109 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
110 110 large = opts.get(r'large')
111 111 lfsize = lfutil.getminsize(
112 112 ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
113 113
114 114 lfmatcher = None
115 115 if lfutil.islfilesrepo(repo):
116 116 lfpats = ui.configlist(lfutil.longname, 'patterns')
117 117 if lfpats:
118 118 lfmatcher = matchmod.match(repo.root, '', list(lfpats))
119 119
120 120 lfnames = []
121 121 m = matcher
122 122
123 123 wctx = repo[None]
124 124 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
125 125 exact = m.exact(f)
126 126 lfile = lfutil.standin(f) in wctx
127 127 nfile = f in wctx
128 128 exists = lfile or nfile
129 129
130 130 # addremove in core gets fancy with the name, add doesn't
131 131 if isaddremove:
132 132 name = m.uipath(f)
133 133 else:
134 134 name = m.rel(f)
135 135
136 136 # Don't warn the user when they attempt to add a normal tracked file.
137 137 # The normal add code will do that for us.
138 138 if exact and exists:
139 139 if lfile:
140 140 ui.warn(_('%s already a largefile\n') % name)
141 141 continue
142 142
143 143 if (exact or not exists) and not lfutil.isstandin(f):
144 144 # In case the file was removed previously, but not committed
145 145 # (issue3507)
146 146 if not repo.wvfs.exists(f):
147 147 continue
148 148
149 149 abovemin = (lfsize and
150 150 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
151 151 if large or abovemin or (lfmatcher and lfmatcher(f)):
152 152 lfnames.append(f)
153 153 if ui.verbose or not exact:
154 154 ui.status(_('adding %s as a largefile\n') % name)
155 155
156 156 bad = []
157 157
158 158 # Need to lock, otherwise there could be a race condition between
159 159 # when standins are created and added to the repo.
160 160 with repo.wlock():
161 161 if not opts.get(r'dry_run'):
162 162 standins = []
163 163 lfdirstate = lfutil.openlfdirstate(ui, repo)
164 164 for f in lfnames:
165 165 standinname = lfutil.standin(f)
166 166 lfutil.writestandin(repo, standinname, hash='',
167 167 executable=lfutil.getexecutable(repo.wjoin(f)))
168 168 standins.append(standinname)
169 169 if lfdirstate[f] == 'r':
170 170 lfdirstate.normallookup(f)
171 171 else:
172 172 lfdirstate.add(f)
173 173 lfdirstate.write()
174 174 bad += [lfutil.splitstandin(f)
175 175 for f in repo[None].add(standins)
176 176 if f in m.files()]
177 177
178 178 added = [f for f in lfnames if f not in bad]
179 179 return added, bad
180 180
181 181 def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts):
182 182 after = opts.get(r'after')
183 183 m = composelargefilematcher(matcher, repo[None].manifest())
184 184 try:
185 185 repo.lfstatus = True
186 186 s = repo.status(match=m, clean=not isaddremove)
187 187 finally:
188 188 repo.lfstatus = False
189 189 manifest = repo[None].manifest()
190 190 modified, added, deleted, clean = [[f for f in list
191 191 if lfutil.standin(f) in manifest]
192 192 for list in (s.modified, s.added,
193 193 s.deleted, s.clean)]
194 194
195 195 def warn(files, msg):
196 196 for f in files:
197 197 ui.warn(msg % m.rel(f))
198 198 return int(len(files) > 0)
199 199
200 200 result = 0
201 201
202 202 if after:
203 203 remove = deleted
204 204 result = warn(modified + added + clean,
205 205 _('not removing %s: file still exists\n'))
206 206 else:
207 207 remove = deleted + clean
208 208 result = warn(modified, _('not removing %s: file is modified (use -f'
209 209 ' to force removal)\n'))
210 210 result = warn(added, _('not removing %s: file has been marked for add'
211 211 ' (use forget to undo)\n')) or result
212 212
213 213 # Need to lock because standin files are deleted then removed from the
214 214 # repository and we could race in-between.
215 215 with repo.wlock():
216 216 lfdirstate = lfutil.openlfdirstate(ui, repo)
217 217 for f in sorted(remove):
218 218 if ui.verbose or not m.exact(f):
219 219 # addremove in core gets fancy with the name, remove doesn't
220 220 if isaddremove:
221 221 name = m.uipath(f)
222 222 else:
223 223 name = m.rel(f)
224 224 ui.status(_('removing %s\n') % name)
225 225
226 226 if not dryrun:
227 227 if not after:
228 228 repo.wvfs.unlinkpath(f, ignoremissing=True)
229 229
230 230 if dryrun:
231 231 return result
232 232
233 233 remove = [lfutil.standin(f) for f in remove]
234 234 # If this is being called by addremove, let the original addremove
235 235 # function handle this.
236 236 if not isaddremove:
237 237 for f in remove:
238 238 repo.wvfs.unlinkpath(f, ignoremissing=True)
239 239 repo[None].forget(remove)
240 240
241 241 for f in remove:
242 242 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
243 243 False)
244 244
245 245 lfdirstate.write()
246 246
247 247 return result
248 248
249 249 # For overriding mercurial.hgweb.webcommands so that largefiles will
250 250 # appear at their right place in the manifests.
251 251 def decodepath(orig, path):
252 252 return lfutil.splitstandin(path) or path
253 253
254 254 # -- Wrappers: modify existing commands --------------------------------
255 255
256 256 def overrideadd(orig, ui, repo, *pats, **opts):
257 257 if opts.get(r'normal') and opts.get(r'large'):
258 258 raise error.Abort(_('--normal cannot be used with --large'))
259 259 return orig(ui, repo, *pats, **opts)
260 260
261 261 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
262 262 # The --normal flag short circuits this override
263 263 if opts.get(r'normal'):
264 264 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
265 265
266 266 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
267 267 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
268 268 ladded)
269 269 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
270 270
271 271 bad.extend(f for f in lbad)
272 272 return bad
273 273
274 274 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos,
275 275 dryrun):
276 276 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
277 277 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos,
278 278 dryrun)
279 279 return removelargefiles(ui, repo, False, matcher, dryrun, after=after,
280 280 force=force) or result
281 281
282 282 def overridestatusfn(orig, repo, rev2, **opts):
283 283 try:
284 284 repo._repo.lfstatus = True
285 285 return orig(repo, rev2, **opts)
286 286 finally:
287 287 repo._repo.lfstatus = False
288 288
289 289 def overridestatus(orig, ui, repo, *pats, **opts):
290 290 try:
291 291 repo.lfstatus = True
292 292 return orig(ui, repo, *pats, **opts)
293 293 finally:
294 294 repo.lfstatus = False
295 295
296 296 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
297 297 try:
298 298 repo._repo.lfstatus = True
299 299 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
300 300 finally:
301 301 repo._repo.lfstatus = False
302 302
303 303 def overridelog(orig, ui, repo, *pats, **opts):
304 304 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
305 305 default='relpath', badfn=None):
306 306 """Matcher that merges root directory with .hglf, suitable for log.
307 307 It is still possible to match .hglf directly.
308 308 For any listed files run log on the standin too.
309 309 matchfn tries both the given filename and with .hglf stripped.
310 310 """
311 311 if opts is None:
312 312 opts = {}
313 313 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
314 314 badfn=badfn)
315 315 m, p = copy.copy(matchandpats)
316 316
317 317 if m.always():
318 318 # We want to match everything anyway, so there's no benefit trying
319 319 # to add standins.
320 320 return matchandpats
321 321
322 322 pats = set(p)
323 323
324 324 def fixpats(pat, tostandin=lfutil.standin):
325 325 if pat.startswith('set:'):
326 326 return pat
327 327
328 328 kindpat = matchmod._patsplit(pat, None)
329 329
330 330 if kindpat[0] is not None:
331 331 return kindpat[0] + ':' + tostandin(kindpat[1])
332 332 return tostandin(kindpat[1])
333 333
334 334 if m._cwd:
335 335 hglf = lfutil.shortname
336 336 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
337 337
338 338 def tostandin(f):
339 339 # The file may already be a standin, so truncate the back
340 340 # prefix and test before mangling it. This avoids turning
341 341 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
342 342 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
343 343 return f
344 344
345 345 # An absolute path is from outside the repo, so truncate the
346 346 # path to the root before building the standin. Otherwise cwd
347 347 # is somewhere in the repo, relative to root, and needs to be
348 348 # prepended before building the standin.
349 349 if os.path.isabs(m._cwd):
350 350 f = f[len(back):]
351 351 else:
352 352 f = m._cwd + '/' + f
353 353 return back + lfutil.standin(f)
354 354 else:
355 355 def tostandin(f):
356 356 if lfutil.isstandin(f):
357 357 return f
358 358 return lfutil.standin(f)
359 359 pats.update(fixpats(f, tostandin) for f in p)
360 360
361 361 for i in range(0, len(m._files)):
362 362 # Don't add '.hglf' to m.files, since that is already covered by '.'
363 363 if m._files[i] == '.':
364 364 continue
365 365 standin = lfutil.standin(m._files[i])
366 366 # If the "standin" is a directory, append instead of replace to
367 367 # support naming a directory on the command line with only
368 368 # largefiles. The original directory is kept to support normal
369 369 # files.
370 370 if standin in ctx:
371 371 m._files[i] = standin
372 372 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
373 373 m._files.append(standin)
374 374
375 375 m._fileset = set(m._files)
376 376 m.always = lambda: False
377 377 origmatchfn = m.matchfn
378 378 def lfmatchfn(f):
379 379 lf = lfutil.splitstandin(f)
380 380 if lf is not None and origmatchfn(lf):
381 381 return True
382 382 r = origmatchfn(f)
383 383 return r
384 384 m.matchfn = lfmatchfn
385 385
386 386 ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
387 387 return m, pats
388 388
389 389 # For hg log --patch, the match object is used in two different senses:
390 390 # (1) to determine what revisions should be printed out, and
391 391 # (2) to determine what files to print out diffs for.
392 392 # The magic matchandpats override should be used for case (1) but not for
393 393 # case (2).
394 394 def overridemakefilematcher(repo, pats, opts, badfn=None):
395 395 wctx = repo[None]
396 396 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
397 397 return lambda ctx: match
398 398
399 399 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
400 400 oldmakefilematcher = logcmdutil._makenofollowfilematcher
401 401 setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher)
402 402
403 403 try:
404 404 return orig(ui, repo, *pats, **opts)
405 405 finally:
406 406 restorematchandpatsfn()
407 407 setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher)
408 408
409 409 def overrideverify(orig, ui, repo, *pats, **opts):
410 410 large = opts.pop(r'large', False)
411 411 all = opts.pop(r'lfa', False)
412 412 contents = opts.pop(r'lfc', False)
413 413
414 414 result = orig(ui, repo, *pats, **opts)
415 415 if large or all or contents:
416 416 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
417 417 return result
418 418
419 419 def overridedebugstate(orig, ui, repo, *pats, **opts):
420 420 large = opts.pop(r'large', False)
421 421 if large:
422 422 class fakerepo(object):
423 423 dirstate = lfutil.openlfdirstate(ui, repo)
424 424 orig(ui, fakerepo, *pats, **opts)
425 425 else:
426 426 orig(ui, repo, *pats, **opts)
427 427
428 428 # Before starting the manifest merge, merge.updates will call
429 429 # _checkunknownfile to check if there are any files in the merged-in
430 430 # changeset that collide with unknown files in the working copy.
431 431 #
432 432 # The largefiles are seen as unknown, so this prevents us from merging
433 433 # in a file 'foo' if we already have a largefile with the same name.
434 434 #
435 435 # The overridden function filters the unknown files by removing any
436 436 # largefiles. This makes the merge proceed and we can then handle this
437 437 # case further in the overridden calculateupdates function below.
438 438 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
439 439 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
440 440 return False
441 441 return origfn(repo, wctx, mctx, f, f2)
442 442
443 443 # The manifest merge handles conflicts on the manifest level. We want
444 444 # to handle changes in largefile-ness of files at this level too.
445 445 #
446 446 # The strategy is to run the original calculateupdates and then process
447 447 # the action list it outputs. There are two cases we need to deal with:
448 448 #
449 449 # 1. Normal file in p1, largefile in p2. Here the largefile is
450 450 # detected via its standin file, which will enter the working copy
451 451 # with a "get" action. It is not "merge" since the standin is all
452 452 # Mercurial is concerned with at this level -- the link to the
453 453 # existing normal file is not relevant here.
454 454 #
455 455 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
456 456 # since the largefile will be present in the working copy and
457 457 # different from the normal file in p2. Mercurial therefore
458 458 # triggers a merge action.
459 459 #
460 460 # In both cases, we prompt the user and emit new actions to either
461 461 # remove the standin (if the normal file was kept) or to remove the
462 462 # normal file and get the standin (if the largefile was kept). The
463 463 # default prompt answer is to use the largefile version since it was
464 464 # presumably changed on purpose.
465 465 #
466 466 # Finally, the merge.applyupdates function will then take care of
467 467 # writing the files into the working copy and lfcommands.updatelfiles
468 468 # will update the largefiles.
469 469 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
470 470 acceptremote, *args, **kwargs):
471 471 overwrite = force and not branchmerge
472 472 actions, diverge, renamedelete = origfn(
473 473 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
474 474
475 475 if overwrite:
476 476 return actions, diverge, renamedelete
477 477
478 478 # Convert to dictionary with filename as key and action as value.
479 479 lfiles = set()
480 480 for f in actions:
481 481 splitstandin = lfutil.splitstandin(f)
482 482 if splitstandin in p1:
483 483 lfiles.add(splitstandin)
484 484 elif lfutil.standin(f) in p1:
485 485 lfiles.add(f)
486 486
487 487 for lfile in sorted(lfiles):
488 488 standin = lfutil.standin(lfile)
489 489 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
490 490 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
491 491 if sm in ('g', 'dc') and lm != 'r':
492 492 if sm == 'dc':
493 493 f1, f2, fa, move, anc = sargs
494 494 sargs = (p2[f2].flags(), False)
495 495 # Case 1: normal file in the working copy, largefile in
496 496 # the second parent
497 497 usermsg = _('remote turned local normal file %s into a largefile\n'
498 498 'use (l)argefile or keep (n)ormal file?'
499 499 '$$ &Largefile $$ &Normal file') % lfile
500 500 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
501 501 actions[lfile] = ('r', None, 'replaced by standin')
502 502 actions[standin] = ('g', sargs, 'replaces standin')
503 503 else: # keep local normal file
504 504 actions[lfile] = ('k', None, 'replaces standin')
505 505 if branchmerge:
506 506 actions[standin] = ('k', None, 'replaced by non-standin')
507 507 else:
508 508 actions[standin] = ('r', None, 'replaced by non-standin')
509 509 elif lm in ('g', 'dc') and sm != 'r':
510 510 if lm == 'dc':
511 511 f1, f2, fa, move, anc = largs
512 512 largs = (p2[f2].flags(), False)
513 513 # Case 2: largefile in the working copy, normal file in
514 514 # the second parent
515 515 usermsg = _('remote turned local largefile %s into a normal file\n'
516 516 'keep (l)argefile or use (n)ormal file?'
517 517 '$$ &Largefile $$ &Normal file') % lfile
518 518 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
519 519 if branchmerge:
520 520 # largefile can be restored from standin safely
521 521 actions[lfile] = ('k', None, 'replaced by standin')
522 522 actions[standin] = ('k', None, 'replaces standin')
523 523 else:
524 524 # "lfile" should be marked as "removed" without
525 525 # removal of itself
526 526 actions[lfile] = ('lfmr', None,
527 527 'forget non-standin largefile')
528 528
529 529 # linear-merge should treat this largefile as 're-added'
530 530 actions[standin] = ('a', None, 'keep standin')
531 531 else: # pick remote normal file
532 532 actions[lfile] = ('g', largs, 'replaces standin')
533 533 actions[standin] = ('r', None, 'replaced by non-standin')
534 534
535 535 return actions, diverge, renamedelete
536 536
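To make case 1 above concrete, the override roughly rewrites the action dictionary as sketched below; the file name and flags are hypothetical, and sargs stands in for the arguments derived from p2:

    # sketch only, assuming the user answered "(l)argefile" at the prompt
    sargs = ('', False)                                           # (flags, backup) placeholder
    actions = {'.hglf/big.bin': ('g', sargs, 'remote created')}
    actions['big.bin'] = ('r', None, 'replaced by standin')       # drop the normal file
    actions['.hglf/big.bin'] = ('g', sargs, 'replaces standin')   # fetch the standin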
537 537 def mergerecordupdates(orig, repo, actions, branchmerge):
538 538 if 'lfmr' in actions:
539 539 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
540 540 for lfile, args, msg in actions['lfmr']:
541 541 # this should be executed before 'orig', to execute 'remove'
542 542 # before all other actions
543 543 repo.dirstate.remove(lfile)
544 544 # make sure lfile doesn't get synclfdirstate'd as normal
545 545 lfdirstate.add(lfile)
546 546 lfdirstate.write()
547 547
548 548 return orig(repo, actions, branchmerge)
549 549
550 550 # Override filemerge to prompt the user about how they wish to merge
551 551 # largefiles. This will handle identical edits without prompting the user.
552 552 def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
553 553 labels=None):
554 554 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
555 555 return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
556 556 labels=labels)
557 557
558 558 ahash = lfutil.readasstandin(fca).lower()
559 559 dhash = lfutil.readasstandin(fcd).lower()
560 560 ohash = lfutil.readasstandin(fco).lower()
561 561 if (ohash != ahash and
562 562 ohash != dhash and
563 563 (dhash == ahash or
564 564 repo.ui.promptchoice(
565 565 _('largefile %s has a merge conflict\nancestor was %s\n'
566 566 'keep (l)ocal %s or\ntake (o)ther %s?'
567 567 '$$ &Local $$ &Other') %
568 568 (lfutil.splitstandin(orig), ahash, dhash, ohash),
569 569 0) == 1)):
570 570 repo.wwrite(fcd.path(), fco.data(), fco.flags())
571 571 return True, 0, False
572 572
573 573 def copiespathcopies(orig, ctx1, ctx2, match=None):
574 574 copies = orig(ctx1, ctx2, match=match)
575 575 updated = {}
576 576
577 577 for k, v in copies.iteritems():
578 578 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
579 579
580 580 return updated
581 581
582 582 # Copy first changes the matchers to match standins instead of
583 583 # largefiles. Then it overrides util.copyfile in that function it
584 584 # checks if the destination largefile already exists. It also keeps a
585 585 # list of copied files so that the largefiles can be copied and the
586 586 # dirstate updated.
587 587 def overridecopy(orig, ui, repo, pats, opts, rename=False):
588 588 # doesn't remove largefile on rename
589 589 if len(pats) < 2:
590 590 # this isn't legal, let the original function deal with it
591 591 return orig(ui, repo, pats, opts, rename)
592 592
593 593 # This could copy both lfiles and normal files in one command,
594 594 # but we don't want to do that. First replace their matcher to
595 595 # only match normal files and run it, then replace it to just
596 596 # match largefiles and run it again.
597 597 nonormalfiles = False
598 598 nolfiles = False
599 599 installnormalfilesmatchfn(repo[None].manifest())
600 600 try:
601 601 result = orig(ui, repo, pats, opts, rename)
602 602 except error.Abort as e:
603 603 if pycompat.bytestr(e) != _('no files to copy'):
604 604 raise e
605 605 else:
606 606 nonormalfiles = True
607 607 result = 0
608 608 finally:
609 609 restorematchfn()
610 610
611 611 # The first rename can cause our current working directory to be removed.
612 612 # In that case there is nothing left to copy/rename so just quit.
613 613 try:
614 614 repo.getcwd()
615 615 except OSError:
616 616 return result
617 617
618 618 def makestandin(relpath):
619 619 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
620 620 return repo.wvfs.join(lfutil.standin(path))
621 621
622 622 fullpats = scmutil.expandpats(pats)
623 623 dest = fullpats[-1]
624 624
625 625 if os.path.isdir(dest):
626 626 if not os.path.isdir(makestandin(dest)):
627 627 os.makedirs(makestandin(dest))
628 628
629 629 try:
630 630 # When we call orig below it creates the standins but we don't add
631 631 # them to the dirstate until later, so hold the lock during that time.
632 632 wlock = repo.wlock()
633 633
634 634 manifest = repo[None].manifest()
635 635 def overridematch(ctx, pats=(), opts=None, globbed=False,
636 636 default='relpath', badfn=None):
637 637 if opts is None:
638 638 opts = {}
639 639 newpats = []
640 640 # The patterns were previously mangled to add the standin
641 641 # directory; we need to remove that now
642 642 for pat in pats:
643 643 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
644 644 newpats.append(pat.replace(lfutil.shortname, ''))
645 645 else:
646 646 newpats.append(pat)
647 647 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
648 648 m = copy.copy(match)
649 649 lfile = lambda f: lfutil.standin(f) in manifest
650 650 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
651 651 m._fileset = set(m._files)
652 652 origmatchfn = m.matchfn
653 653 def matchfn(f):
654 654 lfile = lfutil.splitstandin(f)
655 655 return (lfile is not None and
656 656 (f in manifest) and
657 657 origmatchfn(lfile) or
658 658 None)
659 659 m.matchfn = matchfn
660 660 return m
661 661 oldmatch = installmatchfn(overridematch)
662 662 listpats = []
663 663 for pat in pats:
664 664 if matchmod.patkind(pat) is not None:
665 665 listpats.append(pat)
666 666 else:
667 667 listpats.append(makestandin(pat))
668 668
669 669 try:
670 670 origcopyfile = util.copyfile
671 671 copiedfiles = []
672 672 def overridecopyfile(src, dest, *args, **kwargs):
673 673 if (lfutil.shortname in src and
674 674 dest.startswith(repo.wjoin(lfutil.shortname))):
675 675 destlfile = dest.replace(lfutil.shortname, '')
676 676 if not opts['force'] and os.path.exists(destlfile):
677 677 raise IOError('',
678 678 _('destination largefile already exists'))
679 679 copiedfiles.append((src, dest))
680 680 origcopyfile(src, dest, *args, **kwargs)
681 681
682 682 util.copyfile = overridecopyfile
683 683 result += orig(ui, repo, listpats, opts, rename)
684 684 finally:
685 685 util.copyfile = origcopyfile
686 686
687 687 lfdirstate = lfutil.openlfdirstate(ui, repo)
688 688 for (src, dest) in copiedfiles:
689 689 if (lfutil.shortname in src and
690 690 dest.startswith(repo.wjoin(lfutil.shortname))):
691 691 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
692 692 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
693 693 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
694 694 if not os.path.isdir(destlfiledir):
695 695 os.makedirs(destlfiledir)
696 696 if rename:
697 697 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
698 698
699 699 # The file is gone, but this deletes any empty parent
700 700 # directories as a side-effect.
701 701 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
702 702 lfdirstate.remove(srclfile)
703 703 else:
704 704 util.copyfile(repo.wjoin(srclfile),
705 705 repo.wjoin(destlfile))
706 706
707 707 lfdirstate.add(destlfile)
708 708 lfdirstate.write()
709 709 except error.Abort as e:
710 710 if pycompat.bytestr(e) != _('no files to copy'):
711 711 raise e
712 712 else:
713 713 nolfiles = True
714 714 finally:
715 715 restorematchfn()
716 716 wlock.release()
717 717
718 718 if nolfiles and nonormalfiles:
719 719 raise error.Abort(_('no files to copy'))
720 720
721 721 return result
722 722
723 723 # When the user calls revert, we have to be careful to not revert any
724 724 # changes to other largefiles accidentally. This means we have to keep
725 725 # track of the largefiles that are being reverted so we only pull down
726 726 # the necessary largefiles.
727 727 #
728 728 # Standins are only updated (to match the hash of largefiles) before
729 729 # commits. Update the standins then run the original revert, changing
730 730 # the matcher to hit standins instead of largefiles. Based on the
731 731 # resulting standins update the largefiles.
732 732 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
733 733 # Because we put the standins in a bad state (by updating them)
734 734 # and then return them to a correct state we need to lock to
735 735 # prevent others from changing them in their incorrect state.
736 736 with repo.wlock():
737 737 lfdirstate = lfutil.openlfdirstate(ui, repo)
738 738 s = lfutil.lfdirstatestatus(lfdirstate, repo)
739 739 lfdirstate.write()
740 740 for lfile in s.modified:
741 741 lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
742 742 for lfile in s.deleted:
743 743 fstandin = lfutil.standin(lfile)
744 744 if (repo.wvfs.exists(fstandin)):
745 745 repo.wvfs.unlink(fstandin)
746 746
747 747 oldstandins = lfutil.getstandinsstate(repo)
748 748
749 749 def overridematch(mctx, pats=(), opts=None, globbed=False,
750 750 default='relpath', badfn=None):
751 751 if opts is None:
752 752 opts = {}
753 753 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
754 754 m = copy.copy(match)
755 755
756 756 # revert supports recursing into subrepos, and though largefiles
757 757 # currently doesn't work correctly in that case, this match is
758 758 # called, so the lfdirstate above may not be the correct one for
759 759 # this invocation of match.
760 760 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
761 761 False)
762 762
763 763 wctx = repo[None]
764 764 matchfiles = []
765 765 for f in m._files:
766 766 standin = lfutil.standin(f)
767 767 if standin in ctx or standin in mctx:
768 768 matchfiles.append(standin)
769 769 elif standin in wctx or lfdirstate[f] == 'r':
770 770 continue
771 771 else:
772 772 matchfiles.append(f)
773 773 m._files = matchfiles
774 774 m._fileset = set(m._files)
775 775 origmatchfn = m.matchfn
776 776 def matchfn(f):
777 777 lfile = lfutil.splitstandin(f)
778 778 if lfile is not None:
779 779 return (origmatchfn(lfile) and
780 780 (f in ctx or f in mctx))
781 781 return origmatchfn(f)
782 782 m.matchfn = matchfn
783 783 return m
784 784 oldmatch = installmatchfn(overridematch)
785 785 try:
786 786 orig(ui, repo, ctx, parents, *pats, **opts)
787 787 finally:
788 788 restorematchfn()
789 789
790 790 newstandins = lfutil.getstandinsstate(repo)
791 791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
792 792 # lfdirstate should be 'normallookup'-ed for updated files,
793 793 # because reverting doesn't touch dirstate for 'normal' files
794 794 # when target revision is explicitly specified: in such case,
795 795 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
796 796 # of target (standin) file.
797 797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
798 798 normallookup=True)
799 799
800 800 # after pulling changesets, we need to take some extra care to get
801 801 # largefiles updated remotely
802 802 def overridepull(orig, ui, repo, source=None, **opts):
803 803 revsprepull = len(repo)
804 804 if not source:
805 805 source = 'default'
806 806 repo.lfpullsource = source
807 807 result = orig(ui, repo, source, **opts)
808 808 revspostpull = len(repo)
809 809 lfrevs = opts.get(r'lfrev', [])
810 810 if opts.get(r'all_largefiles'):
811 811 lfrevs.append('pulled()')
812 812 if lfrevs and revspostpull > revsprepull:
813 813 numcached = 0
814 814 repo.firstpulled = revsprepull # for pulled() revset expression
815 815 try:
816 816 for rev in scmutil.revrange(repo, lfrevs):
817 817 ui.note(_('pulling largefiles for revision %d\n') % rev)
818 818 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
819 819 numcached += len(cached)
820 820 finally:
821 821 del repo.firstpulled
822 822 ui.status(_("%d largefiles cached\n") % numcached)
823 823 return result
824 824
825 825 def overridepush(orig, ui, repo, *args, **kwargs):
826 826 """Override push command and store --lfrev parameters in opargs"""
827 827 lfrevs = kwargs.pop(r'lfrev', None)
828 828 if lfrevs:
829 829 opargs = kwargs.setdefault(r'opargs', {})
830 830 opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
831 831 return orig(ui, repo, *args, **kwargs)
832 832
833 833 def exchangepushoperation(orig, *args, **kwargs):
834 834 """Override pushoperation constructor and store lfrevs parameter"""
835 835 lfrevs = kwargs.pop(r'lfrevs', None)
836 836 pushop = orig(*args, **kwargs)
837 837 pushop.lfrevs = lfrevs
838 838 return pushop
839 839
840 840 revsetpredicate = registrar.revsetpredicate()
841 841
842 842 @revsetpredicate('pulled()')
843 843 def pulledrevsetsymbol(repo, subset, x):
844 844 """Changesets that just has been pulled.
845 845
846 846 Only available with largefiles from pull --lfrev expressions.
847 847
848 848 .. container:: verbose
849 849
850 850 Some examples:
851 851
852 852 - pull largefiles for all new changesets::
853 853
854 854 hg pull --lfrev "pulled()"
855 855
856 856 - pull largefiles for all new branch heads::
857 857
858 858 hg pull --lfrev "head(pulled()) and not closed()"
859 859
860 860 """
861 861
862 862 try:
863 863 firstpulled = repo.firstpulled
864 864 except AttributeError:
865 865 raise error.Abort(_("pulled() only available in --lfrev"))
866 866 return smartset.baseset([r for r in subset if r >= firstpulled])
867 867
868 868 def overrideclone(orig, ui, source, dest=None, **opts):
869 869 d = dest
870 870 if d is None:
871 871 d = hg.defaultdest(source)
872 872 if opts.get(r'all_largefiles') and not hg.islocal(d):
873 873 raise error.Abort(_(
874 874 '--all-largefiles is incompatible with non-local destination %s') %
875 875 d)
876 876
877 877 return orig(ui, source, dest, **opts)
878 878
879 879 def hgclone(orig, ui, opts, *args, **kwargs):
880 880 result = orig(ui, opts, *args, **kwargs)
881 881
882 882 if result is not None:
883 883 sourcerepo, destrepo = result
884 884 repo = destrepo.local()
885 885
886 886 # When cloning to a remote repo (like through SSH), no repo is available
887 887 # from the peer. Therefore the largefiles can't be downloaded and the
888 888 # hgrc can't be updated.
889 889 if not repo:
890 890 return result
891 891
892 892 # If largefiles is required for this repo, permanently enable it locally
893 893 if 'largefiles' in repo.requirements:
894 894 repo.vfs.append('hgrc',
895 895 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
896 896
897 897 # Caching is implicitly limited to 'rev' option, since the dest repo was
898 898 # truncated at that point. The user may expect a download count with
899 899 # this option, so attempt it whether or not this is a largefiles repo.
900 900 if opts.get(r'all_largefiles'):
901 901 success, missing = lfcommands.downloadlfiles(ui, repo, None)
902 902
903 903 if missing != 0:
904 904 return None
905 905
906 906 return result
907 907
908 908 def hgpostshare(orig, sourcerepo, destrepo, bookmarks=True, defaultpath=None):
909 909 orig(sourcerepo, destrepo, bookmarks, defaultpath)
910 910
911 911 # If largefiles is required for this repo, permanently enable it locally
912 912 if 'largefiles' in destrepo.requirements:
913 913 destrepo.vfs.append('hgrc',
914 914 util.tonativeeol('\n[extensions]\nlargefiles=\n'))
915 915
916 916 def overriderebase(orig, ui, repo, **opts):
917 917 if not util.safehasattr(repo, '_largefilesenabled'):
918 918 return orig(ui, repo, **opts)
919 919
920 920 resuming = opts.get(r'continue')
921 921 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
922 922 repo._lfstatuswriters.append(lambda *msg, **opts: None)
923 923 try:
924 924 return orig(ui, repo, **opts)
925 925 finally:
926 926 repo._lfstatuswriters.pop()
927 927 repo._lfcommithooks.pop()
928 928
929 929 def overridearchivecmd(orig, ui, repo, dest, **opts):
930 930 repo.unfiltered().lfstatus = True
931 931
932 932 try:
933 933 return orig(ui, repo.unfiltered(), dest, **opts)
934 934 finally:
935 935 repo.unfiltered().lfstatus = False
936 936
937 937 def hgwebarchive(orig, web):
938 938 web.repo.lfstatus = True
939 939
940 940 try:
941 941 return orig(web)
942 942 finally:
943 943 web.repo.lfstatus = False
944 944
945 945 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
946 946 prefix='', mtime=None, subrepos=None):
947 947 # For some reason setting repo.lfstatus in hgwebarchive only changes the
948 948 # unfiltered repo's attr, so check that as well.
949 949 if not repo.lfstatus and not repo.unfiltered().lfstatus:
950 950 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
951 951 subrepos)
952 952
953 953 # No need to lock because we are only reading history and
954 954 # largefile caches, neither of which are modified.
955 955 if node is not None:
956 956 lfcommands.cachelfiles(repo.ui, repo, node)
957 957
958 958 if kind not in archival.archivers:
959 959 raise error.Abort(_("unknown archive type '%s'") % kind)
960 960
961 961 ctx = repo[node]
962 962
963 963 if kind == 'files':
964 964 if prefix:
965 965 raise error.Abort(
966 966 _('cannot give prefix when archiving to files'))
967 967 else:
968 968 prefix = archival.tidyprefix(dest, kind, prefix)
969 969
970 970 def write(name, mode, islink, getdata):
971 971 if matchfn and not matchfn(name):
972 972 return
973 973 data = getdata()
974 974 if decode:
975 975 data = repo.wwritedata(name, data)
976 976 archiver.addfile(prefix + name, mode, islink, data)
977 977
978 978 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
979 979
980 980 if repo.ui.configbool("ui", "archivemeta"):
981 981 write('.hg_archival.txt', 0o644, False,
982 982 lambda: archival.buildmetadata(ctx))
983 983
984 984 for f in ctx:
985 985 ff = ctx.flags(f)
986 986 getdata = ctx[f].data
987 987 lfile = lfutil.splitstandin(f)
988 988 if lfile is not None:
989 989 if node is not None:
990 990 path = lfutil.findfile(repo, getdata().strip())
991 991
992 992 if path is None:
993 993 raise error.Abort(
994 994 _('largefile %s not found in repo store or system cache')
995 995 % lfile)
996 996 else:
997 997 path = lfile
998 998
999 999 f = lfile
1000 1000
1001 1001 getdata = lambda: util.readfile(path)
1002 1002 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1003 1003
1004 1004 if subrepos:
1005 1005 for subpath in sorted(ctx.substate):
1006 1006 sub = ctx.workingsub(subpath)
1007 1007 submatch = matchmod.subdirmatcher(subpath, matchfn)
1008 1008 sub._repo.lfstatus = True
1009 1009 sub.archive(archiver, prefix, submatch)
1010 1010
1011 1011 archiver.done()
1012 1012
1013 1013 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1014 1014 lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
1015 1015 if not lfenabled or not repo._repo.lfstatus:
1016 1016 return orig(repo, archiver, prefix, match, decode)
1017 1017
1018 1018 repo._get(repo._state + ('hg',))
1019 1019 rev = repo._state[1]
1020 1020 ctx = repo._repo[rev]
1021 1021
1022 1022 if ctx.node() is not None:
1023 1023 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1024 1024
1025 1025 def write(name, mode, islink, getdata):
1026 1026 # At this point, the standin has been replaced with the largefile name,
1027 1027 # so the normal matcher works here without the lfutil variants.
1028 1028 if match and not match(f):
1029 1029 return
1030 1030 data = getdata()
1031 1031 if decode:
1032 1032 data = repo._repo.wwritedata(name, data)
1033 1033
1034 1034 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1035 1035
1036 1036 for f in ctx:
1037 1037 ff = ctx.flags(f)
1038 1038 getdata = ctx[f].data
1039 1039 lfile = lfutil.splitstandin(f)
1040 1040 if lfile is not None:
1041 1041 if ctx.node() is not None:
1042 1042 path = lfutil.findfile(repo._repo, getdata().strip())
1043 1043
1044 1044 if path is None:
1045 1045 raise error.Abort(
1046 1046 _('largefile %s not found in repo store or system cache')
1047 1047 % lfile)
1048 1048 else:
1049 1049 path = lfile
1050 1050
1051 1051 f = lfile
1052 1052
1053 1053 getdata = lambda: util.readfile(os.path.join(prefix, path))
1054 1054
1055 1055 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1056 1056
1057 1057 for subpath in sorted(ctx.substate):
1058 1058 sub = ctx.workingsub(subpath)
1059 1059 submatch = matchmod.subdirmatcher(subpath, match)
1060 1060 sub._repo.lfstatus = True
1061 1061 sub.archive(archiver, prefix + repo._path + '/', submatch, decode)
1062 1062
1063 1063 # If a largefile is modified, the change is not reflected in its
1064 1064 # standin until a commit. cmdutil.bailifchanged() raises an exception
1065 1065 # if the repo has uncommitted changes. Wrap it to also check if
1066 1066 # largefiles were changed. This is used by bisect, backout and fetch.
1067 1067 def overridebailifchanged(orig, repo, *args, **kwargs):
1068 1068 orig(repo, *args, **kwargs)
1069 1069 repo.lfstatus = True
1070 1070 s = repo.status()
1071 1071 repo.lfstatus = False
1072 1072 if s.modified or s.added or s.removed or s.deleted:
1073 1073 raise error.Abort(_('uncommitted changes'))
1074 1074
1075 1075 def postcommitstatus(orig, repo, *args, **kwargs):
1076 1076 repo.lfstatus = True
1077 1077 try:
1078 1078 return orig(repo, *args, **kwargs)
1079 1079 finally:
1080 1080 repo.lfstatus = False
1081 1081
1082 1082 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun):
1083 1083 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1084 1084 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun)
1085 1085 m = composelargefilematcher(match, repo[None].manifest())
1086 1086
1087 1087 try:
1088 1088 repo.lfstatus = True
1089 1089 s = repo.status(match=m, clean=True)
1090 1090 finally:
1091 1091 repo.lfstatus = False
1092 1092 manifest = repo[None].manifest()
1093 1093 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1094 1094 forget = [f for f in forget if lfutil.standin(f) in manifest]
1095 1095
1096 1096 for f in forget:
1097 1097 fstandin = lfutil.standin(f)
1098 1098 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1099 1099 ui.warn(_('not removing %s: file is already untracked\n')
1100 1100 % m.rel(f))
1101 1101 bad.append(f)
1102 1102
1103 1103 for f in forget:
1104 1104 if ui.verbose or not m.exact(f):
1105 1105 ui.status(_('removing %s\n') % m.rel(f))
1106 1106
1107 1107 # Need to lock because standin files are deleted then removed from the
1108 1108 # repository and we could race in-between.
1109 1109 with repo.wlock():
1110 1110 lfdirstate = lfutil.openlfdirstate(ui, repo)
1111 1111 for f in forget:
1112 1112 if lfdirstate[f] == 'a':
1113 1113 lfdirstate.drop(f)
1114 1114 else:
1115 1115 lfdirstate.remove(f)
1116 1116 lfdirstate.write()
1117 1117 standins = [lfutil.standin(f) for f in forget]
1118 1118 for f in standins:
1119 1119 repo.wvfs.unlinkpath(f, ignoremissing=True)
1120 1120 rejected = repo[None].forget(standins)
1121 1121
1122 1122 bad.extend(f for f in rejected if f in m.files())
1123 1123 forgot.extend(f for f in forget if f not in rejected)
1124 1124 return bad, forgot
1125 1125
1126 1126 def _getoutgoings(repo, other, missing, addfunc):
1127 1127 """get pairs of filename and largefile hash in outgoing revisions
1128 1128 in 'missing'.
1129 1129
1130 1130 largefiles already existing in the 'other' repository are ignored.
1131 1131
1132 1132 'addfunc' is invoked with each unique pair of filename and
1133 1133 largefile hash value.
1134 1134 """
1135 1135 knowns = set()
1136 1136 lfhashes = set()
1137 1137 def dedup(fn, lfhash):
1138 1138 k = (fn, lfhash)
1139 1139 if k not in knowns:
1140 1140 knowns.add(k)
1141 1141 lfhashes.add(lfhash)
1142 1142 lfutil.getlfilestoupload(repo, missing, dedup)
1143 1143 if lfhashes:
1144 1144 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1145 1145 for fn, lfhash in knowns:
1146 1146 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1147 1147 addfunc(fn, lfhash)
1148 1148
1149 1149 def outgoinghook(ui, repo, other, opts, missing):
1150 1150 if opts.pop('large', None):
1151 1151 lfhashes = set()
1152 1152 if ui.debugflag:
1153 1153 toupload = {}
1154 1154 def addfunc(fn, lfhash):
1155 1155 if fn not in toupload:
1156 1156 toupload[fn] = []
1157 1157 toupload[fn].append(lfhash)
1158 1158 lfhashes.add(lfhash)
1159 1159 def showhashes(fn):
1160 1160 for lfhash in sorted(toupload[fn]):
1161 1161 ui.debug(' %s\n' % (lfhash))
1162 1162 else:
1163 1163 toupload = set()
1164 1164 def addfunc(fn, lfhash):
1165 1165 toupload.add(fn)
1166 1166 lfhashes.add(lfhash)
1167 1167 def showhashes(fn):
1168 1168 pass
1169 1169 _getoutgoings(repo, other, missing, addfunc)
1170 1170
1171 1171 if not toupload:
1172 1172 ui.status(_('largefiles: no files to upload\n'))
1173 1173 else:
1174 1174 ui.status(_('largefiles to upload (%d entities):\n')
1175 1175 % (len(lfhashes)))
1176 1176 for file in sorted(toupload):
1177 1177 ui.status(lfutil.splitstandin(file) + '\n')
1178 1178 showhashes(file)
1179 1179 ui.status('\n')
1180 1180
1181 1181 def summaryremotehook(ui, repo, opts, changes):
1182 1182 largeopt = opts.get('large', False)
1183 1183 if changes is None:
1184 1184 if largeopt:
1185 1185 return (False, True) # only outgoing check is needed
1186 1186 else:
1187 1187 return (False, False)
1188 1188 elif largeopt:
1189 1189 url, branch, peer, outgoing = changes[1]
1190 1190 if peer is None:
1191 1191 # i18n: column positioning for "hg summary"
1192 1192 ui.status(_('largefiles: (no remote repo)\n'))
1193 1193 return
1194 1194
1195 1195 toupload = set()
1196 1196 lfhashes = set()
1197 1197 def addfunc(fn, lfhash):
1198 1198 toupload.add(fn)
1199 1199 lfhashes.add(lfhash)
1200 1200 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1201 1201
1202 1202 if not toupload:
1203 1203 # i18n: column positioning for "hg summary"
1204 1204 ui.status(_('largefiles: (no files to upload)\n'))
1205 1205 else:
1206 1206 # i18n: column positioning for "hg summary"
1207 1207 ui.status(_('largefiles: %d entities for %d files to upload\n')
1208 1208 % (len(lfhashes), len(toupload)))
1209 1209
1210 1210 def overridesummary(orig, ui, repo, *pats, **opts):
1211 1211 try:
1212 1212 repo.lfstatus = True
1213 1213 orig(ui, repo, *pats, **opts)
1214 1214 finally:
1215 1215 repo.lfstatus = False
1216 1216
1217 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1218 similarity=None):
1217 def scmutiladdremove(orig, repo, matcher, prefix, opts=None):
1219 1218 if opts is None:
1220 1219 opts = {}
1221 1220 if not lfutil.islfilesrepo(repo):
1222 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1221 return orig(repo, matcher, prefix, opts)
1223 1222 # Get the list of missing largefiles so we can remove them
1224 1223 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1225 1224 unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()),
1226 1225 subrepos=[], ignored=False, clean=False,
1227 1226 unknown=False)
1228 1227
1229 1228 # Call into the normal remove code, but we want the removal of the standin
1230 1229 # to be handled by the original addremove. Monkey patching here makes sure
1231 1230 # we don't remove the standin in the largefiles code, preventing a very
1232 1231 # confused state later.
1233 1232 if s.deleted:
1234 1233 m = copy.copy(matcher)
1235 1234
1236 1235 # The m._files and m._map attributes are not changed to the deleted list
1237 1236 # because that affects the m.exact() test, which in turn governs whether
1238 1237 # or not the file name is printed, and how. Simply limit the original
1239 1238 # matches to those in the deleted status list.
1240 1239 matchfn = m.matchfn
1241 1240 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1242 1241
1243 1242 removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'),
1244 1243 **pycompat.strkwargs(opts))
1245 1244 # Call into the normal add code, and any files that *should* be added as
1246 1245 # largefiles will be
1247 1246 added, bad = addlargefiles(repo.ui, repo, True, matcher,
1248 1247 **pycompat.strkwargs(opts))
1249 1248 # Now that we've handled largefiles, hand off to the original addremove
1250 1249 # function to take care of the rest. Make sure it doesn't do anything with
1251 1250 # largefiles by passing a matcher that will ignore them.
1252 1251 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1253 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1252 return orig(repo, matcher, prefix, opts)
1254 1253
1255 1254 # Calling purge with --all will cause the largefiles to be deleted.
1256 1255 # Override repo.status to prevent this from happening.
1257 1256 def overridepurge(orig, ui, repo, *dirs, **opts):
1258 1257 # XXX Monkey patching a repoview will not work. The assigned attribute will
1259 1258 # be set on the unfiltered repo, but we will only look up attributes in the
1260 1259 # unfiltered repo if the lookup in the repoview object itself fails. As the
1261 1260 # monkey patched method exists on the repoview class the lookup will not
1262 1261 # fail. As a result, the original version will shadow the monkey patched
1263 1262 # one, defeating the monkey patch.
1264 1263 #
1265 1264 # As a workaround, we use an unfiltered repo here. We should do something
1266 1265 # cleaner instead.
1267 1266 repo = repo.unfiltered()
1268 1267 oldstatus = repo.status
1269 1268 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1270 1269 clean=False, unknown=False, listsubrepos=False):
1271 1270 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1272 1271 listsubrepos)
1273 1272 lfdirstate = lfutil.openlfdirstate(ui, repo)
1274 1273 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1275 1274 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1276 1275 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1277 1276 unknown, ignored, r.clean)
1278 1277 repo.status = overridestatus
1279 1278 orig(ui, repo, *dirs, **opts)
1280 1279 repo.status = oldstatus
1281 1280
1282 1281 def overriderollback(orig, ui, repo, **opts):
1283 1282 with repo.wlock():
1284 1283 before = repo.dirstate.parents()
1285 1284 orphans = set(f for f in repo.dirstate
1286 1285 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1287 1286 result = orig(ui, repo, **opts)
1288 1287 after = repo.dirstate.parents()
1289 1288 if before == after:
1290 1289 return result # no need to restore standins
1291 1290
1292 1291 pctx = repo['.']
1293 1292 for f in repo.dirstate:
1294 1293 if lfutil.isstandin(f):
1295 1294 orphans.discard(f)
1296 1295 if repo.dirstate[f] == 'r':
1297 1296 repo.wvfs.unlinkpath(f, ignoremissing=True)
1298 1297 elif f in pctx:
1299 1298 fctx = pctx[f]
1300 1299 repo.wwrite(f, fctx.data(), fctx.flags())
1301 1300 else:
1302 1301 # content of standin is not so important in 'a',
1303 1302 # 'm' or 'n' (coming from the 2nd parent) cases
1304 1303 lfutil.writestandin(repo, f, '', False)
1305 1304 for standin in orphans:
1306 1305 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1307 1306
1308 1307 lfdirstate = lfutil.openlfdirstate(ui, repo)
1309 1308 orphans = set(lfdirstate)
1310 1309 lfiles = lfutil.listlfiles(repo)
1311 1310 for file in lfiles:
1312 1311 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1313 1312 orphans.discard(file)
1314 1313 for lfile in orphans:
1315 1314 lfdirstate.drop(lfile)
1316 1315 lfdirstate.write()
1317 1316 return result
1318 1317
1319 1318 def overridetransplant(orig, ui, repo, *revs, **opts):
1320 1319 resuming = opts.get(r'continue')
1321 1320 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1322 1321 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1323 1322 try:
1324 1323 result = orig(ui, repo, *revs, **opts)
1325 1324 finally:
1326 1325 repo._lfstatuswriters.pop()
1327 1326 repo._lfcommithooks.pop()
1328 1327 return result
1329 1328
1330 1329 def overridecat(orig, ui, repo, file1, *pats, **opts):
1331 1330 opts = pycompat.byteskwargs(opts)
1332 1331 ctx = scmutil.revsingle(repo, opts.get('rev'))
1333 1332 err = 1
1334 1333 notbad = set()
1335 1334 m = scmutil.match(ctx, (file1,) + pats, opts)
1336 1335 origmatchfn = m.matchfn
1337 1336 def lfmatchfn(f):
1338 1337 if origmatchfn(f):
1339 1338 return True
1340 1339 lf = lfutil.splitstandin(f)
1341 1340 if lf is None:
1342 1341 return False
1343 1342 notbad.add(lf)
1344 1343 return origmatchfn(lf)
1345 1344 m.matchfn = lfmatchfn
1346 1345 origbadfn = m.bad
1347 1346 def lfbadfn(f, msg):
1348 1347 if f not in notbad:
1349 1348 origbadfn(f, msg)
1350 1349 m.bad = lfbadfn
1351 1350
1352 1351 origvisitdirfn = m.visitdir
1353 1352 def lfvisitdirfn(dir):
1354 1353 if dir == lfutil.shortname:
1355 1354 return True
1356 1355 ret = origvisitdirfn(dir)
1357 1356 if ret:
1358 1357 return ret
1359 1358 lf = lfutil.splitstandin(dir)
1360 1359 if lf is None:
1361 1360 return False
1362 1361 return origvisitdirfn(lf)
1363 1362 m.visitdir = lfvisitdirfn
1364 1363
1365 1364 for f in ctx.walk(m):
1366 1365 with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
1367 1366 lf = lfutil.splitstandin(f)
1368 1367 if lf is None or origmatchfn(f):
1369 1368 # duplicating unreachable code from commands.cat
1370 1369 data = ctx[f].data()
1371 1370 if opts.get('decode'):
1372 1371 data = repo.wwritedata(f, data)
1373 1372 fp.write(data)
1374 1373 else:
1375 1374 hash = lfutil.readasstandin(ctx[f])
1376 1375 if not lfutil.inusercache(repo.ui, hash):
1377 1376 store = storefactory.openstore(repo)
1378 1377 success, missing = store.get([(lf, hash)])
1379 1378 if len(success) != 1:
1380 1379 raise error.Abort(
1381 1380 _('largefile %s is not in cache and could not be '
1382 1381 'downloaded') % lf)
1383 1382 path = lfutil.usercachepath(repo.ui, hash)
1384 1383 with open(path, "rb") as fpin:
1385 1384 for chunk in util.filechunkiter(fpin):
1386 1385 fp.write(chunk)
1387 1386 err = 0
1388 1387 return err
1389 1388
1390 1389 def mergeupdate(orig, repo, node, branchmerge, force,
1391 1390 *args, **kwargs):
1392 1391 matcher = kwargs.get(r'matcher', None)
1393 1392 # note if this is a partial update
1394 1393 partial = matcher and not matcher.always()
1395 1394 with repo.wlock():
1396 1395 # branch |       |         |
1397 1396 #  merge | force | partial | action
1398 1397 # -------+-------+---------+--------------
1399 1398 #    x   |   x   |    x    | linear-merge
1400 1399 #    o   |   x   |    x    | branch-merge
1401 1400 #    x   |   o   |    x    | overwrite (as clean update)
1402 1401 #    o   |   o   |    x    | force-branch-merge (*1)
1403 1402 #    x   |   x   |    o    | (*)
1404 1403 #    o   |   x   |    o    | (*)
1405 1404 #    x   |   o   |    o    | overwrite (as revert)
1406 1405 #    o   |   o   |    o    | (*)
1407 1406 #
1408 1407 # (*) don't care
1409 1408 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1410 1409
1411 1410 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1412 1411 unsure, s = lfdirstate.status(matchmod.always(repo.root,
1413 1412 repo.getcwd()),
1414 1413 subrepos=[], ignored=False,
1415 1414 clean=True, unknown=False)
1416 1415 oldclean = set(s.clean)
1417 1416 pctx = repo['.']
1418 1417 dctx = repo[node]
1419 1418 for lfile in unsure + s.modified:
1420 1419 lfileabs = repo.wvfs.join(lfile)
1421 1420 if not repo.wvfs.exists(lfileabs):
1422 1421 continue
1423 1422 lfhash = lfutil.hashfile(lfileabs)
1424 1423 standin = lfutil.standin(lfile)
1425 1424 lfutil.writestandin(repo, standin, lfhash,
1426 1425 lfutil.getexecutable(lfileabs))
1427 1426 if (standin in pctx and
1428 1427 lfhash == lfutil.readasstandin(pctx[standin])):
1429 1428 oldclean.add(lfile)
1430 1429 for lfile in s.added:
1431 1430 fstandin = lfutil.standin(lfile)
1432 1431 if fstandin not in dctx:
1433 1432 # in this case, content of standin file is meaningless
1434 1433 # (in dctx, lfile is unknown, or normal file)
1435 1434 continue
1436 1435 lfutil.updatestandin(repo, lfile, fstandin)
1437 1436 # mark all clean largefiles as dirty, just in case the update gets
1438 1437 # interrupted before largefiles and lfdirstate are synchronized
1439 1438 for lfile in oldclean:
1440 1439 lfdirstate.normallookup(lfile)
1441 1440 lfdirstate.write()
1442 1441
1443 1442 oldstandins = lfutil.getstandinsstate(repo)
1444 1443 # Make sure the merge runs on disk, not in-memory. largefiles is not a
1445 1444 # good candidate for in-memory merge (large files, custom dirstate,
1446 1445 # matcher usage).
1447 1446 kwargs[r'wc'] = repo[None]
1448 1447 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1449 1448
1450 1449 newstandins = lfutil.getstandinsstate(repo)
1451 1450 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1452 1451
1453 1452 # to avoid leaving all largefiles as dirty (and thus having to rehash
1454 1453 # them), mark all the ones that didn't change as clean
1455 1454 for lfile in oldclean.difference(filelist):
1456 1455 lfdirstate.normal(lfile)
1457 1456 lfdirstate.write()
1458 1457
1459 1458 if branchmerge or force or partial:
1460 1459 filelist.extend(s.deleted + s.removed)
1461 1460
1462 1461 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1463 1462 normallookup=partial)
1464 1463
1465 1464 return result
1466 1465
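# Illustrative restatement of the decision table in mergeupdate() above, as a
# hypothetical helper (not part of this changeset); it only mirrors the table,
# the real decision is made inside merge.update().
def _updateaction(branchmerge, force, partial):
    if not partial:
        if branchmerge and force:
            return 'force-branch-merge'          # (*1) e.g. "rebase --collapse"
        if branchmerge:
            return 'branch-merge'
        if force:
            return 'overwrite (as clean update)'
        return 'linear-merge'
    if force and not branchmerge:
        return 'overwrite (as revert)'
    return None                                  # (*) don't care
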
1467 1466 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1468 1467 result = orig(repo, files, *args, **kwargs)
1469 1468
1470 1469 filelist = []
1471 1470 for f in files:
1472 1471 lf = lfutil.splitstandin(f)
1473 1472 if lf is not None:
1474 1473 filelist.append(lf)
1475 1474 if filelist:
1476 1475 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1477 1476 printmessage=False, normallookup=True)
1478 1477
1479 1478 return result
1480 1479
1481 1480 def upgraderequirements(orig, repo):
1482 1481 reqs = orig(repo)
1483 1482 if 'largefiles' in repo.requirements:
1484 1483 reqs.add('largefiles')
1485 1484 return reqs
1486 1485
1487 1486 _lfscheme = 'largefile://'
1488 1487 def openlargefile(orig, ui, url_, data=None):
1489 1488 if url_.startswith(_lfscheme):
1490 1489 if data:
1491 1490 msg = "cannot use data on a 'largefile://' url"
1492 1491 raise error.ProgrammingError(msg)
1493 1492 lfid = url_[len(_lfscheme):]
1494 1493 return storefactory.getlfile(ui, lfid)
1495 1494 else:
1496 1495 return orig(ui, url_, data=data)
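# Illustrative sketch (assumed wiring, not part of this changeset): if
# openlargefile() is installed as a wrapper around mercurial.url.open via
# extensions.wrapfunction, a 'largefile://<hash>' URL is answered from the
# largefile store, while any other URL falls through to the original opener.
from mercurial import extensions, url as urlmod

def _installlargefileopener():
    extensions.wrapfunction(urlmod, 'open', openlargefile)

def _readlargefile(ui, lfid):
    # lfid is the hash of a largefile already present in a configured store
    return urlmod.open(ui, _lfscheme + lfid).read()
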
@@ -1,5640 +1,5641 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import os
13 13 import re
14 14 import sys
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23 from . import (
24 24 archival,
25 25 bookmarks,
26 26 bundle2,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 dirstateguard,
33 33 discovery,
34 34 encoding,
35 35 error,
36 36 exchange,
37 37 extensions,
38 38 formatter,
39 39 graphmod,
40 40 hbisect,
41 41 help,
42 42 hg,
43 43 lock as lockmod,
44 44 logcmdutil,
45 45 merge as mergemod,
46 46 obsolete,
47 47 obsutil,
48 48 patch,
49 49 phases,
50 50 pycompat,
51 51 rcutil,
52 52 registrar,
53 53 revsetlang,
54 54 rewriteutil,
55 55 scmutil,
56 56 server,
57 57 streamclone,
58 58 tags as tagsmod,
59 59 templatekw,
60 60 ui as uimod,
61 61 util,
62 62 wireprotoserver,
63 63 )
64 64 from .utils import (
65 65 dateutil,
66 66 procutil,
67 67 stringutil,
68 68 )
69 69
70 70 release = lockmod.release
71 71
72 72 table = {}
73 73 table.update(debugcommandsmod.command._table)
74 74
75 75 command = registrar.command(table)
76 76 readonly = registrar.command.readonly
77 77
78 78 # common command options
79 79
80 80 globalopts = [
81 81 ('R', 'repository', '',
82 82 _('repository root directory or name of overlay bundle file'),
83 83 _('REPO')),
84 84 ('', 'cwd', '',
85 85 _('change working directory'), _('DIR')),
86 86 ('y', 'noninteractive', None,
87 87 _('do not prompt, automatically pick the first choice for all prompts')),
88 88 ('q', 'quiet', None, _('suppress output')),
89 89 ('v', 'verbose', None, _('enable additional output')),
90 90 ('', 'color', '',
91 91 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
92 92 # and should not be translated
93 93 _("when to colorize (boolean, always, auto, never, or debug)"),
94 94 _('TYPE')),
95 95 ('', 'config', [],
96 96 _('set/override config option (use \'section.name=value\')'),
97 97 _('CONFIG')),
98 98 ('', 'debug', None, _('enable debugging output')),
99 99 ('', 'debugger', None, _('start debugger')),
100 100 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
101 101 _('ENCODE')),
102 102 ('', 'encodingmode', encoding.encodingmode,
103 103 _('set the charset encoding mode'), _('MODE')),
104 104 ('', 'traceback', None, _('always print a traceback on exception')),
105 105 ('', 'time', None, _('time how long the command takes')),
106 106 ('', 'profile', None, _('print command execution profile')),
107 107 ('', 'version', None, _('output version information and exit')),
108 108 ('h', 'help', None, _('display help and exit')),
109 109 ('', 'hidden', False, _('consider hidden changesets')),
110 110 ('', 'pager', 'auto',
111 111 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
112 112 ]
113 113
114 114 dryrunopts = cmdutil.dryrunopts
115 115 remoteopts = cmdutil.remoteopts
116 116 walkopts = cmdutil.walkopts
117 117 commitopts = cmdutil.commitopts
118 118 commitopts2 = cmdutil.commitopts2
119 119 formatteropts = cmdutil.formatteropts
120 120 templateopts = cmdutil.templateopts
121 121 logopts = cmdutil.logopts
122 122 diffopts = cmdutil.diffopts
123 123 diffwsopts = cmdutil.diffwsopts
124 124 diffopts2 = cmdutil.diffopts2
125 125 mergetoolopts = cmdutil.mergetoolopts
126 126 similarityopts = cmdutil.similarityopts
127 127 subrepoopts = cmdutil.subrepoopts
128 128 debugrevlogopts = cmdutil.debugrevlogopts
129 129
130 130 # Commands start here, listed alphabetically
131 131
132 132 @command('^add',
133 133 walkopts + subrepoopts + dryrunopts,
134 134 _('[OPTION]... [FILE]...'),
135 135 inferrepo=True)
136 136 def add(ui, repo, *pats, **opts):
137 137 """add the specified files on the next commit
138 138
139 139 Schedule files to be version controlled and added to the
140 140 repository.
141 141
142 142 The files will be added to the repository at the next commit. To
143 143 undo an add before that, see :hg:`forget`.
144 144
145 145 If no names are given, add all files to the repository (except
146 146 files matching ``.hgignore``).
147 147
148 148 .. container:: verbose
149 149
150 150 Examples:
151 151
152 152 - New (unknown) files are added
153 153 automatically by :hg:`add`::
154 154
155 155 $ ls
156 156 foo.c
157 157 $ hg status
158 158 ? foo.c
159 159 $ hg add
160 160 adding foo.c
161 161 $ hg status
162 162 A foo.c
163 163
164 164 - Specific files to be added can be specified::
165 165
166 166 $ ls
167 167 bar.c foo.c
168 168 $ hg status
169 169 ? bar.c
170 170 ? foo.c
171 171 $ hg add bar.c
172 172 $ hg status
173 173 A bar.c
174 174 ? foo.c
175 175
176 176 Returns 0 if all files are successfully added.
177 177 """
178 178
179 179 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
180 180 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
181 181 return rejected and 1 or 0
182 182
183 183 @command('addremove',
184 184 similarityopts + subrepoopts + walkopts + dryrunopts,
185 185 _('[OPTION]... [FILE]...'),
186 186 inferrepo=True)
187 187 def addremove(ui, repo, *pats, **opts):
188 188 """add all new files, delete all missing files
189 189
190 190 Add all new files and remove all missing files from the
191 191 repository.
192 192
193 193 Unless names are given, new files are ignored if they match any of
194 194 the patterns in ``.hgignore``. As with add, these changes take
195 195 effect at the next commit.
196 196
197 197 Use the -s/--similarity option to detect renamed files. This
198 198 option takes a percentage between 0 (disabled) and 100 (files must
199 199 be identical) as its parameter. With a parameter greater than 0,
200 200 this compares every removed file with every added file and records
201 201 those similar enough as renames. Detecting renamed files this way
202 202 can be expensive. After using this option, :hg:`status -C` can be
203 203 used to check which files were identified as moved or renamed. If
204 204 not specified, -s/--similarity defaults to 100 and only renames of
205 205 identical files are detected.
206 206
207 207 .. container:: verbose
208 208
209 209 Examples:
210 210
211 211 - A number of files (bar.c and foo.c) are new,
212 212 while foobar.c has been removed (without using :hg:`remove`)
213 213 from the repository::
214 214
215 215 $ ls
216 216 bar.c foo.c
217 217 $ hg status
218 218 ! foobar.c
219 219 ? bar.c
220 220 ? foo.c
221 221 $ hg addremove
222 222 adding bar.c
223 223 adding foo.c
224 224 removing foobar.c
225 225 $ hg status
226 226 A bar.c
227 227 A foo.c
228 228 R foobar.c
229 229
230 230 - A file foobar.c was moved to foo.c without using :hg:`rename`.
231 231 Afterwards, it was edited slightly::
232 232
233 233 $ ls
234 234 foo.c
235 235 $ hg status
236 236 ! foobar.c
237 237 ? foo.c
238 238 $ hg addremove --similarity 90
239 239 removing foobar.c
240 240 adding foo.c
241 241 recording removal of foobar.c as rename to foo.c (94% similar)
242 242 $ hg status -C
243 243 A foo.c
244 244 foobar.c
245 245 R foobar.c
246 246
247 247 Returns 0 if all files are successfully added.
248 248 """
249 249 opts = pycompat.byteskwargs(opts)
250 250 try:
251 251 sim = float(opts.get('similarity') or 100)
252 252 except ValueError:
253 253 raise error.Abort(_('similarity must be a number'))
254 254 if sim < 0 or sim > 100:
255 255 raise error.Abort(_('similarity must be between 0 and 100'))
256 opts['similarity'] = sim / 100.0
256 257 matcher = scmutil.match(repo[None], pats, opts)
257 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
258 return scmutil.addremove(repo, matcher, "", opts)
258 259
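# Illustrative sketch of the calling convention shown above (hypothetical
# helper, not part of this changeset): similarity now travels inside the opts
# dict handed to scmutil.addremove() rather than as a keyword argument.
def _addremove(repo, pats, opts, similarity=100):
    opts = dict(opts)
    opts['similarity'] = similarity / 100.0      # was: similarity=... keyword
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts)
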
259 260 @command('^annotate|blame',
260 261 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
261 262 ('', 'follow', None,
262 263 _('follow copies/renames and list the filename (DEPRECATED)')),
263 264 ('', 'no-follow', None, _("don't follow copies and renames")),
264 265 ('a', 'text', None, _('treat all files as text')),
265 266 ('u', 'user', None, _('list the author (long with -v)')),
266 267 ('f', 'file', None, _('list the filename')),
267 268 ('d', 'date', None, _('list the date (short with -q)')),
268 269 ('n', 'number', None, _('list the revision number (default)')),
269 270 ('c', 'changeset', None, _('list the changeset')),
270 271 ('l', 'line-number', None, _('show line number at the first appearance')),
271 272 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
272 273 ] + diffwsopts + walkopts + formatteropts,
273 274 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
274 275 inferrepo=True)
275 276 def annotate(ui, repo, *pats, **opts):
276 277 """show changeset information by line for each file
277 278
278 279 List changes in files, showing the revision id responsible for
279 280 each line.
280 281
281 282 This command is useful for discovering when a change was made and
282 283 by whom.
283 284
284 285 If you include --file, --user, or --date, the revision number is
285 286 suppressed unless you also include --number.
286 287
287 288 Without the -a/--text option, annotate will avoid processing files
288 289 it detects as binary. With -a, annotate will annotate the file
289 290 anyway, although the results will probably be neither useful
290 291 nor desirable.
291 292
292 293 Returns 0 on success.
293 294 """
294 295 opts = pycompat.byteskwargs(opts)
295 296 if not pats:
296 297 raise error.Abort(_('at least one filename or pattern is required'))
297 298
298 299 if opts.get('follow'):
299 300 # --follow is deprecated and now just an alias for -f/--file
300 301 # to mimic the behavior of Mercurial before version 1.5
301 302 opts['file'] = True
302 303
303 304 rev = opts.get('rev')
304 305 if rev:
305 306 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
306 307 ctx = scmutil.revsingle(repo, rev)
307 308
308 309 rootfm = ui.formatter('annotate', opts)
309 310 if ui.quiet:
310 311 datefunc = dateutil.shortdate
311 312 else:
312 313 datefunc = dateutil.datestr
313 314 if ctx.rev() is None:
314 315 def hexfn(node):
315 316 if node is None:
316 317 return None
317 318 else:
318 319 return rootfm.hexfunc(node)
319 320 if opts.get('changeset'):
320 321 # omit "+" suffix which is appended to node hex
321 322 def formatrev(rev):
322 323 if rev is None:
323 324 return '%d' % ctx.p1().rev()
324 325 else:
325 326 return '%d' % rev
326 327 else:
327 328 def formatrev(rev):
328 329 if rev is None:
329 330 return '%d+' % ctx.p1().rev()
330 331 else:
331 332 return '%d ' % rev
332 333 def formathex(hex):
333 334 if hex is None:
334 335 return '%s+' % rootfm.hexfunc(ctx.p1().node())
335 336 else:
336 337 return '%s ' % hex
337 338 else:
338 339 hexfn = rootfm.hexfunc
339 340 formatrev = formathex = pycompat.bytestr
340 341
341 342 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
342 343 ('number', ' ', lambda x: x.fctx.rev(), formatrev),
343 344 ('changeset', ' ', lambda x: hexfn(x.fctx.node()), formathex),
344 345 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
345 346 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
346 347 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
347 348 ]
348 349 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
349 350
350 351 if (not opts.get('user') and not opts.get('changeset')
351 352 and not opts.get('date') and not opts.get('file')):
352 353 opts['number'] = True
353 354
354 355 linenumber = opts.get('line_number') is not None
355 356 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
356 357 raise error.Abort(_('at least one of -n/-c is required for -l'))
357 358
358 359 ui.pager('annotate')
359 360
360 361 if rootfm.isplain():
361 362 def makefunc(get, fmt):
362 363 return lambda x: fmt(get(x))
363 364 else:
364 365 def makefunc(get, fmt):
365 366 return get
366 367 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
367 368 if opts.get(op)]
368 369 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
369 370 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
370 371 if opts.get(op))
371 372
372 373 def bad(x, y):
373 374 raise error.Abort("%s: %s" % (x, y))
374 375
375 376 m = scmutil.match(ctx, pats, opts, badfn=bad)
376 377
377 378 follow = not opts.get('no_follow')
378 379 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
379 380 whitespace=True)
380 381 skiprevs = opts.get('skip')
381 382 if skiprevs:
382 383 skiprevs = scmutil.revrange(repo, skiprevs)
383 384
384 385 for abs in ctx.walk(m):
385 386 fctx = ctx[abs]
386 387 rootfm.startitem()
387 388 rootfm.data(abspath=abs, path=m.rel(abs))
388 389 if not opts.get('text') and fctx.isbinary():
389 390 rootfm.plain(_("%s: binary file\n")
390 391 % ((pats and m.rel(abs)) or abs))
391 392 continue
392 393
393 394 fm = rootfm.nested('lines')
394 395 lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
395 396 diffopts=diffopts)
396 397 if not lines:
397 398 fm.end()
398 399 continue
399 400 formats = []
400 401 pieces = []
401 402
402 403 for f, sep in funcmap:
403 404 l = [f(n) for n in lines]
404 405 if fm.isplain():
405 406 sizes = [encoding.colwidth(x) for x in l]
406 407 ml = max(sizes)
407 408 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
408 409 else:
409 410 formats.append(['%s' for x in l])
410 411 pieces.append(l)
411 412
412 413 for f, p, n in zip(zip(*formats), zip(*pieces), lines):
413 414 fm.startitem()
414 415 fm.context(fctx=n.fctx)
415 416 fm.write(fields, "".join(f), *p)
416 417 if n.skip:
417 418 fmt = "* %s"
418 419 else:
419 420 fmt = ": %s"
420 421 fm.write('line', fmt, n.text)
421 422
422 423 if not lines[-1].text.endswith('\n'):
423 424 fm.plain('\n')
424 425 fm.end()
425 426
426 427 rootfm.end()
427 428
428 429 @command('archive',
429 430 [('', 'no-decode', None, _('do not pass files through decoders')),
430 431 ('p', 'prefix', '', _('directory prefix for files in archive'),
431 432 _('PREFIX')),
432 433 ('r', 'rev', '', _('revision to distribute'), _('REV')),
433 434 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
434 435 ] + subrepoopts + walkopts,
435 436 _('[OPTION]... DEST'))
436 437 def archive(ui, repo, dest, **opts):
437 438 '''create an unversioned archive of a repository revision
438 439
439 440 By default, the revision used is the parent of the working
440 441 directory; use -r/--rev to specify a different revision.
441 442
442 443 The archive type is automatically detected based on file
443 444 extension (to override, use -t/--type).
444 445
445 446 .. container:: verbose
446 447
447 448 Examples:
448 449
449 450 - create a zip file containing the 1.0 release::
450 451
451 452 hg archive -r 1.0 project-1.0.zip
452 453
453 454 - create a tarball excluding .hg files::
454 455
455 456 hg archive project.tar.gz -X ".hg*"
456 457
457 458 Valid types are:
458 459
459 460 :``files``: a directory full of files (default)
460 461 :``tar``: tar archive, uncompressed
461 462 :``tbz2``: tar archive, compressed using bzip2
462 463 :``tgz``: tar archive, compressed using gzip
463 464 :``uzip``: zip archive, uncompressed
464 465 :``zip``: zip archive, compressed using deflate
465 466
466 467 The exact name of the destination archive or directory is given
467 468 using a format string; see :hg:`help export` for details.
468 469
469 470 Each member added to an archive file has a directory prefix
470 471 prepended. Use -p/--prefix to specify a format string for the
471 472 prefix. The default is the basename of the archive, with suffixes
472 473 removed.
473 474
474 475 Returns 0 on success.
475 476 '''
476 477
477 478 opts = pycompat.byteskwargs(opts)
478 479 rev = opts.get('rev')
479 480 if rev:
480 481 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
481 482 ctx = scmutil.revsingle(repo, rev)
482 483 if not ctx:
483 484 raise error.Abort(_('no working directory: please specify a revision'))
484 485 node = ctx.node()
485 486 dest = cmdutil.makefilename(ctx, dest)
486 487 if os.path.realpath(dest) == repo.root:
487 488 raise error.Abort(_('repository root cannot be destination'))
488 489
489 490 kind = opts.get('type') or archival.guesskind(dest) or 'files'
490 491 prefix = opts.get('prefix')
491 492
492 493 if dest == '-':
493 494 if kind == 'files':
494 495 raise error.Abort(_('cannot archive plain files to stdout'))
495 496 dest = cmdutil.makefileobj(ctx, dest)
496 497 if not prefix:
497 498 prefix = os.path.basename(repo.root) + '-%h'
498 499
499 500 prefix = cmdutil.makefilename(ctx, prefix)
500 501 match = scmutil.match(ctx, [], opts)
501 502 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
502 503 match, prefix, subrepos=opts.get('subrepos'))
503 504
504 505 @command('backout',
505 506 [('', 'merge', None, _('merge with old dirstate parent after backout')),
506 507 ('', 'commit', None,
507 508 _('commit if no conflicts were encountered (DEPRECATED)')),
508 509 ('', 'no-commit', None, _('do not commit')),
509 510 ('', 'parent', '',
510 511 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
511 512 ('r', 'rev', '', _('revision to backout'), _('REV')),
512 513 ('e', 'edit', False, _('invoke editor on commit messages')),
513 514 ] + mergetoolopts + walkopts + commitopts + commitopts2,
514 515 _('[OPTION]... [-r] REV'))
515 516 def backout(ui, repo, node=None, rev=None, **opts):
516 517 '''reverse effect of earlier changeset
517 518
518 519 Prepare a new changeset with the effect of REV undone in the
519 520 current working directory. If no conflicts were encountered,
520 521 it will be committed immediately.
521 522
522 523 If REV is the parent of the working directory, then this new changeset
523 524 is committed automatically (unless --no-commit is specified).
524 525
525 526 .. note::
526 527
527 528 :hg:`backout` cannot be used to fix either an unwanted or
528 529 incorrect merge.
529 530
530 531 .. container:: verbose
531 532
532 533 Examples:
533 534
534 535 - Reverse the effect of the parent of the working directory.
535 536 This backout will be committed immediately::
536 537
537 538 hg backout -r .
538 539
539 540 - Reverse the effect of previous bad revision 23::
540 541
541 542 hg backout -r 23
542 543
543 544 - Reverse the effect of previous bad revision 23 and
544 545 leave changes uncommitted::
545 546
546 547 hg backout -r 23 --no-commit
547 548 hg commit -m "Backout revision 23"
548 549
549 550 By default, the pending changeset will have one parent,
550 551 maintaining a linear history. With --merge, the pending
551 552 changeset will instead have two parents: the old parent of the
552 553 working directory and a new child of REV that simply undoes REV.
553 554
554 555 Before version 1.7, the behavior without --merge was equivalent
555 556 to specifying --merge followed by :hg:`update --clean .` to
556 557 cancel the merge and leave the child of REV as a head to be
557 558 merged separately.
558 559
559 560 See :hg:`help dates` for a list of formats valid for -d/--date.
560 561
561 562 See :hg:`help revert` for a way to restore files to the state
562 563 of another revision.
563 564
564 565 Returns 0 on success, 1 if nothing to backout or there are unresolved
565 566 files.
566 567 '''
567 568 wlock = lock = None
568 569 try:
569 570 wlock = repo.wlock()
570 571 lock = repo.lock()
571 572 return _dobackout(ui, repo, node, rev, **opts)
572 573 finally:
573 574 release(lock, wlock)
574 575
575 576 def _dobackout(ui, repo, node=None, rev=None, **opts):
576 577 opts = pycompat.byteskwargs(opts)
577 578 if opts.get('commit') and opts.get('no_commit'):
578 579 raise error.Abort(_("cannot use --commit with --no-commit"))
579 580 if opts.get('merge') and opts.get('no_commit'):
580 581 raise error.Abort(_("cannot use --merge with --no-commit"))
581 582
582 583 if rev and node:
583 584 raise error.Abort(_("please specify just one revision"))
584 585
585 586 if not rev:
586 587 rev = node
587 588
588 589 if not rev:
589 590 raise error.Abort(_("please specify a revision to backout"))
590 591
591 592 date = opts.get('date')
592 593 if date:
593 594 opts['date'] = dateutil.parsedate(date)
594 595
595 596 cmdutil.checkunfinished(repo)
596 597 cmdutil.bailifchanged(repo)
597 598 node = scmutil.revsingle(repo, rev).node()
598 599
599 600 op1, op2 = repo.dirstate.parents()
600 601 if not repo.changelog.isancestor(node, op1):
601 602 raise error.Abort(_('cannot backout change that is not an ancestor'))
602 603
603 604 p1, p2 = repo.changelog.parents(node)
604 605 if p1 == nullid:
605 606 raise error.Abort(_('cannot backout a change with no parents'))
606 607 if p2 != nullid:
607 608 if not opts.get('parent'):
608 609 raise error.Abort(_('cannot backout a merge changeset'))
609 610 p = repo.lookup(opts['parent'])
610 611 if p not in (p1, p2):
611 612 raise error.Abort(_('%s is not a parent of %s') %
612 613 (short(p), short(node)))
613 614 parent = p
614 615 else:
615 616 if opts.get('parent'):
616 617 raise error.Abort(_('cannot use --parent on non-merge changeset'))
617 618 parent = p1
618 619
619 620 # the backout should appear on the same branch
620 621 branch = repo.dirstate.branch()
621 622 bheads = repo.branchheads(branch)
622 623 rctx = scmutil.revsingle(repo, hex(parent))
623 624 if not opts.get('merge') and op1 != node:
624 625 dsguard = dirstateguard.dirstateguard(repo, 'backout')
625 626 try:
626 627 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
627 628 'backout')
628 629 stats = mergemod.update(repo, parent, True, True, node, False)
629 630 repo.setparents(op1, op2)
630 631 dsguard.close()
631 632 hg._showstats(repo, stats)
632 633 if stats.unresolvedcount:
633 634 repo.ui.status(_("use 'hg resolve' to retry unresolved "
634 635 "file merges\n"))
635 636 return 1
636 637 finally:
637 638 ui.setconfig('ui', 'forcemerge', '', '')
638 639 lockmod.release(dsguard)
639 640 else:
640 641 hg.clean(repo, node, show_stats=False)
641 642 repo.dirstate.setbranch(branch)
642 643 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
643 644
644 645 if opts.get('no_commit'):
645 646 msg = _("changeset %s backed out, "
646 647 "don't forget to commit.\n")
647 648 ui.status(msg % short(node))
648 649 return 0
649 650
650 651 def commitfunc(ui, repo, message, match, opts):
651 652 editform = 'backout'
652 653 e = cmdutil.getcommiteditor(editform=editform,
653 654 **pycompat.strkwargs(opts))
654 655 if not message:
655 656 # we don't translate commit messages
656 657 message = "Backed out changeset %s" % short(node)
657 658 e = cmdutil.getcommiteditor(edit=True, editform=editform)
658 659 return repo.commit(message, opts.get('user'), opts.get('date'),
659 660 match, editor=e)
660 661 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
661 662 if not newnode:
662 663 ui.status(_("nothing changed\n"))
663 664 return 1
664 665 cmdutil.commitstatus(repo, newnode, branch, bheads)
665 666
666 667 def nice(node):
667 668 return '%d:%s' % (repo.changelog.rev(node), short(node))
668 669 ui.status(_('changeset %s backs out changeset %s\n') %
669 670 (nice(repo.changelog.tip()), nice(node)))
670 671 if opts.get('merge') and op1 != node:
671 672 hg.clean(repo, op1, show_stats=False)
672 673 ui.status(_('merging with changeset %s\n')
673 674 % nice(repo.changelog.tip()))
674 675 try:
675 676 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
676 677 'backout')
677 678 return hg.merge(repo, hex(repo.changelog.tip()))
678 679 finally:
679 680 ui.setconfig('ui', 'forcemerge', '', '')
680 681 return 0
681 682
682 683 @command('bisect',
683 684 [('r', 'reset', False, _('reset bisect state')),
684 685 ('g', 'good', False, _('mark changeset good')),
685 686 ('b', 'bad', False, _('mark changeset bad')),
686 687 ('s', 'skip', False, _('skip testing changeset')),
687 688 ('e', 'extend', False, _('extend the bisect range')),
688 689 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
689 690 ('U', 'noupdate', False, _('do not update to target'))],
690 691 _("[-gbsr] [-U] [-c CMD] [REV]"))
691 692 def bisect(ui, repo, rev=None, extra=None, command=None,
692 693 reset=None, good=None, bad=None, skip=None, extend=None,
693 694 noupdate=None):
694 695 """subdivision search of changesets
695 696
696 697 This command helps to find changesets which introduce problems. To
697 698 use, mark the earliest changeset you know exhibits the problem as
698 699 bad, then mark the latest changeset which is free from the problem
699 700 as good. Bisect will update your working directory to a revision
700 701 for testing (unless the -U/--noupdate option is specified). Once
701 702 you have performed tests, mark the working directory as good or
702 703 bad, and bisect will either update to another candidate changeset
703 704 or announce that it has found the bad revision.
704 705
705 706 As a shortcut, you can also use the revision argument to mark a
706 707 revision as good or bad without checking it out first.
707 708
708 709 If you supply a command, it will be used for automatic bisection.
709 710 The environment variable HG_NODE will contain the ID of the
710 711 changeset being tested. The exit status of the command will be
711 712 used to mark revisions as good or bad: status 0 means good, 125
712 713 means to skip the revision, 127 (command not found) will abort the
713 714 bisection, and any other non-zero exit status means the revision
714 715 is bad.
715 716
716 717 .. container:: verbose
717 718
718 719 Some examples:
719 720
720 721 - start a bisection with known bad revision 34, and good revision 12::
721 722
722 723 hg bisect --bad 34
723 724 hg bisect --good 12
724 725
725 726 - advance the current bisection by marking current revision as good or
726 727 bad::
727 728
728 729 hg bisect --good
729 730 hg bisect --bad
730 731
731 732 - mark the current revision, or a known revision, to be skipped (e.g. if
732 733 that revision is not usable because of another issue)::
733 734
734 735 hg bisect --skip
735 736 hg bisect --skip 23
736 737
737 738 - skip all revisions that do not touch directories ``foo`` or ``bar``::
738 739
739 740 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
740 741
741 742 - forget the current bisection::
742 743
743 744 hg bisect --reset
744 745
745 746 - use 'make && make tests' to automatically find the first broken
746 747 revision::
747 748
748 749 hg bisect --reset
749 750 hg bisect --bad 34
750 751 hg bisect --good 12
751 752 hg bisect --command "make && make tests"
752 753
753 754 - see all changesets whose states are already known in the current
754 755 bisection::
755 756
756 757 hg log -r "bisect(pruned)"
757 758
758 759 - see the changeset currently being bisected (especially useful
759 760 if running with -U/--noupdate)::
760 761
761 762 hg log -r "bisect(current)"
762 763
763 764 - see all changesets that took part in the current bisection::
764 765
765 766 hg log -r "bisect(range)"
766 767
767 768 - you can even get a nice graph::
768 769
769 770 hg log --graph -r "bisect(range)"
770 771
771 772 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
772 773
773 774 Returns 0 on success.
774 775 """
775 776 # backward compatibility
776 777 if rev in "good bad reset init".split():
777 778 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
778 779 cmd, rev, extra = rev, extra, None
779 780 if cmd == "good":
780 781 good = True
781 782 elif cmd == "bad":
782 783 bad = True
783 784 else:
784 785 reset = True
785 786 elif extra:
786 787 raise error.Abort(_('incompatible arguments'))
787 788
788 789 incompatibles = {
789 790 '--bad': bad,
790 791 '--command': bool(command),
791 792 '--extend': extend,
792 793 '--good': good,
793 794 '--reset': reset,
794 795 '--skip': skip,
795 796 }
796 797
797 798 enabled = [x for x in incompatibles if incompatibles[x]]
798 799
799 800 if len(enabled) > 1:
800 801 raise error.Abort(_('%s and %s are incompatible') %
801 802 tuple(sorted(enabled)[0:2]))
802 803
803 804 if reset:
804 805 hbisect.resetstate(repo)
805 806 return
806 807
807 808 state = hbisect.load_state(repo)
808 809
809 810 # update state
810 811 if good or bad or skip:
811 812 if rev:
812 813 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
813 814 else:
814 815 nodes = [repo.lookup('.')]
815 816 if good:
816 817 state['good'] += nodes
817 818 elif bad:
818 819 state['bad'] += nodes
819 820 elif skip:
820 821 state['skip'] += nodes
821 822 hbisect.save_state(repo, state)
822 823 if not (state['good'] and state['bad']):
823 824 return
824 825
825 826 def mayupdate(repo, node, show_stats=True):
826 827 """common used update sequence"""
827 828 if noupdate:
828 829 return
829 830 cmdutil.checkunfinished(repo)
830 831 cmdutil.bailifchanged(repo)
831 832 return hg.clean(repo, node, show_stats=show_stats)
832 833
833 834 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
834 835
835 836 if command:
836 837 changesets = 1
837 838 if noupdate:
838 839 try:
839 840 node = state['current'][0]
840 841 except LookupError:
841 842 raise error.Abort(_('current bisect revision is unknown - '
842 843 'start a new bisect to fix'))
843 844 else:
844 845 node, p2 = repo.dirstate.parents()
845 846 if p2 != nullid:
846 847 raise error.Abort(_('current bisect revision is a merge'))
847 848 if rev:
848 849 node = repo[scmutil.revsingle(repo, rev, node)].node()
849 850 try:
850 851 while changesets:
851 852 # update state
852 853 state['current'] = [node]
853 854 hbisect.save_state(repo, state)
854 855 status = ui.system(command, environ={'HG_NODE': hex(node)},
855 856 blockedtag='bisect_check')
856 857 if status == 125:
857 858 transition = "skip"
858 859 elif status == 0:
859 860 transition = "good"
860 861 # status < 0 means process was killed
861 862 elif status == 127:
862 863 raise error.Abort(_("failed to execute %s") % command)
863 864 elif status < 0:
864 865 raise error.Abort(_("%s killed") % command)
865 866 else:
866 867 transition = "bad"
867 868 state[transition].append(node)
868 869 ctx = repo[node]
869 870 ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
870 871 transition))
871 872 hbisect.checkstate(state)
872 873 # bisect
873 874 nodes, changesets, bgood = hbisect.bisect(repo, state)
874 875 # update to next check
875 876 node = nodes[0]
876 877 mayupdate(repo, node, show_stats=False)
877 878 finally:
878 879 state['current'] = [node]
879 880 hbisect.save_state(repo, state)
880 881 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
881 882 return
882 883
883 884 hbisect.checkstate(state)
884 885
885 886 # actually bisect
886 887 nodes, changesets, good = hbisect.bisect(repo, state)
887 888 if extend:
888 889 if not changesets:
889 890 extendnode = hbisect.extendrange(repo, state, nodes, good)
890 891 if extendnode is not None:
891 892 ui.write(_("Extending search to changeset %d:%s\n")
892 893 % (extendnode.rev(), extendnode))
893 894 state['current'] = [extendnode.node()]
894 895 hbisect.save_state(repo, state)
895 896 return mayupdate(repo, extendnode.node())
896 897 raise error.Abort(_("nothing to extend"))
897 898
898 899 if changesets == 0:
899 900 hbisect.printresult(ui, repo, state, displayer, nodes, good)
900 901 else:
901 902 assert len(nodes) == 1 # only a single node can be tested next
902 903 node = nodes[0]
903 904 # compute the approximate number of remaining tests
904 905 tests, size = 0, 2
905 906 while size <= changesets:
906 907 tests, size = tests + 1, size * 2
907 908 rev = repo.changelog.rev(node)
908 909 ui.write(_("Testing changeset %d:%s "
909 910 "(%d changesets remaining, ~%d tests)\n")
910 911 % (rev, short(node), changesets, tests))
911 912 state['current'] = [node]
912 913 hbisect.save_state(repo, state)
913 914 return mayupdate(repo, node)
914 915
915 916 @command('bookmarks|bookmark',
916 917 [('f', 'force', False, _('force')),
917 918 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
918 919 ('d', 'delete', False, _('delete a given bookmark')),
919 920 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
920 921 ('i', 'inactive', False, _('mark a bookmark inactive')),
921 922 ] + formatteropts,
922 923 _('hg bookmarks [OPTIONS]... [NAME]...'))
923 924 def bookmark(ui, repo, *names, **opts):
924 925 '''create a new bookmark or list existing bookmarks
925 926
926 927 Bookmarks are labels on changesets to help track lines of development.
927 928 Bookmarks are unversioned and can be moved, renamed and deleted.
928 929 Deleting or moving a bookmark has no effect on the associated changesets.
929 930
930 931 Creating or updating to a bookmark causes it to be marked as 'active'.
931 932 The active bookmark is indicated with a '*'.
932 933 When a commit is made, the active bookmark will advance to the new commit.
933 934 A plain :hg:`update` will also advance an active bookmark, if possible.
934 935 Updating away from a bookmark will cause it to be deactivated.
935 936
936 937 Bookmarks can be pushed and pulled between repositories (see
937 938 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
938 939 diverged, a new 'divergent bookmark' of the form 'name@path' will
939 940 be created. Using :hg:`merge` will resolve the divergence.
940 941
941 942 Specifying bookmark as '.' to -m or -d options is equivalent to specifying
942 943 the active bookmark's name.
943 944
944 945 A bookmark named '@' has the special property that :hg:`clone` will
945 946 check it out by default if it exists.
946 947
947 948 .. container:: verbose
948 949
949 950 Examples:
950 951
951 952 - create an active bookmark for a new line of development::
952 953
953 954 hg book new-feature
954 955
955 956 - create an inactive bookmark as a place marker::
956 957
957 958 hg book -i reviewed
958 959
959 960 - create an inactive bookmark on another changeset::
960 961
961 962 hg book -r .^ tested
962 963
963 964 - rename bookmark turkey to dinner::
964 965
965 966 hg book -m turkey dinner
966 967
967 968 - move the '@' bookmark from another branch::
968 969
969 970 hg book -f @
970 971 '''
971 972 force = opts.get(r'force')
972 973 rev = opts.get(r'rev')
973 974 delete = opts.get(r'delete')
974 975 rename = opts.get(r'rename')
975 976 inactive = opts.get(r'inactive')
976 977
977 978 if delete and rename:
978 979 raise error.Abort(_("--delete and --rename are incompatible"))
979 980 if delete and rev:
980 981 raise error.Abort(_("--rev is incompatible with --delete"))
981 982 if rename and rev:
982 983 raise error.Abort(_("--rev is incompatible with --rename"))
983 984 if not names and (delete or rev):
984 985 raise error.Abort(_("bookmark name required"))
985 986
986 987 if delete or rename or names or inactive:
987 988 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
988 989 if delete:
989 990 names = pycompat.maplist(repo._bookmarks.expandname, names)
990 991 bookmarks.delete(repo, tr, names)
991 992 elif rename:
992 993 if not names:
993 994 raise error.Abort(_("new bookmark name required"))
994 995 elif len(names) > 1:
995 996 raise error.Abort(_("only one new bookmark name allowed"))
996 997 rename = repo._bookmarks.expandname(rename)
997 998 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
998 999 elif names:
999 1000 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
1000 1001 elif inactive:
1001 1002 if len(repo._bookmarks) == 0:
1002 1003 ui.status(_("no bookmarks set\n"))
1003 1004 elif not repo._activebookmark:
1004 1005 ui.status(_("no active bookmark\n"))
1005 1006 else:
1006 1007 bookmarks.deactivate(repo)
1007 1008 else: # show bookmarks
1008 1009 bookmarks.printbookmarks(ui, repo, **opts)
1009 1010
1010 1011 @command('branch',
1011 1012 [('f', 'force', None,
1012 1013 _('set branch name even if it shadows an existing branch')),
1013 1014 ('C', 'clean', None, _('reset branch name to parent branch name')),
1014 1015 ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
1015 1016 ],
1016 1017 _('[-fC] [NAME]'))
1017 1018 def branch(ui, repo, label=None, **opts):
1018 1019 """set or show the current branch name
1019 1020
1020 1021 .. note::
1021 1022
1022 1023 Branch names are permanent and global. Use :hg:`bookmark` to create a
1023 1024 light-weight bookmark instead. See :hg:`help glossary` for more
1024 1025 information about named branches and bookmarks.
1025 1026
1026 1027 With no argument, show the current branch name. With one argument,
1027 1028 set the working directory branch name (the branch will not exist
1028 1029 in the repository until the next commit). Standard practice
1029 1030 recommends that primary development take place on the 'default'
1030 1031 branch.
1031 1032
1032 1033 Unless -f/--force is specified, branch will not let you set a
1033 1034 branch name that already exists.
1034 1035
1035 1036 Use -C/--clean to reset the working directory branch to that of
1036 1037 the parent of the working directory, negating a previous branch
1037 1038 change.
1038 1039
1039 1040 Use the command :hg:`update` to switch to an existing branch. Use
1040 1041 :hg:`commit --close-branch` to mark this branch head as closed.
1041 1042 When all heads of a branch are closed, the branch will be
1042 1043 considered closed.
1043 1044
1044 1045 Returns 0 on success.
1045 1046 """
1046 1047 opts = pycompat.byteskwargs(opts)
1047 1048 revs = opts.get('rev')
1048 1049 if label:
1049 1050 label = label.strip()
1050 1051
1051 1052 if not opts.get('clean') and not label:
1052 1053 if revs:
1053 1054 raise error.Abort(_("no branch name specified for the revisions"))
1054 1055 ui.write("%s\n" % repo.dirstate.branch())
1055 1056 return
1056 1057
1057 1058 with repo.wlock():
1058 1059 if opts.get('clean'):
1059 1060 label = repo[None].p1().branch()
1060 1061 repo.dirstate.setbranch(label)
1061 1062 ui.status(_('reset working directory to branch %s\n') % label)
1062 1063 elif label:
1063 1064
1064 1065 scmutil.checknewlabel(repo, label, 'branch')
1065 1066 if revs:
1066 1067 return cmdutil.changebranch(ui, repo, revs, label)
1067 1068
1068 1069 if not opts.get('force') and label in repo.branchmap():
1069 1070 if label not in [p.branch() for p in repo[None].parents()]:
1070 1071 raise error.Abort(_('a branch of the same name already'
1071 1072 ' exists'),
1072 1073 # i18n: "it" refers to an existing branch
1073 1074 hint=_("use 'hg update' to switch to it"))
1074 1075
1075 1076 repo.dirstate.setbranch(label)
1076 1077 ui.status(_('marked working directory as branch %s\n') % label)
1077 1078
1078 1079 # find any open named branches aside from default
1079 1080 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1080 1081 if n != "default" and not c]
1081 1082 if not others:
1082 1083 ui.status(_('(branches are permanent and global, '
1083 1084 'did you want a bookmark?)\n'))
1084 1085
1085 1086 @command('branches',
1086 1087 [('a', 'active', False,
1087 1088 _('show only branches that have unmerged heads (DEPRECATED)')),
1088 1089 ('c', 'closed', False, _('show normal and closed branches')),
1089 1090 ] + formatteropts,
1090 1091 _('[-c]'), cmdtype=readonly)
1091 1092 def branches(ui, repo, active=False, closed=False, **opts):
1092 1093 """list repository named branches
1093 1094
1094 1095 List the repository's named branches, indicating which ones are
1095 1096 inactive. If -c/--closed is specified, also list branches which have
1096 1097 been marked closed (see :hg:`commit --close-branch`).
1097 1098
1098 1099 Use the command :hg:`update` to switch to an existing branch.
1099 1100
1100 1101 Returns 0.
1101 1102 """
1102 1103
1103 1104 opts = pycompat.byteskwargs(opts)
1104 1105 ui.pager('branches')
1105 1106 fm = ui.formatter('branches', opts)
1106 1107 hexfunc = fm.hexfunc
1107 1108
1108 1109 allheads = set(repo.heads())
1109 1110 branches = []
1110 1111 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1111 1112 isactive = False
1112 1113 if not isclosed:
1113 1114 openheads = set(repo.branchmap().iteropen(heads))
1114 1115 isactive = bool(openheads & allheads)
1115 1116 branches.append((tag, repo[tip], isactive, not isclosed))
1116 1117 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1117 1118 reverse=True)
1118 1119
1119 1120 for tag, ctx, isactive, isopen in branches:
1120 1121 if active and not isactive:
1121 1122 continue
1122 1123 if isactive:
1123 1124 label = 'branches.active'
1124 1125 notice = ''
1125 1126 elif not isopen:
1126 1127 if not closed:
1127 1128 continue
1128 1129 label = 'branches.closed'
1129 1130 notice = _(' (closed)')
1130 1131 else:
1131 1132 label = 'branches.inactive'
1132 1133 notice = _(' (inactive)')
1133 1134 current = (tag == repo.dirstate.branch())
1134 1135 if current:
1135 1136 label = 'branches.current'
1136 1137
1137 1138 fm.startitem()
1138 1139 fm.write('branch', '%s', tag, label=label)
1139 1140 rev = ctx.rev()
1140 1141 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1141 1142 fmt = ' ' * padsize + ' %d:%s'
1142 1143 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1143 1144 label='log.changeset changeset.%s' % ctx.phasestr())
1144 1145 fm.context(ctx=ctx)
1145 1146 fm.data(active=isactive, closed=not isopen, current=current)
1146 1147 if not ui.quiet:
1147 1148 fm.plain(notice)
1148 1149 fm.plain('\n')
1149 1150 fm.end()
1150 1151
1151 1152 @command('bundle',
1152 1153 [('f', 'force', None, _('run even when the destination is unrelated')),
1153 1154 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1154 1155 _('REV')),
1155 1156 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1156 1157 _('BRANCH')),
1157 1158 ('', 'base', [],
1158 1159 _('a base changeset assumed to be available at the destination'),
1159 1160 _('REV')),
1160 1161 ('a', 'all', None, _('bundle all changesets in the repository')),
1161 1162 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1162 1163 ] + remoteopts,
1163 1164 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1164 1165 def bundle(ui, repo, fname, dest=None, **opts):
1165 1166 """create a bundle file
1166 1167
1167 1168 Generate a bundle file containing data to be transferred to another
1168 1169 repository.
1169 1170
1170 1171 To create a bundle containing all changesets, use -a/--all
1171 1172 (or --base null). Otherwise, hg assumes the destination will have
1172 1173 all the nodes you specify with --base parameters. If neither --all
1173 1174 nor --base is given, hg compares the repository with the destination
1174 1175 (the DEST argument, or default-push/default if no destination is
1175 1176 specified) and bundles only the changesets missing from it.
1176 1177
1177 1178 You can change bundle format with the -t/--type option. See
1178 1179 :hg:`help bundlespec` for documentation on this format. By default,
1179 1180 the most appropriate format is used and compression defaults to
1180 1181 bzip2.
1181 1182
1182 1183 The bundle file can then be transferred using conventional means
1183 1184 and applied to another repository with the unbundle or pull
1184 1185 command. This is useful when direct push and pull are not
1185 1186 available or when exporting an entire repository is undesirable.
1186 1187
1187 1188 Applying bundles preserves all changeset contents including
1188 1189 permissions, copy/rename information, and revision history.
1189 1190
1190 1191 Returns 0 on success, 1 if no changes found.
1191 1192 """
1192 1193 opts = pycompat.byteskwargs(opts)
1193 1194 revs = None
1194 1195 if 'rev' in opts:
1195 1196 revstrings = opts['rev']
1196 1197 revs = scmutil.revrange(repo, revstrings)
1197 1198 if revstrings and not revs:
1198 1199 raise error.Abort(_('no commits to bundle'))
1199 1200
1200 1201 bundletype = opts.get('type', 'bzip2').lower()
1201 1202 try:
1202 1203 bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
1203 1204 except error.UnsupportedBundleSpecification as e:
1204 1205 raise error.Abort(pycompat.bytestr(e),
1205 1206 hint=_("see 'hg help bundlespec' for supported "
1206 1207 "values for --type"))
1207 1208 cgversion = bundlespec.contentopts["cg.version"]
1208 1209
1209 1210 # Packed bundles are a pseudo bundle format for now.
1210 1211 if cgversion == 's1':
1211 1212 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1212 1213 hint=_("use 'hg debugcreatestreamclonebundle'"))
1213 1214
1214 1215 if opts.get('all'):
1215 1216 if dest:
1216 1217 raise error.Abort(_("--all is incompatible with specifying "
1217 1218 "a destination"))
1218 1219 if opts.get('base'):
1219 1220 ui.warn(_("ignoring --base because --all was specified\n"))
1220 1221 base = ['null']
1221 1222 else:
1222 1223 base = scmutil.revrange(repo, opts.get('base'))
1223 1224 if cgversion not in changegroup.supportedoutgoingversions(repo):
1224 1225 raise error.Abort(_("repository does not support bundle version %s") %
1225 1226 cgversion)
1226 1227
1227 1228 if base:
1228 1229 if dest:
1229 1230 raise error.Abort(_("--base is incompatible with specifying "
1230 1231 "a destination"))
1231 1232 common = [repo.lookup(rev) for rev in base]
1232 1233 heads = [repo.lookup(r) for r in revs] if revs else None
1233 1234 outgoing = discovery.outgoing(repo, common, heads)
1234 1235 else:
1235 1236 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1236 1237 dest, branches = hg.parseurl(dest, opts.get('branch'))
1237 1238 other = hg.peer(repo, opts, dest)
1238 1239 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1239 1240 heads = revs and map(repo.lookup, revs) or revs
1240 1241 outgoing = discovery.findcommonoutgoing(repo, other,
1241 1242 onlyheads=heads,
1242 1243 force=opts.get('force'),
1243 1244 portable=True)
1244 1245
1245 1246 if not outgoing.missing:
1246 1247 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1247 1248 return 1
1248 1249
1249 1250 bcompression = bundlespec.compression
1250 1251 if cgversion == '01': #bundle1
1251 1252 if bcompression is None:
1252 1253 bcompression = 'UN'
1253 1254 bversion = 'HG10' + bcompression
1254 1255 bcompression = None
1255 1256 elif cgversion in ('02', '03'):
1256 1257 bversion = 'HG20'
1257 1258 else:
1258 1259 raise error.ProgrammingError(
1259 1260 'bundle: unexpected changegroup version %s' % cgversion)
1260 1261
1261 1262 # TODO compression options should be derived from bundlespec parsing.
1262 1263 # This is a temporary hack to allow adjusting bundle compression
1263 1264 # level without a) formalizing the bundlespec changes to declare it
1264 1265 # b) introducing a command flag.
1265 1266 compopts = {}
1266 1267 complevel = ui.configint('experimental', 'bundlecomplevel')
1267 1268 if complevel is not None:
1268 1269 compopts['level'] = complevel
1269 1270
1270 1271 # Allow overriding the bundling of obsmarker in phases through
1271 1272 # configuration while we don't have a bundle version that include them
1272 1273 if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
1273 1274 bundlespec.contentopts['obsolescence'] = True
1274 1275 if repo.ui.configbool('experimental', 'bundle-phases'):
1275 1276 bundlespec.contentopts['phases'] = True
1276 1277
1277 1278 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1278 1279 bundlespec.contentopts, compression=bcompression,
1279 1280 compopts=compopts)
1280 1281
1281 1282 @command('cat',
1282 1283 [('o', 'output', '',
1283 1284 _('print output to file with formatted name'), _('FORMAT')),
1284 1285 ('r', 'rev', '', _('print the given revision'), _('REV')),
1285 1286 ('', 'decode', None, _('apply any matching decode filter')),
1286 1287 ] + walkopts + formatteropts,
1287 1288 _('[OPTION]... FILE...'),
1288 1289 inferrepo=True, cmdtype=readonly)
1289 1290 def cat(ui, repo, file1, *pats, **opts):
1290 1291 """output the current or given revision of files
1291 1292
1292 1293 Print the specified files as they were at the given revision. If
1293 1294 no revision is given, the parent of the working directory is used.
1294 1295
1295 1296 Output may be to a file, in which case the name of the file is
1296 1297 given using a template string. See :hg:`help templates`. In addition
1297 1298 to the common template keywords, the following formatting rules are
1298 1299 supported:
1299 1300
1300 1301 :``%%``: literal "%" character
1301 1302 :``%s``: basename of file being printed
1302 1303 :``%d``: dirname of file being printed, or '.' if in repository root
1303 1304 :``%p``: root-relative path name of file being printed
1304 1305 :``%H``: changeset hash (40 hexadecimal digits)
1305 1306 :``%R``: changeset revision number
1306 1307 :``%h``: short-form changeset hash (12 hexadecimal digits)
1307 1308 :``%r``: zero-padded changeset revision number
1308 1309 :``%b``: basename of the exporting repository
1309 1310 :``\\``: literal "\\" character
1310 1311
1311 1312 Returns 0 on success.
1312 1313 """
1313 1314 opts = pycompat.byteskwargs(opts)
1314 1315 rev = opts.get('rev')
1315 1316 if rev:
1316 1317 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
1317 1318 ctx = scmutil.revsingle(repo, rev)
1318 1319 m = scmutil.match(ctx, (file1,) + pats, opts)
1319 1320 fntemplate = opts.pop('output', '')
1320 1321 if cmdutil.isstdiofilename(fntemplate):
1321 1322 fntemplate = ''
1322 1323
1323 1324 if fntemplate:
1324 1325 fm = formatter.nullformatter(ui, 'cat')
1325 1326 else:
1326 1327 ui.pager('cat')
1327 1328 fm = ui.formatter('cat', opts)
1328 1329 with fm:
1329 1330 return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
1330 1331 **pycompat.strkwargs(opts))
1331 1332
1332 1333 @command('^clone',
1333 1334 [('U', 'noupdate', None, _('the clone will include an empty working '
1334 1335 'directory (only a repository)')),
1335 1336 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1336 1337 _('REV')),
1337 1338 ('r', 'rev', [], _('do not clone everything, but include this changeset'
1338 1339 ' and its ancestors'), _('REV')),
1339 1340 ('b', 'branch', [], _('do not clone everything, but include this branch\'s'
1340 1341 ' changesets and their ancestors'), _('BRANCH')),
1341 1342 ('', 'pull', None, _('use pull protocol to copy metadata')),
1342 1343 ('', 'uncompressed', None,
1343 1344 _('an alias to --stream (DEPRECATED)')),
1344 1345 ('', 'stream', None,
1345 1346 _('clone with minimal data processing')),
1346 1347 ] + remoteopts,
1347 1348 _('[OPTION]... SOURCE [DEST]'),
1348 1349 norepo=True)
1349 1350 def clone(ui, source, dest=None, **opts):
1350 1351 """make a copy of an existing repository
1351 1352
1352 1353 Create a copy of an existing repository in a new directory.
1353 1354
1354 1355 If no destination directory name is specified, it defaults to the
1355 1356 basename of the source.
1356 1357
1357 1358 The location of the source is added to the new repository's
1358 1359 ``.hg/hgrc`` file, as the default to be used for future pulls.
1359 1360
1360 1361 Only local paths and ``ssh://`` URLs are supported as
1361 1362 destinations. For ``ssh://`` destinations, no working directory or
1362 1363 ``.hg/hgrc`` will be created on the remote side.
1363 1364
1364 1365 If the source repository has a bookmark called '@' set, that
1365 1366 revision will be checked out in the new repository by default.
1366 1367
1367 1368 To check out a particular version, use -u/--update, or
1368 1369 -U/--noupdate to create a clone with no working directory.
1369 1370
1370 1371 To pull only a subset of changesets, specify one or more revisions
1371 1372 identifiers with -r/--rev or branches with -b/--branch. The
1372 1373 resulting clone will contain only the specified changesets and
1373 1374 their ancestors. These options (or 'clone src#rev dest') imply
1374 1375 --pull, even for local source repositories.
1375 1376
1376 1377 In normal clone mode, the remote normalizes repository data into a common
1377 1378 exchange format and the receiving end translates this data into its local
1378 1379 storage format. --stream activates a different clone mode that essentially
1379 1380 copies repository files from the remote with minimal data processing. This
1380 1381 significantly reduces the CPU cost of a clone both remotely and locally.
1381 1382 However, it often increases the transferred data size by 30-40%. This can
1382 1383 result in substantially faster clones where I/O throughput is plentiful,
1383 1384 especially for larger repositories. A side-effect of --stream clones is
1384 1385 that storage settings and requirements on the remote are applied locally:
1385 1386 a modern client may inherit legacy or inefficient storage used by the
1386 1387 remote, or a legacy Mercurial client may not be able to clone from a
1387 1388 modern Mercurial remote.
1388 1389
1389 1390 .. note::
1390 1391
1391 1392 Specifying a tag will include the tagged changeset but not the
1392 1393 changeset containing the tag.
1393 1394
1394 1395 .. container:: verbose
1395 1396
1396 1397 For efficiency, hardlinks are used for cloning whenever the
1397 1398 source and destination are on the same filesystem (note this
1398 1399 applies only to the repository data, not to the working
1399 1400 directory). Some filesystems, such as AFS, implement hardlinking
1400 1401 incorrectly, but do not report errors. In these cases, use the
1401 1402 --pull option to avoid hardlinking.
1402 1403
1403 1404 Mercurial will update the working directory to the first applicable
1404 1405 revision from this list:
1405 1406
1406 1407 a) null if -U or the source repository has no changesets
1407 1408 b) if -u . and the source repository is local, the first parent of
1408 1409 the source repository's working directory
1409 1410 c) the changeset specified with -u (if a branch name, this means the
1410 1411 latest head of that branch)
1411 1412 d) the changeset specified with -r
1412 1413 e) the tipmost head specified with -b
1413 1414 f) the tipmost head specified with the url#branch source syntax
1414 1415 g) the revision marked with the '@' bookmark, if present
1415 1416 h) the tipmost head of the default branch
1416 1417 i) tip
1417 1418
1418 1419 When cloning from servers that support it, Mercurial may fetch
1419 1420 pre-generated data from a server-advertised URL. When this is done,
1420 1421 hooks operating on incoming changesets and changegroups may fire twice,
1421 1422 once for the bundle fetched from the URL and another for any additional
1422 1423 data not fetched from this URL. In addition, if an error occurs, the
1423 1424 repository may be rolled back to a partial clone. This behavior may
1424 1425 change in future releases. See :hg:`help -e clonebundles` for more.
1425 1426
1426 1427 Examples:
1427 1428
1428 1429 - clone a remote repository to a new directory named hg/::
1429 1430
1430 1431 hg clone https://www.mercurial-scm.org/repo/hg/
1431 1432
1432 1433 - create a lightweight local clone::
1433 1434
1434 1435 hg clone project/ project-feature/
1435 1436
1436 1437 - clone from an absolute path on an ssh server (note double-slash)::
1437 1438
1438 1439 hg clone ssh://user@server//home/projects/alpha/
1439 1440
1440 1441 - do a streaming clone while checking out a specified version::
1441 1442
1442 1443 hg clone --stream http://server/repo -u 1.5
1443 1444
1444 1445 - create a repository without changesets after a particular revision::
1445 1446
1446 1447 hg clone -r 04e544 experimental/ good/
1447 1448
1448 1449 - clone (and track) a particular named branch::
1449 1450
1450 1451 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1451 1452
1452 1453 See :hg:`help urls` for details on specifying URLs.
1453 1454
1454 1455 Returns 0 on success.
1455 1456 """
1456 1457 opts = pycompat.byteskwargs(opts)
1457 1458 if opts.get('noupdate') and opts.get('updaterev'):
1458 1459 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1459 1460
1460 1461 r = hg.clone(ui, opts, source, dest,
1461 1462 pull=opts.get('pull'),
1462 1463 stream=opts.get('stream') or opts.get('uncompressed'),
1463 1464 revs=opts.get('rev'),
1464 1465 update=opts.get('updaterev') or not opts.get('noupdate'),
1465 1466 branch=opts.get('branch'),
1466 1467 shareopts=opts.get('shareopts'))
1467 1468
1468 1469 return r is None
1469 1470
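# Editor's illustrative sketch (not part of the original module): the clone()
# docstring above lists, in order a) through i), how the revision to check
# out after a clone is chosen.  The helper below is only a restatement of
# that documented priority on plain pre-computed sample values; the real
# decision is made inside hg.clone() and friends.
def _demo_checkout_choice(noupdate, updaterev, revs, branch_heads,
                          at_bookmark, default_head, tip):
    # Each argument is a sample value; None/empty means "not available".
    if noupdate:
        return 'null'            # a) -U/--noupdate (or source has no changesets)
    if updaterev:
        return updaterev         # b)/c) the changeset given with -u (or -u .)
    if revs:
        return revs[0]           # d) a changeset given with -r
    if branch_heads:
        return branch_heads[0]   # e)/f) tipmost head of -b or url#branch
    if at_bookmark:
        return at_bookmark       # g) the revision carrying the '@' bookmark
    if default_head:
        return default_head      # h) tipmost head of the default branch
    return tip                   # i) tip
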
1470 1471 @command('^commit|ci',
1471 1472 [('A', 'addremove', None,
1472 1473 _('mark new/missing files as added/removed before committing')),
1473 1474 ('', 'close-branch', None,
1474 1475 _('mark a branch head as closed')),
1475 1476 ('', 'amend', None, _('amend the parent of the working directory')),
1476 1477 ('s', 'secret', None, _('use the secret phase for committing')),
1477 1478 ('e', 'edit', None, _('invoke editor on commit messages')),
1478 1479 ('i', 'interactive', None, _('use interactive mode')),
1479 1480 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1480 1481 _('[OPTION]... [FILE]...'),
1481 1482 inferrepo=True)
1482 1483 def commit(ui, repo, *pats, **opts):
1483 1484 """commit the specified files or all outstanding changes
1484 1485
1485 1486 Commit changes to the given files into the repository. Unlike a
1486 1487 centralized SCM, this operation is a local operation. See
1487 1488 :hg:`push` for a way to actively distribute your changes.
1488 1489
1489 1490 If a list of files is omitted, all changes reported by :hg:`status`
1490 1491 will be committed.
1491 1492
1492 1493 If you are committing the result of a merge, do not provide any
1493 1494 filenames or -I/-X filters.
1494 1495
1495 1496 If no commit message is specified, Mercurial starts your
1496 1497 configured editor where you can enter a message. In case your
1497 1498 commit fails, you will find a backup of your message in
1498 1499 ``.hg/last-message.txt``.
1499 1500
1500 1501 The --close-branch flag can be used to mark the current branch
1501 1502 head closed. When all heads of a branch are closed, the branch
1502 1503 will be considered closed and no longer listed.
1503 1504
1504 1505 The --amend flag can be used to amend the parent of the
1505 1506 working directory with a new commit that contains the changes
1506 1507 in the parent in addition to those currently reported by :hg:`status`,
1507 1508 if there are any. The old commit is stored in a backup bundle in
1508 1509 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1509 1510 on how to restore it).
1510 1511
1511 1512 Message, user and date are taken from the amended commit unless
1512 1513 specified. When a message isn't specified on the command line,
1513 1514 the editor will open with the message of the amended commit.
1514 1515
1515 1516 It is not possible to amend public changesets (see :hg:`help phases`)
1516 1517 or changesets that have children.
1517 1518
1518 1519 See :hg:`help dates` for a list of formats valid for -d/--date.
1519 1520
1520 1521 Returns 0 on success, 1 if nothing changed.
1521 1522
1522 1523 .. container:: verbose
1523 1524
1524 1525 Examples:
1525 1526
1526 1527 - commit all files ending in .py::
1527 1528
1528 1529 hg commit --include "set:**.py"
1529 1530
1530 1531 - commit all non-binary files::
1531 1532
1532 1533 hg commit --exclude "set:binary()"
1533 1534
1534 1535 - amend the current commit and set the date to now::
1535 1536
1536 1537 hg commit --amend --date now
1537 1538 """
1538 1539 wlock = lock = None
1539 1540 try:
1540 1541 wlock = repo.wlock()
1541 1542 lock = repo.lock()
1542 1543 return _docommit(ui, repo, *pats, **opts)
1543 1544 finally:
1544 1545 release(lock, wlock)
1545 1546
1546 1547 def _docommit(ui, repo, *pats, **opts):
1547 1548 if opts.get(r'interactive'):
1548 1549 opts.pop(r'interactive')
1549 1550 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1550 1551 cmdutil.recordfilter, *pats,
1551 1552 **opts)
1552 1553 # ret can be 0 (no changes to record) or the value returned by
1553 1554 # commit(), 1 if nothing changed or None on success.
1554 1555 return 1 if ret == 0 else ret
1555 1556
1556 1557 opts = pycompat.byteskwargs(opts)
1557 1558 if opts.get('subrepos'):
1558 1559 if opts.get('amend'):
1559 1560 raise error.Abort(_('cannot amend with --subrepos'))
1560 1561 # Let --subrepos on the command line override config setting.
1561 1562 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1562 1563
1563 1564 cmdutil.checkunfinished(repo, commit=True)
1564 1565
1565 1566 branch = repo[None].branch()
1566 1567 bheads = repo.branchheads(branch)
1567 1568
1568 1569 extra = {}
1569 1570 if opts.get('close_branch'):
1570 1571 extra['close'] = '1'
1571 1572
1572 1573 if not bheads:
1573 1574 raise error.Abort(_('can only close branch heads'))
1574 1575 elif opts.get('amend'):
1575 1576 if repo[None].parents()[0].p1().branch() != branch and \
1576 1577 repo[None].parents()[0].p2().branch() != branch:
1577 1578 raise error.Abort(_('can only close branch heads'))
1578 1579
1579 1580 if opts.get('amend'):
1580 1581 if ui.configbool('ui', 'commitsubrepos'):
1581 1582 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1582 1583
1583 1584 old = repo['.']
1584 1585 rewriteutil.precheck(repo, [old.rev()], 'amend')
1585 1586
1586 1587 # Currently histedit gets confused if an amend happens while histedit
1587 1588 # is in progress. Since we have a checkunfinished command, we are
1588 1589 # temporarily honoring it.
1589 1590 #
1590 1591 # Note: eventually this guard will be removed. Please do not expect
1591 1592 # this behavior to remain.
1592 1593 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1593 1594 cmdutil.checkunfinished(repo)
1594 1595
1595 1596 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
1596 1597 if node == old.node():
1597 1598 ui.status(_("nothing changed\n"))
1598 1599 return 1
1599 1600 else:
1600 1601 def commitfunc(ui, repo, message, match, opts):
1601 1602 overrides = {}
1602 1603 if opts.get('secret'):
1603 1604 overrides[('phases', 'new-commit')] = 'secret'
1604 1605
1605 1606 baseui = repo.baseui
1606 1607 with baseui.configoverride(overrides, 'commit'):
1607 1608 with ui.configoverride(overrides, 'commit'):
1608 1609 editform = cmdutil.mergeeditform(repo[None],
1609 1610 'commit.normal')
1610 1611 editor = cmdutil.getcommiteditor(
1611 1612 editform=editform, **pycompat.strkwargs(opts))
1612 1613 return repo.commit(message,
1613 1614 opts.get('user'),
1614 1615 opts.get('date'),
1615 1616 match,
1616 1617 editor=editor,
1617 1618 extra=extra)
1618 1619
1619 1620 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1620 1621
1621 1622 if not node:
1622 1623 stat = cmdutil.postcommitstatus(repo, pats, opts)
1623 1624 if stat[3]:
1624 1625 ui.status(_("nothing changed (%d missing files, see "
1625 1626 "'hg status')\n") % len(stat[3]))
1626 1627 else:
1627 1628 ui.status(_("nothing changed\n"))
1628 1629 return 1
1629 1630
1630 1631 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1631 1632
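# Editor's illustrative sketch (not part of the original module): commit()
# above acquires the working-copy lock before the store lock and releases
# them in reverse order through release(lock, wlock).  Other commands in this
# file (e.g. copy() and graft()) express the same ordering with context
# managers; an equivalent shape for the commit() wrapper would be roughly:
def _demo_locked_commit(ui, repo, *pats, **opts):
    # wlock (working directory) first, then lock (store); both are released
    # in reverse order when the with-blocks exit, even on exceptions.
    with repo.wlock():
        with repo.lock():
            return _docommit(ui, repo, *pats, **opts)
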
1632 1633 @command('config|showconfig|debugconfig',
1633 1634 [('u', 'untrusted', None, _('show untrusted configuration options')),
1634 1635 ('e', 'edit', None, _('edit user config')),
1635 1636 ('l', 'local', None, _('edit repository config')),
1636 1637 ('g', 'global', None, _('edit global config'))] + formatteropts,
1637 1638 _('[-u] [NAME]...'),
1638 1639 optionalrepo=True, cmdtype=readonly)
1639 1640 def config(ui, repo, *values, **opts):
1640 1641 """show combined config settings from all hgrc files
1641 1642
1642 1643 With no arguments, print names and values of all config items.
1643 1644
1644 1645 With one argument of the form section.name, print just the value
1645 1646 of that config item.
1646 1647
1647 1648 With multiple arguments, print names and values of all config
1648 1649 items with matching section names or section.names.
1649 1650
1650 1651 With --edit, start an editor on the user-level config file. With
1651 1652 --global, edit the system-wide config file. With --local, edit the
1652 1653 repository-level config file.
1653 1654
1654 1655 With --debug, the source (filename and line number) is printed
1655 1656 for each config item.
1656 1657
1657 1658 See :hg:`help config` for more information about config files.
1658 1659
1659 1660 Returns 0 on success, 1 if NAME does not exist.
1660 1661
1661 1662 """
1662 1663
1663 1664 opts = pycompat.byteskwargs(opts)
1664 1665 if opts.get('edit') or opts.get('local') or opts.get('global'):
1665 1666 if opts.get('local') and opts.get('global'):
1666 1667 raise error.Abort(_("can't use --local and --global together"))
1667 1668
1668 1669 if opts.get('local'):
1669 1670 if not repo:
1670 1671 raise error.Abort(_("can't use --local outside a repository"))
1671 1672 paths = [repo.vfs.join('hgrc')]
1672 1673 elif opts.get('global'):
1673 1674 paths = rcutil.systemrcpath()
1674 1675 else:
1675 1676 paths = rcutil.userrcpath()
1676 1677
1677 1678 for f in paths:
1678 1679 if os.path.exists(f):
1679 1680 break
1680 1681 else:
1681 1682 if opts.get('global'):
1682 1683 samplehgrc = uimod.samplehgrcs['global']
1683 1684 elif opts.get('local'):
1684 1685 samplehgrc = uimod.samplehgrcs['local']
1685 1686 else:
1686 1687 samplehgrc = uimod.samplehgrcs['user']
1687 1688
1688 1689 f = paths[0]
1689 1690 fp = open(f, "wb")
1690 1691 fp.write(util.tonativeeol(samplehgrc))
1691 1692 fp.close()
1692 1693
1693 1694 editor = ui.geteditor()
1694 1695 ui.system("%s \"%s\"" % (editor, f),
1695 1696 onerr=error.Abort, errprefix=_("edit failed"),
1696 1697 blockedtag='config_edit')
1697 1698 return
1698 1699 ui.pager('config')
1699 1700 fm = ui.formatter('config', opts)
1700 1701 for t, f in rcutil.rccomponents():
1701 1702 if t == 'path':
1702 1703 ui.debug('read config from: %s\n' % f)
1703 1704 elif t == 'items':
1704 1705 for section, name, value, source in f:
1705 1706 ui.debug('set config by: %s\n' % source)
1706 1707 else:
1707 1708 raise error.ProgrammingError('unknown rctype: %s' % t)
1708 1709 untrusted = bool(opts.get('untrusted'))
1709 1710
1710 1711 selsections = selentries = []
1711 1712 if values:
1712 1713 selsections = [v for v in values if '.' not in v]
1713 1714 selentries = [v for v in values if '.' in v]
1714 1715 uniquesel = (len(selentries) == 1 and not selsections)
1715 1716 selsections = set(selsections)
1716 1717 selentries = set(selentries)
1717 1718
1718 1719 matched = False
1719 1720 for section, name, value in ui.walkconfig(untrusted=untrusted):
1720 1721 source = ui.configsource(section, name, untrusted)
1721 1722 value = pycompat.bytestr(value)
1722 1723 if fm.isplain():
1723 1724 source = source or 'none'
1724 1725 value = value.replace('\n', '\\n')
1725 1726 entryname = section + '.' + name
1726 1727 if values and not (section in selsections or entryname in selentries):
1727 1728 continue
1728 1729 fm.startitem()
1729 1730 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1730 1731 if uniquesel:
1731 1732 fm.data(name=entryname)
1732 1733 fm.write('value', '%s\n', value)
1733 1734 else:
1734 1735 fm.write('name value', '%s=%s\n', entryname, value)
1735 1736 matched = True
1736 1737 fm.end()
1737 1738 if matched:
1738 1739 return 0
1739 1740 return 1
1740 1741
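# Editor's illustrative sketch (not part of the original module): config()
# above filters ui.walkconfig() output by whole sections ("ui") and by fully
# qualified entries ("ui.username").  The generator below reproduces just
# that filtering rule on plain (section, name) pairs and sample arguments.
def _demo_config_filter(values, items):
    # values: command-line arguments, e.g. ['ui', 'paths.default']
    # items:  iterable of (section, name) pairs to filter
    selsections = set(v for v in values if '.' not in v)
    selentries = set(v for v in values if '.' in v)
    for section, name in items:
        entryname = section + '.' + name
        if not values or section in selsections or entryname in selentries:
            yield entryname

# Example (hypothetical items):
#   list(_demo_config_filter(['ui', 'paths.default'],
#                            [('ui', 'username'), ('paths', 'default'),
#                             ('web', 'port')]))
#   -> ['ui.username', 'paths.default']
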
1741 1742 @command('copy|cp',
1742 1743 [('A', 'after', None, _('record a copy that has already occurred')),
1743 1744 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1744 1745 ] + walkopts + dryrunopts,
1745 1746 _('[OPTION]... [SOURCE]... DEST'))
1746 1747 def copy(ui, repo, *pats, **opts):
1747 1748 """mark files as copied for the next commit
1748 1749
1749 1750 Mark dest as having copies of source files. If dest is a
1750 1751 directory, copies are put in that directory. If dest is a file,
1751 1752 the source must be a single file.
1752 1753
1753 1754 By default, this command copies the contents of files as they
1754 1755 exist in the working directory. If invoked with -A/--after, the
1755 1756 operation is recorded, but no copying is performed.
1756 1757
1757 1758 This command takes effect with the next commit. To undo a copy
1758 1759 before that, see :hg:`revert`.
1759 1760
1760 1761 Returns 0 on success, 1 if errors are encountered.
1761 1762 """
1762 1763 opts = pycompat.byteskwargs(opts)
1763 1764 with repo.wlock(False):
1764 1765 return cmdutil.copy(ui, repo, pats, opts)
1765 1766
1766 1767 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
1767 1768 def debugcommands(ui, cmd='', *args):
1768 1769 """list all available commands and options"""
1769 1770 for cmd, vals in sorted(table.iteritems()):
1770 1771 cmd = cmd.split('|')[0].strip('^')
1771 1772 opts = ', '.join([i[1] for i in vals[1]])
1772 1773 ui.write('%s: %s\n' % (cmd, opts))
1773 1774
1774 1775 @command('debugcomplete',
1775 1776 [('o', 'options', None, _('show the command options'))],
1776 1777 _('[-o] CMD'),
1777 1778 norepo=True)
1778 1779 def debugcomplete(ui, cmd='', **opts):
1779 1780 """returns the completion list associated with the given command"""
1780 1781
1781 1782 if opts.get(r'options'):
1782 1783 options = []
1783 1784 otables = [globalopts]
1784 1785 if cmd:
1785 1786 aliases, entry = cmdutil.findcmd(cmd, table, False)
1786 1787 otables.append(entry[1])
1787 1788 for t in otables:
1788 1789 for o in t:
1789 1790 if "(DEPRECATED)" in o[3]:
1790 1791 continue
1791 1792 if o[0]:
1792 1793 options.append('-%s' % o[0])
1793 1794 options.append('--%s' % o[1])
1794 1795 ui.write("%s\n" % "\n".join(options))
1795 1796 return
1796 1797
1797 1798 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
1798 1799 if ui.verbose:
1799 1800 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1800 1801 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1801 1802
1802 1803 @command('^diff',
1803 1804 [('r', 'rev', [], _('revision'), _('REV')),
1804 1805 ('c', 'change', '', _('change made by revision'), _('REV'))
1805 1806 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1806 1807 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1807 1808 inferrepo=True, cmdtype=readonly)
1808 1809 def diff(ui, repo, *pats, **opts):
1809 1810 """diff repository (or selected files)
1810 1811
1811 1812 Show differences between revisions for the specified files.
1812 1813
1813 1814 Differences between files are shown using the unified diff format.
1814 1815
1815 1816 .. note::
1816 1817
1817 1818 :hg:`diff` may generate unexpected results for merges, as it will
1818 1819 default to comparing against the working directory's first
1819 1820 parent changeset if no revisions are specified.
1820 1821
1821 1822 When two revision arguments are given, then changes are shown
1822 1823 between those revisions. If only one revision is specified then
1823 1824 that revision is compared to the working directory, and, when no
1824 1825 revisions are specified, the working directory files are compared
1825 1826 to its first parent.
1826 1827
1827 1828 Alternatively you can specify -c/--change with a revision to see
1828 1829 the changes in that changeset relative to its first parent.
1829 1830
1830 1831 Without the -a/--text option, diff will avoid generating diffs of
1831 1832 files it detects as binary. With -a, diff will generate a diff
1832 1833 anyway, probably with undesirable results.
1833 1834
1834 1835 Use the -g/--git option to generate diffs in the git extended diff
1835 1836 format. For more information, read :hg:`help diffs`.
1836 1837
1837 1838 .. container:: verbose
1838 1839
1839 1840 Examples:
1840 1841
1841 1842 - compare a file in the current working directory to its parent::
1842 1843
1843 1844 hg diff foo.c
1844 1845
1845 1846 - compare two historical versions of a directory, with rename info::
1846 1847
1847 1848 hg diff --git -r 1.0:1.2 lib/
1848 1849
1849 1850 - get change stats relative to the last change on some date::
1850 1851
1851 1852 hg diff --stat -r "date('may 2')"
1852 1853
1853 1854 - diff all newly-added files that contain a keyword::
1854 1855
1855 1856 hg diff "set:added() and grep(GNU)"
1856 1857
1857 1858 - compare a revision and its parents::
1858 1859
1859 1860 hg diff -c 9353 # compare against first parent
1860 1861 hg diff -r 9353^:9353 # same using revset syntax
1861 1862 hg diff -r 9353^2:9353 # compare against the second parent
1862 1863
1863 1864 Returns 0 on success.
1864 1865 """
1865 1866
1866 1867 opts = pycompat.byteskwargs(opts)
1867 1868 revs = opts.get('rev')
1868 1869 change = opts.get('change')
1869 1870 stat = opts.get('stat')
1870 1871 reverse = opts.get('reverse')
1871 1872
1872 1873 if revs and change:
1873 1874 msg = _('cannot specify --rev and --change at the same time')
1874 1875 raise error.Abort(msg)
1875 1876 elif change:
1876 1877 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
1877 1878 ctx2 = scmutil.revsingle(repo, change, None)
1878 1879 ctx1 = ctx2.p1()
1879 1880 else:
1880 1881 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
1881 1882 ctx1, ctx2 = scmutil.revpair(repo, revs)
1882 1883 node1, node2 = ctx1.node(), ctx2.node()
1883 1884
1884 1885 if reverse:
1885 1886 node1, node2 = node2, node1
1886 1887
1887 1888 diffopts = patch.diffallopts(ui, opts)
1888 1889 m = scmutil.match(ctx2, pats, opts)
1889 1890 ui.pager('diff')
1890 1891 logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1891 1892 listsubrepos=opts.get('subrepos'),
1892 1893 root=opts.get('root'))
1893 1894
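# Editor's illustrative sketch (not part of the original module): as the
# -c/--change branch of diff() above shows, "hg diff -c REV" compares REV
# against its first parent, i.e. it behaves like "hg diff -r REV^ -r REV".
# Restated on an already-resolved changectx:
def _demo_change_to_nodepair(repo, change):
    ctx2 = repo[change]          # the revision named by -c/--change
    ctx1 = ctx2.p1()             # its first parent
    return ctx1.node(), ctx2.node()
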
1894 1895 @command('^export',
1895 1896 [('o', 'output', '',
1896 1897 _('print output to file with formatted name'), _('FORMAT')),
1897 1898 ('', 'switch-parent', None, _('diff against the second parent')),
1898 1899 ('r', 'rev', [], _('revisions to export'), _('REV')),
1899 1900 ] + diffopts,
1900 1901 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'), cmdtype=readonly)
1901 1902 def export(ui, repo, *changesets, **opts):
1902 1903 """dump the header and diffs for one or more changesets
1903 1904
1904 1905 Print the changeset header and diffs for one or more revisions.
1905 1906 If no revision is given, the parent of the working directory is used.
1906 1907
1907 1908 The information shown in the changeset header is: author, date,
1908 1909 branch name (if non-default), changeset hash, parent(s) and commit
1909 1910 comment.
1910 1911
1911 1912 .. note::
1912 1913
1913 1914 :hg:`export` may generate unexpected diff output for merge
1914 1915 changesets, as it will compare the merge changeset against its
1915 1916 first parent only.
1916 1917
1917 1918 Output may be to a file, in which case the name of the file is
1918 1919 given using a template string. See :hg:`help templates`. In addition
1919 1920 to the common template keywords, the following formatting rules are
1920 1921 supported:
1921 1922
1922 1923 :``%%``: literal "%" character
1923 1924 :``%H``: changeset hash (40 hexadecimal digits)
1924 1925 :``%N``: number of patches being generated
1925 1926 :``%R``: changeset revision number
1926 1927 :``%b``: basename of the exporting repository
1927 1928 :``%h``: short-form changeset hash (12 hexadecimal digits)
1928 1929 :``%m``: first line of the commit message (only alphanumeric characters)
1929 1930 :``%n``: zero-padded sequence number, starting at 1
1930 1931 :``%r``: zero-padded changeset revision number
1931 1932 :``\\``: literal "\\" character
1932 1933
1933 1934 Without the -a/--text option, export will avoid generating diffs
1934 1935 of files it detects as binary. With -a, export will generate a
1935 1936 diff anyway, probably with undesirable results.
1936 1937
1937 1938 Use the -g/--git option to generate diffs in the git extended diff
1938 1939 format. See :hg:`help diffs` for more information.
1939 1940
1940 1941 With the --switch-parent option, the diff will be against the
1941 1942 second parent. This can be useful for reviewing a merge.
1942 1943
1943 1944 .. container:: verbose
1944 1945
1945 1946 Examples:
1946 1947
1947 1948 - use export and import to transplant a bugfix to the current
1948 1949 branch::
1949 1950
1950 1951 hg export -r 9353 | hg import -
1951 1952
1952 1953 - export all the changesets between two revisions to a file with
1953 1954 rename information::
1954 1955
1955 1956 hg export --git -r 123:150 > changes.txt
1956 1957
1957 1958 - split outgoing changes into a series of patches with
1958 1959 descriptive names::
1959 1960
1960 1961 hg export -r "outgoing()" -o "%n-%m.patch"
1961 1962
1962 1963 Returns 0 on success.
1963 1964 """
1964 1965 opts = pycompat.byteskwargs(opts)
1965 1966 changesets += tuple(opts.get('rev', []))
1966 1967 if not changesets:
1967 1968 changesets = ['.']
1968 1969 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
1969 1970 revs = scmutil.revrange(repo, changesets)
1970 1971 if not revs:
1971 1972 raise error.Abort(_("export requires at least one changeset"))
1972 1973 if len(revs) > 1:
1973 1974 ui.note(_('exporting patches:\n'))
1974 1975 else:
1975 1976 ui.note(_('exporting patch:\n'))
1976 1977 ui.pager('export')
1977 1978 cmdutil.export(repo, revs, fntemplate=opts.get('output'),
1978 1979 switch_parent=opts.get('switch_parent'),
1979 1980 opts=patch.diffallopts(ui, opts))
1980 1981
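# Editor's illustrative sketch (not part of the original module): the export()
# docstring above documents %n (zero-padded sequence number) and %m (first
# line of the commit message, alphanumeric characters only) for -o.  The toy
# helper below shows how "%n-%m.patch"-style names could be derived for a
# series of sample descriptions; the real logic lives in cmdutil.export(),
# and Mercurial's exact %m sanitization may differ from this approximation.
def _demo_patch_names(descriptions, width=2):
    names = []
    for n, desc in enumerate(descriptions, start=1):
        firstline = desc.splitlines()[0] if desc else ''
        m = re.sub(r'[^A-Za-z0-9]+', '_', firstline).strip('_')
        names.append('%0*d-%s.patch' % (width, n, m))
    return names

# Example (hypothetical descriptions):
#   _demo_patch_names(['fix overflow in parser\n\ndetails...', 'add tests'])
#   -> ['01-fix_overflow_in_parser.patch', '02-add_tests.patch']
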
1981 1982 @command('files',
1982 1983 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
1983 1984 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
1984 1985 ] + walkopts + formatteropts + subrepoopts,
1985 1986 _('[OPTION]... [FILE]...'), cmdtype=readonly)
1986 1987 def files(ui, repo, *pats, **opts):
1987 1988 """list tracked files
1988 1989
1989 1990 Print files under Mercurial control in the working directory or
1990 1991 specified revision for given files (excluding removed files).
1991 1992 Files can be specified as filenames or filesets.
1992 1993
1993 1994 If no files are given to match, this command prints the names
1994 1995 of all files under Mercurial control.
1995 1996
1996 1997 .. container:: verbose
1997 1998
1998 1999 Examples:
1999 2000
2000 2001 - list all files under the current directory::
2001 2002
2002 2003 hg files .
2003 2004
2004 2005 - show sizes and flags for the current revision::
2005 2006
2006 2007 hg files -vr .
2007 2008
2008 2009 - list all files named README::
2009 2010
2010 2011 hg files -I "**/README"
2011 2012
2012 2013 - list all binary files::
2013 2014
2014 2015 hg files "set:binary()"
2015 2016
2016 2017 - find files containing a regular expression::
2017 2018
2018 2019 hg files "set:grep('bob')"
2019 2020
2020 2021 - search tracked file contents with xargs and grep::
2021 2022
2022 2023 hg files -0 | xargs -0 grep foo
2023 2024
2024 2025 See :hg:`help patterns` and :hg:`help filesets` for more information
2025 2026 on specifying file patterns.
2026 2027
2027 2028 Returns 0 if a match is found, 1 otherwise.
2028 2029
2029 2030 """
2030 2031
2031 2032 opts = pycompat.byteskwargs(opts)
2032 2033 rev = opts.get('rev')
2033 2034 if rev:
2034 2035 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2035 2036 ctx = scmutil.revsingle(repo, rev, None)
2036 2037
2037 2038 end = '\n'
2038 2039 if opts.get('print0'):
2039 2040 end = '\0'
2040 2041 fmt = '%s' + end
2041 2042
2042 2043 m = scmutil.match(ctx, pats, opts)
2043 2044 ui.pager('files')
2044 2045 with ui.formatter('files', opts) as fm:
2045 2046 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2046 2047
2047 2048 @command(
2048 2049 '^forget',
2049 2050 walkopts + dryrunopts,
2050 2051 _('[OPTION]... FILE...'), inferrepo=True)
2051 2052 def forget(ui, repo, *pats, **opts):
2052 2053 """forget the specified files on the next commit
2053 2054
2054 2055 Mark the specified files so they will no longer be tracked
2055 2056 after the next commit.
2056 2057
2057 2058 This only removes files from the current branch, not from the
2058 2059 entire project history, and it does not delete them from the
2059 2060 working directory.
2060 2061
2061 2062 To delete the file from the working directory, see :hg:`remove`.
2062 2063
2063 2064 To undo a forget before the next commit, see :hg:`add`.
2064 2065
2065 2066 .. container:: verbose
2066 2067
2067 2068 Examples:
2068 2069
2069 2070 - forget newly-added binary files::
2070 2071
2071 2072 hg forget "set:added() and binary()"
2072 2073
2073 2074 - forget files that would be excluded by .hgignore::
2074 2075
2075 2076 hg forget "set:hgignore()"
2076 2077
2077 2078 Returns 0 on success.
2078 2079 """
2079 2080
2080 2081 opts = pycompat.byteskwargs(opts)
2081 2082 if not pats:
2082 2083 raise error.Abort(_('no files specified'))
2083 2084
2084 2085 m = scmutil.match(repo[None], pats, opts)
2085 2086 dryrun = opts.get(r'dry_run')
2086 2087 rejected = cmdutil.forget(ui, repo, m, prefix="",
2087 2088 explicitonly=False, dryrun=dryrun)[0]
2088 2089 return rejected and 1 or 0
2089 2090
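# Editor's note (illustrative, not part of the original module): forget()
# above simply forwards -n/--dry-run to cmdutil.forget(); the trailing
# "return rejected and 1 or 0" is the pre-ternary spelling of
# "return 1 if rejected else 0", i.e. exit status 1 when any file was
# rejected and 0 otherwise.
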
2090 2091 @command(
2091 2092 'graft',
2092 2093 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2093 2094 ('c', 'continue', False, _('resume interrupted graft')),
2094 2095 ('e', 'edit', False, _('invoke editor on commit messages')),
2095 2096 ('', 'log', None, _('append graft info to log message')),
2096 2097 ('f', 'force', False, _('force graft')),
2097 2098 ('D', 'currentdate', False,
2098 2099 _('record the current date as commit date')),
2099 2100 ('U', 'currentuser', False,
2100 2101 _('record the current user as committer'))]
2101 2102 + commitopts2 + mergetoolopts + dryrunopts,
2102 2103 _('[OPTION]... [-r REV]... REV...'))
2103 2104 def graft(ui, repo, *revs, **opts):
2104 2105 '''copy changes from other branches onto the current branch
2105 2106
2106 2107 This command uses Mercurial's merge logic to copy individual
2107 2108 changes from other branches without merging branches in the
2108 2109 history graph. This is sometimes known as 'backporting' or
2109 2110 'cherry-picking'. By default, graft will copy user, date, and
2110 2111 description from the source changesets.
2111 2112
2112 2113 Changesets that are ancestors of the current revision, that have
2113 2114 already been grafted, or that are merges will be skipped.
2114 2115
2115 2116 If --log is specified, log messages will have a comment appended
2116 2117 of the form::
2117 2118
2118 2119 (grafted from CHANGESETHASH)
2119 2120
2120 2121 If --force is specified, revisions will be grafted even if they
2121 2122 are already ancestors of, or have been grafted to, the destination.
2122 2123 This is useful when the revisions have since been backed out.
2123 2124
2124 2125 If a graft merge results in conflicts, the graft process is
2125 2126 interrupted so that the current merge can be manually resolved.
2126 2127 Once all conflicts are addressed, the graft process can be
2127 2128 continued with the -c/--continue option.
2128 2129
2129 2130 .. note::
2130 2131
2131 2132 The -c/--continue option does not reapply earlier options, except
2132 2133 for --force.
2133 2134
2134 2135 .. container:: verbose
2135 2136
2136 2137 Examples:
2137 2138
2138 2139 - copy a single change to the stable branch and edit its description::
2139 2140
2140 2141 hg update stable
2141 2142 hg graft --edit 9393
2142 2143
2143 2144 - graft a range of changesets with one exception, updating dates::
2144 2145
2145 2146 hg graft -D "2085::2093 and not 2091"
2146 2147
2147 2148 - continue a graft after resolving conflicts::
2148 2149
2149 2150 hg graft -c
2150 2151
2151 2152 - show the source of a grafted changeset::
2152 2153
2153 2154 hg log --debug -r .
2154 2155
2155 2156 - show revisions sorted by date::
2156 2157
2157 2158 hg log -r "sort(all(), date)"
2158 2159
2159 2160 See :hg:`help revisions` for more about specifying revisions.
2160 2161
2161 2162 Returns 0 on successful completion.
2162 2163 '''
2163 2164 with repo.wlock():
2164 2165 return _dograft(ui, repo, *revs, **opts)
2165 2166
2166 2167 def _dograft(ui, repo, *revs, **opts):
2167 2168 opts = pycompat.byteskwargs(opts)
2168 2169 if revs and opts.get('rev'):
2169 2170 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2170 2171 'revision ordering!\n'))
2171 2172
2172 2173 revs = list(revs)
2173 2174 revs.extend(opts.get('rev'))
2174 2175
2175 2176 if not opts.get('user') and opts.get('currentuser'):
2176 2177 opts['user'] = ui.username()
2177 2178 if not opts.get('date') and opts.get('currentdate'):
2178 2179 opts['date'] = "%d %d" % dateutil.makedate()
2179 2180
2180 2181 editor = cmdutil.getcommiteditor(editform='graft',
2181 2182 **pycompat.strkwargs(opts))
2182 2183
2183 2184 cont = False
2184 2185 if opts.get('continue'):
2185 2186 cont = True
2186 2187 if revs:
2187 2188 raise error.Abort(_("can't specify --continue and revisions"))
2188 2189 # read in unfinished revisions
2189 2190 try:
2190 2191 nodes = repo.vfs.read('graftstate').splitlines()
2191 2192 revs = [repo[node].rev() for node in nodes]
2192 2193 except IOError as inst:
2193 2194 if inst.errno != errno.ENOENT:
2194 2195 raise
2195 2196 cmdutil.wrongtooltocontinue(repo, _('graft'))
2196 2197 else:
2197 2198 if not revs:
2198 2199 raise error.Abort(_('no revisions specified'))
2199 2200 cmdutil.checkunfinished(repo)
2200 2201 cmdutil.bailifchanged(repo)
2201 2202 revs = scmutil.revrange(repo, revs)
2202 2203
2203 2204 skipped = set()
2204 2205 # check for merges
2205 2206 for rev in repo.revs('%ld and merge()', revs):
2206 2207 ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
2207 2208 skipped.add(rev)
2208 2209 revs = [r for r in revs if r not in skipped]
2209 2210 if not revs:
2210 2211 return -1
2211 2212
2212 2213 # Don't check in the --continue case, in effect retaining --force across
2213 2214 # --continues. That's because without --force, any revisions we decided to
2214 2215 # skip would have been filtered out here, so they wouldn't have made their
2215 2216 # way to the graftstate. With --force, any revisions we would have otherwise
2216 2217 # skipped would not have been filtered out, and if they hadn't been applied
2217 2218 # already, they'd have been in the graftstate.
2218 2219 if not (cont or opts.get('force')):
2219 2220 # check for ancestors of dest branch
2220 2221 crev = repo['.'].rev()
2221 2222 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2222 2223 # XXX make this lazy in the future
2223 2224 # don't mutate while iterating, create a copy
2224 2225 for rev in list(revs):
2225 2226 if rev in ancestors:
2226 2227 ui.warn(_('skipping ancestor revision %d:%s\n') %
2227 2228 (rev, repo[rev]))
2228 2229 # XXX remove on list is slow
2229 2230 revs.remove(rev)
2230 2231 if not revs:
2231 2232 return -1
2232 2233
2233 2234 # analyze revs for earlier grafts
2234 2235 ids = {}
2235 2236 for ctx in repo.set("%ld", revs):
2236 2237 ids[ctx.hex()] = ctx.rev()
2237 2238 n = ctx.extra().get('source')
2238 2239 if n:
2239 2240 ids[n] = ctx.rev()
2240 2241
2241 2242 # check ancestors for earlier grafts
2242 2243 ui.debug('scanning for duplicate grafts\n')
2243 2244
2244 2245 # The only changesets we can be sure don't contain grafts of any
2245 2246 # revs are the ones that are common ancestors of *all* revs:
2246 2247 for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
2247 2248 ctx = repo[rev]
2248 2249 n = ctx.extra().get('source')
2249 2250 if n in ids:
2250 2251 try:
2251 2252 r = repo[n].rev()
2252 2253 except error.RepoLookupError:
2253 2254 r = None
2254 2255 if r in revs:
2255 2256 ui.warn(_('skipping revision %d:%s '
2256 2257 '(already grafted to %d:%s)\n')
2257 2258 % (r, repo[r], rev, ctx))
2258 2259 revs.remove(r)
2259 2260 elif ids[n] in revs:
2260 2261 if r is None:
2261 2262 ui.warn(_('skipping already grafted revision %d:%s '
2262 2263 '(%d:%s also has unknown origin %s)\n')
2263 2264 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2264 2265 else:
2265 2266 ui.warn(_('skipping already grafted revision %d:%s '
2266 2267 '(%d:%s also has origin %d:%s)\n')
2267 2268 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2268 2269 revs.remove(ids[n])
2269 2270 elif ctx.hex() in ids:
2270 2271 r = ids[ctx.hex()]
2271 2272 ui.warn(_('skipping already grafted revision %d:%s '
2272 2273 '(was grafted from %d:%s)\n') %
2273 2274 (r, repo[r], rev, ctx))
2274 2275 revs.remove(r)
2275 2276 if not revs:
2276 2277 return -1
2277 2278
2278 2279 for pos, ctx in enumerate(repo.set("%ld", revs)):
2279 2280 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2280 2281 ctx.description().split('\n', 1)[0])
2281 2282 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2282 2283 if names:
2283 2284 desc += ' (%s)' % ' '.join(names)
2284 2285 ui.status(_('grafting %s\n') % desc)
2285 2286 if opts.get('dry_run'):
2286 2287 continue
2287 2288
2288 2289 source = ctx.extra().get('source')
2289 2290 extra = {}
2290 2291 if source:
2291 2292 extra['source'] = source
2292 2293 extra['intermediate-source'] = ctx.hex()
2293 2294 else:
2294 2295 extra['source'] = ctx.hex()
2295 2296 user = ctx.user()
2296 2297 if opts.get('user'):
2297 2298 user = opts['user']
2298 2299 date = ctx.date()
2299 2300 if opts.get('date'):
2300 2301 date = opts['date']
2301 2302 message = ctx.description()
2302 2303 if opts.get('log'):
2303 2304 message += '\n(grafted from %s)' % ctx.hex()
2304 2305
2305 2306 # we don't merge the first commit when continuing
2306 2307 if not cont:
2307 2308 # perform the graft merge with p1(rev) as 'ancestor'
2308 2309 try:
2309 2310 # ui.forcemerge is an internal variable, do not document
2310 2311 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
2311 2312 'graft')
2312 2313 stats = mergemod.graft(repo, ctx, ctx.p1(),
2313 2314 ['local', 'graft'])
2314 2315 finally:
2315 2316 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
2316 2317 # report any conflicts
2317 2318 if stats.unresolvedcount > 0:
2318 2319 # write out state for --continue
2319 2320 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2320 2321 repo.vfs.write('graftstate', ''.join(nodelines))
2321 2322 extra = ''
2322 2323 if opts.get('user'):
2323 2324 extra += ' --user %s' % procutil.shellquote(opts['user'])
2324 2325 if opts.get('date'):
2325 2326 extra += ' --date %s' % procutil.shellquote(opts['date'])
2326 2327 if opts.get('log'):
2327 2328 extra += ' --log'
2328 2329 hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
2329 2330 raise error.Abort(
2330 2331 _("unresolved conflicts, can't continue"),
2331 2332 hint=hint)
2332 2333 else:
2333 2334 cont = False
2334 2335
2335 2336 # commit
2336 2337 node = repo.commit(text=message, user=user,
2337 2338 date=date, extra=extra, editor=editor)
2338 2339 if node is None:
2339 2340 ui.warn(
2340 2341 _('note: graft of %d:%s created no changes to commit\n') %
2341 2342 (ctx.rev(), ctx))
2342 2343
2343 2344 # remove state when we complete successfully
2344 2345 if not opts.get('dry_run'):
2345 2346 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
2346 2347
2347 2348 return 0
2348 2349
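# Editor's illustrative sketch (not part of the original module): the
# duplicate-graft detection in _dograft() above keys off the 'source' (and
# 'intermediate-source') fields stored in a grafted changeset's extra dict.
# The toy helper below shows the shape of that bookkeeping for a graft of a
# graft, using made-up hex ids.
def _demo_graft_extra(grafted_hex, grafted_extra=None):
    # First-generation graft: record where the changes came from.
    if not grafted_extra or 'source' not in grafted_extra:
        return {'source': grafted_hex}
    # Grafting an earlier graft: keep the ultimate origin in 'source' and
    # remember the copy we grafted from in 'intermediate-source'.
    return {'source': grafted_extra['source'],
            'intermediate-source': grafted_hex}

# Example (hypothetical ids):
#   a = _demo_graft_extra('aaaa1111')        # -> {'source': 'aaaa1111'}
#   _demo_graft_extra('bbbb2222', a)
#   -> {'source': 'aaaa1111', 'intermediate-source': 'bbbb2222'}
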
2349 2350 @command('grep',
2350 2351 [('0', 'print0', None, _('end fields with NUL')),
2351 2352 ('', 'all', None, _('print all revisions that match')),
2352 2353 ('a', 'text', None, _('treat all files as text')),
2353 2354 ('f', 'follow', None,
2354 2355 _('follow changeset history,'
2355 2356 ' or file history across copies and renames')),
2356 2357 ('i', 'ignore-case', None, _('ignore case when matching')),
2357 2358 ('l', 'files-with-matches', None,
2358 2359 _('print only filenames and revisions that match')),
2359 2360 ('n', 'line-number', None, _('print matching line numbers')),
2360 2361 ('r', 'rev', [],
2361 2362 _('only search files changed within revision range'), _('REV')),
2362 2363 ('u', 'user', None, _('list the author (long with -v)')),
2363 2364 ('d', 'date', None, _('list the date (short with -q)')),
2364 2365 ] + formatteropts + walkopts,
2365 2366 _('[OPTION]... PATTERN [FILE]...'),
2366 2367 inferrepo=True, cmdtype=readonly)
2367 2368 def grep(ui, repo, pattern, *pats, **opts):
2368 2369 """search revision history for a pattern in specified files
2369 2370
2370 2371 Search revision history for a regular expression in the specified
2371 2372 files or the entire project.
2372 2373
2373 2374 By default, grep prints the most recent revision number for each
2374 2375 file in which it finds a match. To get it to print every revision
2375 2376 that contains a change in match status ("-" for a match that becomes
2376 2377 a non-match, or "+" for a non-match that becomes a match), use the
2377 2378 --all flag.
2378 2379
2379 2380 PATTERN can be any Python (roughly Perl-compatible) regular
2380 2381 expression.
2381 2382
2382 2383 If no FILEs are specified (and -f/--follow isn't set), all files in
2383 2384 the repository are searched, including those that don't exist in the
2384 2385 current branch or have been deleted in a prior changeset.
2385 2386
2386 2387 Returns 0 if a match is found, 1 otherwise.
2387 2388 """
2388 2389 opts = pycompat.byteskwargs(opts)
2389 2390 reflags = re.M
2390 2391 if opts.get('ignore_case'):
2391 2392 reflags |= re.I
2392 2393 try:
2393 2394 regexp = util.re.compile(pattern, reflags)
2394 2395 except re.error as inst:
2395 2396 ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
2396 2397 return 1
2397 2398 sep, eol = ':', '\n'
2398 2399 if opts.get('print0'):
2399 2400 sep = eol = '\0'
2400 2401
2401 2402 getfile = util.lrucachefunc(repo.file)
2402 2403
2403 2404 def matchlines(body):
2404 2405 begin = 0
2405 2406 linenum = 0
2406 2407 while begin < len(body):
2407 2408 match = regexp.search(body, begin)
2408 2409 if not match:
2409 2410 break
2410 2411 mstart, mend = match.span()
2411 2412 linenum += body.count('\n', begin, mstart) + 1
2412 2413 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2413 2414 begin = body.find('\n', mend) + 1 or len(body) + 1
2414 2415 lend = begin - 1
2415 2416 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2416 2417
2417 2418 class linestate(object):
2418 2419 def __init__(self, line, linenum, colstart, colend):
2419 2420 self.line = line
2420 2421 self.linenum = linenum
2421 2422 self.colstart = colstart
2422 2423 self.colend = colend
2423 2424
2424 2425 def __hash__(self):
2425 2426 return hash((self.linenum, self.line))
2426 2427
2427 2428 def __eq__(self, other):
2428 2429 return self.line == other.line
2429 2430
2430 2431 def findpos(self):
2431 2432 """Iterate all (start, end) indices of matches"""
2432 2433 yield self.colstart, self.colend
2433 2434 p = self.colend
2434 2435 while p < len(self.line):
2435 2436 m = regexp.search(self.line, p)
2436 2437 if not m:
2437 2438 break
2438 2439 yield m.span()
2439 2440 p = m.end()
2440 2441
2441 2442 matches = {}
2442 2443 copies = {}
2443 2444 def grepbody(fn, rev, body):
2444 2445 matches[rev].setdefault(fn, [])
2445 2446 m = matches[rev][fn]
2446 2447 for lnum, cstart, cend, line in matchlines(body):
2447 2448 s = linestate(line, lnum, cstart, cend)
2448 2449 m.append(s)
2449 2450
2450 2451 def difflinestates(a, b):
2451 2452 sm = difflib.SequenceMatcher(None, a, b)
2452 2453 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2453 2454 if tag == 'insert':
2454 2455 for i in xrange(blo, bhi):
2455 2456 yield ('+', b[i])
2456 2457 elif tag == 'delete':
2457 2458 for i in xrange(alo, ahi):
2458 2459 yield ('-', a[i])
2459 2460 elif tag == 'replace':
2460 2461 for i in xrange(alo, ahi):
2461 2462 yield ('-', a[i])
2462 2463 for i in xrange(blo, bhi):
2463 2464 yield ('+', b[i])
2464 2465
2465 2466 def display(fm, fn, ctx, pstates, states):
2466 2467 rev = ctx.rev()
2467 2468 if fm.isplain():
2468 2469 formatuser = ui.shortuser
2469 2470 else:
2470 2471 formatuser = str
2471 2472 if ui.quiet:
2472 2473 datefmt = '%Y-%m-%d'
2473 2474 else:
2474 2475 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2475 2476 found = False
2476 2477 @util.cachefunc
2477 2478 def binary():
2478 2479 flog = getfile(fn)
2479 2480 return stringutil.binary(flog.read(ctx.filenode(fn)))
2480 2481
2481 2482 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2482 2483 if opts.get('all'):
2483 2484 iter = difflinestates(pstates, states)
2484 2485 else:
2485 2486 iter = [('', l) for l in states]
2486 2487 for change, l in iter:
2487 2488 fm.startitem()
2488 2489 fm.data(node=fm.hexfunc(ctx.node()))
2489 2490 cols = [
2490 2491 ('filename', fn, True),
2491 2492 ('rev', rev, True),
2492 2493 ('linenumber', l.linenum, opts.get('line_number')),
2493 2494 ]
2494 2495 if opts.get('all'):
2495 2496 cols.append(('change', change, True))
2496 2497 cols.extend([
2497 2498 ('user', formatuser(ctx.user()), opts.get('user')),
2498 2499 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2499 2500 ])
2500 2501 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2501 2502 for name, data, cond in cols:
2502 2503 field = fieldnamemap.get(name, name)
2503 2504 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2504 2505 if cond and name != lastcol:
2505 2506 fm.plain(sep, label='grep.sep')
2506 2507 if not opts.get('files_with_matches'):
2507 2508 fm.plain(sep, label='grep.sep')
2508 2509 if not opts.get('text') and binary():
2509 2510 fm.plain(_(" Binary file matches"))
2510 2511 else:
2511 2512 displaymatches(fm.nested('texts'), l)
2512 2513 fm.plain(eol)
2513 2514 found = True
2514 2515 if opts.get('files_with_matches'):
2515 2516 break
2516 2517 return found
2517 2518
2518 2519 def displaymatches(fm, l):
2519 2520 p = 0
2520 2521 for s, e in l.findpos():
2521 2522 if p < s:
2522 2523 fm.startitem()
2523 2524 fm.write('text', '%s', l.line[p:s])
2524 2525 fm.data(matched=False)
2525 2526 fm.startitem()
2526 2527 fm.write('text', '%s', l.line[s:e], label='grep.match')
2527 2528 fm.data(matched=True)
2528 2529 p = e
2529 2530 if p < len(l.line):
2530 2531 fm.startitem()
2531 2532 fm.write('text', '%s', l.line[p:])
2532 2533 fm.data(matched=False)
2533 2534 fm.end()
2534 2535
2535 2536 skip = {}
2536 2537 revfiles = {}
2537 2538 match = scmutil.match(repo[None], pats, opts)
2538 2539 found = False
2539 2540 follow = opts.get('follow')
2540 2541
2541 2542 def prep(ctx, fns):
2542 2543 rev = ctx.rev()
2543 2544 pctx = ctx.p1()
2544 2545 parent = pctx.rev()
2545 2546 matches.setdefault(rev, {})
2546 2547 matches.setdefault(parent, {})
2547 2548 files = revfiles.setdefault(rev, [])
2548 2549 for fn in fns:
2549 2550 flog = getfile(fn)
2550 2551 try:
2551 2552 fnode = ctx.filenode(fn)
2552 2553 except error.LookupError:
2553 2554 continue
2554 2555
2555 2556 copied = flog.renamed(fnode)
2556 2557 copy = follow and copied and copied[0]
2557 2558 if copy:
2558 2559 copies.setdefault(rev, {})[fn] = copy
2559 2560 if fn in skip:
2560 2561 if copy:
2561 2562 skip[copy] = True
2562 2563 continue
2563 2564 files.append(fn)
2564 2565
2565 2566 if fn not in matches[rev]:
2566 2567 grepbody(fn, rev, flog.read(fnode))
2567 2568
2568 2569 pfn = copy or fn
2569 2570 if pfn not in matches[parent]:
2570 2571 try:
2571 2572 fnode = pctx.filenode(pfn)
2572 2573 grepbody(pfn, parent, flog.read(fnode))
2573 2574 except error.LookupError:
2574 2575 pass
2575 2576
2576 2577 ui.pager('grep')
2577 2578 fm = ui.formatter('grep', opts)
2578 2579 for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
2579 2580 rev = ctx.rev()
2580 2581 parent = ctx.p1().rev()
2581 2582 for fn in sorted(revfiles.get(rev, [])):
2582 2583 states = matches[rev][fn]
2583 2584 copy = copies.get(rev, {}).get(fn)
2584 2585 if fn in skip:
2585 2586 if copy:
2586 2587 skip[copy] = True
2587 2588 continue
2588 2589 pstates = matches.get(parent, {}).get(copy or fn, [])
2589 2590 if pstates or states:
2590 2591 r = display(fm, fn, ctx, pstates, states)
2591 2592 found = found or r
2592 2593 if r and not opts.get('all'):
2593 2594 skip[fn] = True
2594 2595 if copy:
2595 2596 skip[copy] = True
2596 2597 del revfiles[rev]
2597 2598 # We keep the matches dict for the duration of the window;
2598 2599 # clear it once the window is over.
2599 2600 if not revfiles:
2600 2601 matches.clear()
2601 2602 fm.end()
2602 2603
2603 2604 return not found
2604 2605
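# Editor's illustrative sketch (not part of the original module): with --all,
# grep() above reports match-status transitions between a file's parent and
# child revisions via difflinestates(), a thin wrapper around
# difflib.SequenceMatcher opcodes (difflib is already imported by this
# module).  A standalone restatement on plain line lists:
def _demo_difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield ('-', line)       # match disappeared in the child
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield ('+', line)       # match appeared in the child

# Example:
#   list(_demo_difflinestates(['foo()'], ['foo()', 'foo(bar)']))
#   -> [('+', 'foo(bar)')]
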
2605 2606 @command('heads',
2606 2607 [('r', 'rev', '',
2607 2608 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2608 2609 ('t', 'topo', False, _('show topological heads only')),
2609 2610 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2610 2611 ('c', 'closed', False, _('show normal and closed branch heads')),
2611 2612 ] + templateopts,
2612 2613 _('[-ct] [-r STARTREV] [REV]...'), cmdtype=readonly)
2613 2614 def heads(ui, repo, *branchrevs, **opts):
2614 2615 """show branch heads
2615 2616
2616 2617 With no arguments, show all open branch heads in the repository.
2617 2618 Branch heads are changesets that have no descendants on the
2618 2619 same branch. They are where development generally takes place and
2619 2620 are the usual targets for update and merge operations.
2620 2621
2621 2622 If one or more REVs are given, only open branch heads on the
2622 2623 branches associated with the specified changesets are shown. This
2623 2624 means that you can use :hg:`heads .` to see the heads on the
2624 2625 currently checked-out branch.
2625 2626
2626 2627 If -c/--closed is specified, also show branch heads marked closed
2627 2628 (see :hg:`commit --close-branch`).
2628 2629
2629 2630 If STARTREV is specified, only those heads that are descendants of
2630 2631 STARTREV will be displayed.
2631 2632
2632 2633 If -t/--topo is specified, named branch mechanics will be ignored and only
2633 2634 topological heads (changesets with no children) will be shown.
2634 2635
2635 2636 Returns 0 if matching heads are found, 1 if not.
2636 2637 """
2637 2638
2638 2639 opts = pycompat.byteskwargs(opts)
2639 2640 start = None
2640 2641 rev = opts.get('rev')
2641 2642 if rev:
2642 2643 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2643 2644 start = scmutil.revsingle(repo, rev, None).node()
2644 2645
2645 2646 if opts.get('topo'):
2646 2647 heads = [repo[h] for h in repo.heads(start)]
2647 2648 else:
2648 2649 heads = []
2649 2650 for branch in repo.branchmap():
2650 2651 heads += repo.branchheads(branch, start, opts.get('closed'))
2651 2652 heads = [repo[h] for h in heads]
2652 2653
2653 2654 if branchrevs:
2654 2655 branches = set(repo[br].branch() for br in branchrevs)
2655 2656 heads = [h for h in heads if h.branch() in branches]
2656 2657
2657 2658 if opts.get('active') and branchrevs:
2658 2659 dagheads = repo.heads(start)
2659 2660 heads = [h for h in heads if h.node() in dagheads]
2660 2661
2661 2662 if branchrevs:
2662 2663 haveheads = set(h.branch() for h in heads)
2663 2664 if branches - haveheads:
2664 2665 headless = ', '.join(b for b in branches - haveheads)
2665 2666 msg = _('no open branch heads found on branches %s')
2666 2667 if opts.get('rev'):
2667 2668 msg += _(' (started at %s)') % opts['rev']
2668 2669 ui.warn((msg + '\n') % headless)
2669 2670
2670 2671 if not heads:
2671 2672 return 1
2672 2673
2673 2674 ui.pager('heads')
2674 2675 heads = sorted(heads, key=lambda x: -x.rev())
2675 2676 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
2676 2677 for ctx in heads:
2677 2678 displayer.show(ctx)
2678 2679 displayer.close()
2679 2680
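# Editor's illustrative sketch (not part of the original module): heads()
# above distinguishes topological heads (no children at all, as returned by
# repo.heads()) from named-branch heads (no descendants on the same branch).
# The toy helper below restates those two definitions on a single-parent
# sample DAG (merges ignored); it is not how Mercurial's branchmap computes
# them.
def _demo_heads(parents, branches):
    # parents:  {rev: parent rev or None}   branches: {rev: branch name}
    children = {}
    for rev, parent in parents.items():
        if parent is not None:
            children.setdefault(parent, set()).add(rev)

    def descendants(rev):
        stack, seen = list(children.get(rev, ())), set()
        while stack:
            r = stack.pop()
            if r not in seen:
                seen.add(r)
                stack.extend(children.get(r, ()))
        return seen

    topo = set(r for r in parents if not children.get(r))
    branchheads = set(
        r for r in parents
        if not any(branches[d] == branches[r] for d in descendants(r)))
    return topo, branchheads

# Example DAG 0 -- 1 (default) -- 2 (stable):
#   _demo_heads({0: None, 1: 0, 2: 1},
#               {0: 'default', 1: 'default', 2: 'stable'})
#   -> ({2}, {1, 2})   # only rev 2 is a topological head
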
2680 2681 @command('help',
2681 2682 [('e', 'extension', None, _('show only help for extensions')),
2682 2683 ('c', 'command', None, _('show only help for commands')),
2683 2684 ('k', 'keyword', None, _('show topics matching keyword')),
2684 2685 ('s', 'system', [], _('show help for specific platform(s)')),
2685 2686 ],
2686 2687 _('[-ecks] [TOPIC]'),
2687 2688 norepo=True, cmdtype=readonly)
2688 2689 def help_(ui, name=None, **opts):
2689 2690 """show help for a given topic or a help overview
2690 2691
2691 2692 With no arguments, print a list of commands with short help messages.
2692 2693
2693 2694 Given a topic, extension, or command name, print help for that
2694 2695 topic.
2695 2696
2696 2697 Returns 0 if successful.
2697 2698 """
2698 2699
2699 2700 keep = opts.get(r'system') or []
2700 2701 if len(keep) == 0:
2701 2702 if pycompat.sysplatform.startswith('win'):
2702 2703 keep.append('windows')
2703 2704 elif pycompat.sysplatform == 'OpenVMS':
2704 2705 keep.append('vms')
2705 2706 elif pycompat.sysplatform == 'plan9':
2706 2707 keep.append('plan9')
2707 2708 else:
2708 2709 keep.append('unix')
2709 2710 keep.append(pycompat.sysplatform.lower())
2710 2711 if ui.verbose:
2711 2712 keep.append('verbose')
2712 2713
2713 2714 commands = sys.modules[__name__]
2714 2715 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
2715 2716 ui.pager('help')
2716 2717 ui.write(formatted)
2717 2718
2718 2719
2719 2720 @command('identify|id',
2720 2721 [('r', 'rev', '',
2721 2722 _('identify the specified revision'), _('REV')),
2722 2723 ('n', 'num', None, _('show local revision number')),
2723 2724 ('i', 'id', None, _('show global revision id')),
2724 2725 ('b', 'branch', None, _('show branch')),
2725 2726 ('t', 'tags', None, _('show tags')),
2726 2727 ('B', 'bookmarks', None, _('show bookmarks')),
2727 2728 ] + remoteopts + formatteropts,
2728 2729 _('[-nibtB] [-r REV] [SOURCE]'),
2729 2730 optionalrepo=True, cmdtype=readonly)
2730 2731 def identify(ui, repo, source=None, rev=None,
2731 2732 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2732 2733 """identify the working directory or specified revision
2733 2734
2734 2735 Print a summary identifying the repository state at REV using one or
2735 2736 two parent hash identifiers, followed by a "+" if the working
2736 2737 directory has uncommitted changes, the branch name (if not default),
2737 2738 a list of tags, and a list of bookmarks.
2738 2739
2739 2740 When REV is not given, print a summary of the current state of the
2740 2741 repository including the working directory. Specify -r. to get information
2741 2742 about the working directory parent without scanning uncommitted changes.
2742 2743
2743 2744 Specifying a path to a repository root or Mercurial bundle will
2744 2745 cause lookup to operate on that repository/bundle.
2745 2746
2746 2747 .. container:: verbose
2747 2748
2748 2749 Examples:
2749 2750
2750 2751 - generate a build identifier for the working directory::
2751 2752
2752 2753 hg id --id > build-id.dat
2753 2754
2754 2755 - find the revision corresponding to a tag::
2755 2756
2756 2757 hg id -n -r 1.3
2757 2758
2758 2759 - check the most recent revision of a remote repository::
2759 2760
2760 2761 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2761 2762
2762 2763 See :hg:`log` for generating more information about specific revisions,
2763 2764 including full hash identifiers.
2764 2765
2765 2766 Returns 0 if successful.
2766 2767 """
2767 2768
2768 2769 opts = pycompat.byteskwargs(opts)
2769 2770 if not repo and not source:
2770 2771 raise error.Abort(_("there is no Mercurial repository here "
2771 2772 "(.hg not found)"))
2772 2773
2773 2774 if ui.debugflag:
2774 2775 hexfunc = hex
2775 2776 else:
2776 2777 hexfunc = short
2777 2778 default = not (num or id or branch or tags or bookmarks)
2778 2779 output = []
2779 2780 revs = []
2780 2781
2781 2782 if source:
2782 2783 source, branches = hg.parseurl(ui.expandpath(source))
2783 2784 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2784 2785 repo = peer.local()
2785 2786 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2786 2787
2787 2788 fm = ui.formatter('identify', opts)
2788 2789 fm.startitem()
2789 2790
2790 2791 if not repo:
2791 2792 if num or branch or tags:
2792 2793 raise error.Abort(
2793 2794 _("can't query remote revision number, branch, or tags"))
2794 2795 if not rev and revs:
2795 2796 rev = revs[0]
2796 2797 if not rev:
2797 2798 rev = "tip"
2798 2799
2799 2800 remoterev = peer.lookup(rev)
2800 2801 hexrev = hexfunc(remoterev)
2801 2802 if default or id:
2802 2803 output = [hexrev]
2803 2804 fm.data(id=hexrev)
2804 2805
2805 2806 def getbms():
2806 2807 bms = []
2807 2808
2808 2809 if 'bookmarks' in peer.listkeys('namespaces'):
2809 2810 hexremoterev = hex(remoterev)
2810 2811 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2811 2812 if bmr == hexremoterev]
2812 2813
2813 2814 return sorted(bms)
2814 2815
2815 2816 bms = getbms()
2816 2817 if bookmarks:
2817 2818 output.extend(bms)
2818 2819 elif default and not ui.quiet:
2819 2820 # multiple bookmarks for a single parent separated by '/'
2820 2821 bm = '/'.join(bms)
2821 2822 if bm:
2822 2823 output.append(bm)
2823 2824
2824 2825 fm.data(node=hex(remoterev))
2825 2826 fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
2826 2827 else:
2827 2828 if rev:
2828 2829 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2829 2830 ctx = scmutil.revsingle(repo, rev, None)
2830 2831
2831 2832 if ctx.rev() is None:
2832 2833 ctx = repo[None]
2833 2834 parents = ctx.parents()
2834 2835 taglist = []
2835 2836 for p in parents:
2836 2837 taglist.extend(p.tags())
2837 2838
2838 2839 dirty = ""
2839 2840 if ctx.dirty(missing=True, merge=False, branch=False):
2840 2841 dirty = '+'
2841 2842 fm.data(dirty=dirty)
2842 2843
2843 2844 hexoutput = [hexfunc(p.node()) for p in parents]
2844 2845 if default or id:
2845 2846 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2846 2847 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2847 2848
2848 2849 if num:
2849 2850 numoutput = ["%d" % p.rev() for p in parents]
2850 2851 output.append("%s%s" % ('+'.join(numoutput), dirty))
2851 2852
2852 2853 fn = fm.nested('parents')
2853 2854 for p in parents:
2854 2855 fn.startitem()
2855 2856 fn.data(rev=p.rev())
2856 2857 fn.data(node=p.hex())
2857 2858 fn.context(ctx=p)
2858 2859 fn.end()
2859 2860 else:
2860 2861 hexoutput = hexfunc(ctx.node())
2861 2862 if default or id:
2862 2863 output = [hexoutput]
2863 2864 fm.data(id=hexoutput)
2864 2865
2865 2866 if num:
2866 2867 output.append(pycompat.bytestr(ctx.rev()))
2867 2868 taglist = ctx.tags()
2868 2869
2869 2870 if default and not ui.quiet:
2870 2871 b = ctx.branch()
2871 2872 if b != 'default':
2872 2873 output.append("(%s)" % b)
2873 2874
2874 2875 # multiple tags for a single parent separated by '/'
2875 2876 t = '/'.join(taglist)
2876 2877 if t:
2877 2878 output.append(t)
2878 2879
2879 2880 # multiple bookmarks for a single parent separated by '/'
2880 2881 bm = '/'.join(ctx.bookmarks())
2881 2882 if bm:
2882 2883 output.append(bm)
2883 2884 else:
2884 2885 if branch:
2885 2886 output.append(ctx.branch())
2886 2887
2887 2888 if tags:
2888 2889 output.extend(taglist)
2889 2890
2890 2891 if bookmarks:
2891 2892 output.extend(ctx.bookmarks())
2892 2893
2893 2894 fm.data(node=ctx.hex())
2894 2895 fm.data(branch=ctx.branch())
2895 2896 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2896 2897 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2897 2898 fm.context(ctx=ctx)
2898 2899
2899 2900 fm.plain("%s\n" % ' '.join(output))
2900 2901 fm.end()
2901 2902
2902 2903 @command('import|patch',
2903 2904 [('p', 'strip', 1,
2904 2905 _('directory strip option for patch. This has the same '
2905 2906 'meaning as the corresponding patch option'), _('NUM')),
2906 2907 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2907 2908 ('e', 'edit', False, _('invoke editor on commit messages')),
2908 2909 ('f', 'force', None,
2909 2910 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2910 2911 ('', 'no-commit', None,
2911 2912 _("don't commit, just update the working directory")),
2912 2913 ('', 'bypass', None,
2913 2914 _("apply patch without touching the working directory")),
2914 2915 ('', 'partial', None,
2915 2916 _('commit even if some hunks fail')),
2916 2917 ('', 'exact', None,
2917 2918 _('abort if patch would apply lossily')),
2918 2919 ('', 'prefix', '',
2919 2920 _('apply patch to subdirectory'), _('DIR')),
2920 2921 ('', 'import-branch', None,
2921 2922 _('use any branch information in patch (implied by --exact)'))] +
2922 2923 commitopts + commitopts2 + similarityopts,
2923 2924 _('[OPTION]... PATCH...'))
2924 2925 def import_(ui, repo, patch1=None, *patches, **opts):
2925 2926 """import an ordered set of patches
2926 2927
2927 2928 Import a list of patches and commit them individually (unless
2928 2929 --no-commit is specified).
2929 2930
2930 2931 To read a patch from standard input (stdin), use "-" as the patch
2931 2932 name. If a URL is specified, the patch will be downloaded from
2932 2933 there.
2933 2934
2934 2935 Import first applies changes to the working directory (unless
2935 2936 --bypass is specified); import will abort if there are outstanding
2936 2937 changes.
2937 2938
2938 2939 Use --bypass to apply and commit patches directly to the
2939 2940 repository, without affecting the working directory. Without
2940 2941 --exact, patches will be applied on top of the working directory
2941 2942 parent revision.
2942 2943
2943 2944 You can import a patch straight from a mail message. Even patches
2944 2945 as attachments work (to use the body part, it must have type
2945 2946 text/plain or text/x-patch). The From and Subject headers of the email
2946 2947 message are used as the default committer and commit message. All
2947 2948 text/plain body parts before the first diff are added to the commit
2948 2949 message.
2949 2950
2950 2951 If the imported patch was generated by :hg:`export`, user and
2951 2952 description from patch override values from message headers and
2952 2953 body. Values given on command line with -m/--message and -u/--user
2953 2954 override these.
2954 2955
2955 2956 If --exact is specified, import will set the working directory to
2956 2957 the parent of each patch before applying it, and will abort if the
2957 2958 resulting changeset has a different ID than the one recorded in
2958 2959 the patch. This will guard against various ways that portable
2959 2960 patch formats and mail systems might fail to transfer Mercurial
2960 2961 data or metadata. See :hg:`bundle` for lossless transmission.
2961 2962
2962 2963 Use --partial to ensure a changeset will be created from the patch
2963 2964 even if some hunks fail to apply. Hunks that fail to apply will be
2964 2965 written to a <target-file>.rej file. Conflicts can then be resolved
2965 2966 by hand before :hg:`commit --amend` is run to update the created
2966 2967 changeset. This flag exists to let people import patches that
2967 2968 partially apply without losing the associated metadata (author,
2968 2969 date, description, ...).
2969 2970
2970 2971 .. note::
2971 2972
2972 2973 When no hunks apply cleanly, :hg:`import --partial` will create
2973 2974 an empty changeset, importing only the patch metadata.
2974 2975
2975 2976 With -s/--similarity, hg will attempt to discover renames and
2976 2977 copies in the patch in the same way as :hg:`addremove`.
2977 2978
2978 2979 It is possible to use external patch programs to perform the patch
2979 2980 by setting the ``ui.patch`` configuration option. For the default
2980 2981 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2981 2982 See :hg:`help config` for more information about configuration
2982 2983 files and how to use these options.
2983 2984
2984 2985 See :hg:`help dates` for a list of formats valid for -d/--date.
2985 2986
2986 2987 .. container:: verbose
2987 2988
2988 2989 Examples:
2989 2990
2990 2991 - import a traditional patch from a website and detect renames::
2991 2992
2992 2993 hg import -s 80 http://example.com/bugfix.patch
2993 2994
2994 2995 - import a changeset from an hgweb server::
2995 2996
2996 2997 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2997 2998
2998 2999 - import all the patches in a Unix-style mbox::
2999 3000
3000 3001 hg import incoming-patches.mbox
3001 3002
3002 3003 - import patches from stdin::
3003 3004
3004 3005 hg import -
3005 3006
3006 3007 - attempt to exactly restore an exported changeset (not always
3007 3008 possible)::
3008 3009
3009 3010 hg import --exact proposed-fix.patch
3010 3011
3011 3012 - use an external tool to apply a patch which is too fuzzy for
3012 3013 the default internal tool::
3013 3014 
3014 3015 hg import --config ui.patch="patch --merge" fuzzy.patch
3015 3016 
3016 3017 - change the default fuzz factor from 2 to a less strict 7::
3017 3018 
3018 3019 hg import --config patch.fuzz=7 fuzz.patch
3019 3020
3020 3021 Returns 0 on success, 1 on partial success (see --partial).
3021 3022 """
3022 3023
3023 3024 opts = pycompat.byteskwargs(opts)
3024 3025 if not patch1:
3025 3026 raise error.Abort(_('need at least one patch to import'))
3026 3027
3027 3028 patches = (patch1,) + patches
3028 3029
3029 3030 date = opts.get('date')
3030 3031 if date:
3031 3032 opts['date'] = dateutil.parsedate(date)
3032 3033
3033 3034 exact = opts.get('exact')
3034 3035 update = not opts.get('bypass')
3035 3036 if not update and opts.get('no_commit'):
3036 3037 raise error.Abort(_('cannot use --no-commit with --bypass'))
3037 3038 try:
3038 3039 sim = float(opts.get('similarity') or 0)
3039 3040 except ValueError:
3040 3041 raise error.Abort(_('similarity must be a number'))
3041 3042 if sim < 0 or sim > 100:
3042 3043 raise error.Abort(_('similarity must be between 0 and 100'))
3043 3044 if sim and not update:
3044 3045 raise error.Abort(_('cannot use --similarity with --bypass'))
3045 3046 if exact:
3046 3047 if opts.get('edit'):
3047 3048 raise error.Abort(_('cannot use --exact with --edit'))
3048 3049 if opts.get('prefix'):
3049 3050 raise error.Abort(_('cannot use --exact with --prefix'))
3050 3051
3051 3052 base = opts["base"]
3052 3053 wlock = dsguard = lock = tr = None
3053 3054 msgs = []
3054 3055 ret = 0
3055 3056
3056 3057
3057 3058 try:
3058 3059 wlock = repo.wlock()
3059 3060
3060 3061 if update:
3061 3062 cmdutil.checkunfinished(repo)
3062 3063 if (exact or not opts.get('force')):
3063 3064 cmdutil.bailifchanged(repo)
3064 3065
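# Committing the imported patches requires the store lock and a
# transaction so a failed import rolls back cleanly; with --no-commit
# only the dirstate changes, so a dirstateguard is sufficient.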
3065 3066 if not opts.get('no_commit'):
3066 3067 lock = repo.lock()
3067 3068 tr = repo.transaction('import')
3068 3069 else:
3069 3070 dsguard = dirstateguard.dirstateguard(repo, 'import')
3070 3071 parents = repo[None].parents()
3071 3072 for patchurl in patches:
3072 3073 if patchurl == '-':
3073 3074 ui.status(_('applying patch from stdin\n'))
3074 3075 patchfile = ui.fin
3075 3076 patchurl = 'stdin' # for error message
3076 3077 else:
3077 3078 patchurl = os.path.join(base, patchurl)
3078 3079 ui.status(_('applying %s\n') % patchurl)
3079 3080 patchfile = hg.openpath(ui, patchurl)
3080 3081
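# A single patch file may contain several patches (an mbox, for example);
# apply each piece in turn and remember whether any diff was found at all.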
3081 3082 haspatch = False
3082 3083 for hunk in patch.split(patchfile):
3083 3084 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3084 3085 parents, opts,
3085 3086 msgs, hg.clean)
3086 3087 if msg:
3087 3088 haspatch = True
3088 3089 ui.note(msg + '\n')
3089 3090 if update or exact:
3090 3091 parents = repo[None].parents()
3091 3092 else:
3092 3093 parents = [repo[node]]
3093 3094 if rej:
3094 3095 ui.write_err(_("patch applied partially\n"))
3095 3096 ui.write_err(_("(fix the .rej files and run "
3096 3097 "`hg commit --amend`)\n"))
3097 3098 ret = 1
3098 3099 break
3099 3100
3100 3101 if not haspatch:
3101 3102 raise error.Abort(_('%s: no diffs found') % patchurl)
3102 3103
3103 3104 if tr:
3104 3105 tr.close()
3105 3106 if msgs:
3106 3107 repo.savecommitmessage('\n* * *\n'.join(msgs))
3107 3108 if dsguard:
3108 3109 dsguard.close()
3109 3110 return ret
3110 3111 finally:
3111 3112 if tr:
3112 3113 tr.release()
3113 3114 release(lock, dsguard, wlock)
3114 3115
3115 3116 @command('incoming|in',
3116 3117 [('f', 'force', None,
3117 3118 _('run even if remote repository is unrelated')),
3118 3119 ('n', 'newest-first', None, _('show newest record first')),
3119 3120 ('', 'bundle', '',
3120 3121 _('file to store the bundles into'), _('FILE')),
3121 3122 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3122 3123 ('B', 'bookmarks', False, _("compare bookmarks")),
3123 3124 ('b', 'branch', [],
3124 3125 _('a specific branch you would like to pull'), _('BRANCH')),
3125 3126 ] + logopts + remoteopts + subrepoopts,
3126 3127 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3127 3128 def incoming(ui, repo, source="default", **opts):
3128 3129 """show new changesets found in source
3129 3130
3130 3131 Show new changesets found in the specified path/URL or the default
3131 3132 pull location. These are the changesets that would have been pulled
3132 3133 by :hg:`pull` at the time you issued this command.
3133 3134
3134 3135 See pull for valid source format details.
3135 3136
3136 3137 .. container:: verbose
3137 3138
3138 3139 With -B/--bookmarks, the result of bookmark comparison between
3139 3140 local and remote repositories is displayed. With -v/--verbose,
3140 3141 status is also displayed for each bookmark, as shown below::
3141 3142
3142 3143 BM1 01234567890a added
3143 3144 BM2 1234567890ab advanced
3144 3145 BM3 234567890abc diverged
3145 3146 BM4 34567890abcd changed
3146 3147
3147 3148 The action taken locally when pulling depends on the
3148 3149 status of each bookmark:
3149 3150
3150 3151 :``added``: pull will create it
3151 3152 :``advanced``: pull will update it
3152 3153 :``diverged``: pull will create a divergent bookmark
3153 3154 :``changed``: result depends on remote changesets
3154 3155
3155 3156 From the point of view of pulling behavior, bookmarks
3156 3157 existing only in the remote repository are treated as ``added``,
3157 3158 even if they have in fact been deleted locally.
3158 3159
3159 3160 .. container:: verbose
3160 3161
3161 3162 For a remote repository, using --bundle avoids downloading the
3162 3163 changesets twice if the incoming command is followed by a pull.
3163 3164
3164 3165 Examples:
3165 3166
3166 3167 - show incoming changes with patches and full description::
3167 3168
3168 3169 hg incoming -vp
3169 3170
3170 3171 - show incoming changes excluding merges, store a bundle::
3171 3172
3172 3173 hg in -vpM --bundle incoming.hg
3173 3174 hg pull incoming.hg
3174 3175
3175 3176 - briefly list changes inside a bundle::
3176 3177
3177 3178 hg in changes.hg -T "{desc|firstline}\\n"
3178 3179
3179 3180 Returns 0 if there are incoming changes, 1 otherwise.
3180 3181 """
3181 3182 opts = pycompat.byteskwargs(opts)
3182 3183 if opts.get('graph'):
3183 3184 logcmdutil.checkunsupportedgraphflags([], opts)
3184 3185 def display(other, chlist, displayer):
3185 3186 revdag = logcmdutil.graphrevs(other, chlist, opts)
3186 3187 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3187 3188 graphmod.asciiedges)
3188 3189
3189 3190 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3190 3191 return 0
3191 3192
3192 3193 if opts.get('bundle') and opts.get('subrepos'):
3193 3194 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3194 3195
3195 3196 if opts.get('bookmarks'):
3196 3197 source, branches = hg.parseurl(ui.expandpath(source),
3197 3198 opts.get('branch'))
3198 3199 other = hg.peer(repo, opts, source)
3199 3200 if 'bookmarks' not in other.listkeys('namespaces'):
3200 3201 ui.warn(_("remote doesn't support bookmarks\n"))
3201 3202 return 0
3202 3203 ui.pager('incoming')
3203 3204 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3204 3205 return bookmarks.incoming(ui, repo, other)
3205 3206
3206 3207 repo._subtoppath = ui.expandpath(source)
3207 3208 try:
3208 3209 return hg.incoming(ui, repo, source, opts)
3209 3210 finally:
3210 3211 del repo._subtoppath
3211 3212
3212 3213
3213 3214 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3214 3215 norepo=True)
3215 3216 def init(ui, dest=".", **opts):
3216 3217 """create a new repository in the given directory
3217 3218
3218 3219 Initialize a new repository in the given directory. If the given
3219 3220 directory does not exist, it will be created.
3220 3221
3221 3222 If no directory is given, the current directory is used.
3222 3223
3223 3224 It is possible to specify an ``ssh://`` URL as the destination.
3224 3225 See :hg:`help urls` for more information.
3225 3226
3226 3227 Returns 0 on success.
3227 3228 """
3228 3229 opts = pycompat.byteskwargs(opts)
3229 3230 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3230 3231
3231 3232 @command('locate',
3232 3233 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3233 3234 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3234 3235 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3235 3236 ] + walkopts,
3236 3237 _('[OPTION]... [PATTERN]...'))
3237 3238 def locate(ui, repo, *pats, **opts):
3238 3239 """locate files matching specific patterns (DEPRECATED)
3239 3240
3240 3241 Print files under Mercurial control in the working directory whose
3241 3242 names match the given patterns.
3242 3243
3243 3244 By default, this command searches all directories in the working
3244 3245 directory. To search just the current directory and its
3245 3246 subdirectories, use "--include .".
3246 3247
3247 3248 If no patterns are given to match, this command prints the names
3248 3249 of all files under Mercurial control in the working directory.
3249 3250
3250 3251 If you want to feed the output of this command into the "xargs"
3251 3252 command, use the -0 option to both this command and "xargs". This
3252 3253 will avoid the problem of "xargs" treating single filenames that
3253 3254 contain whitespace as multiple filenames.
3254 3255
3255 3256 See :hg:`help files` for a more versatile command.
3256 3257
3257 3258 Returns 0 if a match is found, 1 otherwise.
3258 3259 """
3259 3260 opts = pycompat.byteskwargs(opts)
3260 3261 if opts.get('print0'):
3261 3262 end = '\0'
3262 3263 else:
3263 3264 end = '\n'
3264 3265 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3265 3266
3266 3267 ret = 1
3267 3268 m = scmutil.match(ctx, pats, opts, default='relglob',
3268 3269 badfn=lambda x, y: False)
3269 3270
3270 3271 ui.pager('locate')
3271 3272 for abs in ctx.matches(m):
3272 3273 if opts.get('fullpath'):
3273 3274 ui.write(repo.wjoin(abs), end)
3274 3275 else:
3275 3276 ui.write(((pats and m.rel(abs)) or abs), end)
3276 3277 ret = 0
3277 3278
3278 3279 return ret
3279 3280
3280 3281 @command('^log|history',
3281 3282 [('f', 'follow', None,
3282 3283 _('follow changeset history, or file history across copies and renames')),
3283 3284 ('', 'follow-first', None,
3284 3285 _('only follow the first parent of merge changesets (DEPRECATED)')),
3285 3286 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3286 3287 ('C', 'copies', None, _('show copied files')),
3287 3288 ('k', 'keyword', [],
3288 3289 _('do case-insensitive search for a given text'), _('TEXT')),
3289 3290 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3290 3291 ('L', 'line-range', [],
3291 3292 _('follow line range of specified file (EXPERIMENTAL)'),
3292 3293 _('FILE,RANGE')),
3293 3294 ('', 'removed', None, _('include revisions where files were removed')),
3294 3295 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3295 3296 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3296 3297 ('', 'only-branch', [],
3297 3298 _('show only changesets within the given named branch (DEPRECATED)'),
3298 3299 _('BRANCH')),
3299 3300 ('b', 'branch', [],
3300 3301 _('show changesets within the given named branch'), _('BRANCH')),
3301 3302 ('P', 'prune', [],
3302 3303 _('do not display revision or any of its ancestors'), _('REV')),
3303 3304 ] + logopts + walkopts,
3304 3305 _('[OPTION]... [FILE]'),
3305 3306 inferrepo=True, cmdtype=readonly)
3306 3307 def log(ui, repo, *pats, **opts):
3307 3308 """show revision history of entire repository or files
3308 3309
3309 3310 Print the revision history of the specified files or the entire
3310 3311 project.
3311 3312
3312 3313 If no revision range is specified, the default is ``tip:0`` unless
3313 3314 --follow is set, in which case the working directory parent is
3314 3315 used as the starting revision.
3315 3316
3316 3317 File history is shown without following rename or copy history of
3317 3318 files. Use -f/--follow with a filename to follow history across
3318 3319 renames and copies. --follow without a filename will only show
3319 3320 ancestors of the starting revision.
3320 3321
3321 3322 By default this command prints revision number and changeset id,
3322 3323 tags, non-trivial parents, user, date and time, and a summary for
3323 3324 each commit. When the -v/--verbose switch is used, the list of
3324 3325 changed files and full commit message are shown.
3325 3326
3326 3327 With --graph the revisions are shown as an ASCII art DAG with the most
3327 3328 recent changeset at the top.
3328 3329 'o' is a changeset, '@' is a working directory parent, '_' closes a branch,
3329 3330 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
3330 3331 changeset from the lines below is a parent of the 'o' merge on the same
3331 3332 line.
3332 3333 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3333 3334 of a '|' indicates one or more revisions in a path are omitted.
3334 3335
3335 3336 .. container:: verbose
3336 3337
3337 3338 Use -L/--line-range FILE,M:N options to follow the history of lines
3338 3339 from M to N in FILE. With -p/--patch only diff hunks affecting
3339 3340 specified line range will be shown. This option requires --follow;
3340 3341 it can be specified multiple times. Currently, this option is not
3341 3342 compatible with --graph. This option is experimental.
3342 3343
3343 3344 .. note::
3344 3345
3345 3346 :hg:`log --patch` may generate unexpected diff output for merge
3346 3347 changesets, as it will only compare the merge changeset against
3347 3348 its first parent. Also, only files different from BOTH parents
3348 3349 will appear in the files: field.
3349 3350
3350 3351 .. note::
3351 3352
3352 3353 For performance reasons, :hg:`log FILE` may omit duplicate changes
3353 3354 made on branches and will not show removals or mode changes. To
3354 3355 see all such changes, use the --removed switch.
3355 3356
3356 3357 .. container:: verbose
3357 3358
3358 3359 .. note::
3359 3360
3360 3361 The history resulting from -L/--line-range options depends on diff
3361 3362 options; for instance, if whitespace changes are ignored, changes
3362 3363 affecting only whitespace in the specified line range will not be listed.
3363 3364
3364 3365 .. container:: verbose
3365 3366
3366 3367 Some examples:
3367 3368
3368 3369 - changesets with full descriptions and file lists::
3369 3370
3370 3371 hg log -v
3371 3372
3372 3373 - changesets ancestral to the working directory::
3373 3374
3374 3375 hg log -f
3375 3376
3376 3377 - last 10 commits on the current branch::
3377 3378
3378 3379 hg log -l 10 -b .
3379 3380
3380 3381 - changesets showing all modifications of a file, including removals::
3381 3382
3382 3383 hg log --removed file.c
3383 3384
3384 3385 - all changesets that touch a directory, with diffs, excluding merges::
3385 3386
3386 3387 hg log -Mp lib/
3387 3388
3388 3389 - all revision numbers that match a keyword::
3389 3390
3390 3391 hg log -k bug --template "{rev}\\n"
3391 3392
3392 3393 - the full hash identifier of the working directory parent::
3393 3394
3394 3395 hg log -r . --template "{node}\\n"
3395 3396
3396 3397 - list available log templates::
3397 3398
3398 3399 hg log -T list
3399 3400
3400 3401 - check if a given changeset is included in a tagged release::
3401 3402
3402 3403 hg log -r "a21ccf and ancestor(1.9)"
3403 3404
3404 3405 - find all changesets by some user in a date range::
3405 3406
3406 3407 hg log -k alice -d "may 2008 to jul 2008"
3407 3408
3408 3409 - summary of all changesets after the last tag::
3409 3410
3410 3411 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3411 3412
3412 3413 - changesets touching lines 13 to 23 for file.c::
3413 3414
3414 3415 hg log -L file.c,13:23
3415 3416
3416 3417 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
3417 3418 main.c with patch::
3418 3419
3419 3420 hg log -L file.c,13:23 -L main.c,2:6 -p
3420 3421
3421 3422 See :hg:`help dates` for a list of formats valid for -d/--date.
3422 3423
3423 3424 See :hg:`help revisions` for more about specifying and ordering
3424 3425 revisions.
3425 3426
3426 3427 See :hg:`help templates` for more about pre-packaged styles and
3427 3428 specifying custom templates. The default template used by the log
3428 3429 command can be customized via the ``ui.logtemplate`` configuration
3429 3430 setting.
3430 3431
3431 3432 Returns 0 on success.
3432 3433
3433 3434 """
3434 3435 opts = pycompat.byteskwargs(opts)
3435 3436 linerange = opts.get('line_range')
3436 3437
3437 3438 if linerange and not opts.get('follow'):
3438 3439 raise error.Abort(_('--line-range requires --follow'))
3439 3440
3440 3441 if linerange and pats:
3441 3442 # TODO: take pats as patterns with no line-range filter
3442 3443 raise error.Abort(
3443 3444 _('FILE arguments are not compatible with --line-range option')
3444 3445 )
3445 3446
3446 3447 repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
3447 3448 revs, differ = logcmdutil.getrevs(repo, pats, opts)
3448 3449 if linerange:
3449 3450 # TODO: should follow file history from logcmdutil._initialrevs(),
3450 3451 # then filter the result by logcmdutil._makerevset() and --limit
3451 3452 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
3452 3453
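# -C/--copies needs a rename-lookup function; when an explicit revset was
# given, bound the lookup at its maximum revision to limit the walk.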
3453 3454 getrenamed = None
3454 3455 if opts.get('copies'):
3455 3456 endrev = None
3456 3457 if opts.get('rev'):
3457 3458 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3458 3459 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3459 3460
3460 3461 ui.pager('log')
3461 3462 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
3462 3463 buffered=True)
3463 3464 if opts.get('graph'):
3464 3465 displayfn = logcmdutil.displaygraphrevs
3465 3466 else:
3466 3467 displayfn = logcmdutil.displayrevs
3467 3468 displayfn(ui, repo, revs, displayer, getrenamed)
3468 3469
3469 3470 @command('manifest',
3470 3471 [('r', 'rev', '', _('revision to display'), _('REV')),
3471 3472 ('', 'all', False, _("list files from all revisions"))]
3472 3473 + formatteropts,
3473 3474 _('[-r REV]'), cmdtype=readonly)
3474 3475 def manifest(ui, repo, node=None, rev=None, **opts):
3475 3476 """output the current or given revision of the project manifest
3476 3477
3477 3478 Print a list of version controlled files for the given revision.
3478 3479 If no revision is given, the first parent of the working directory
3479 3480 is used, or the null revision if no revision is checked out.
3480 3481
3481 3482 With -v, print file permissions, symlink and executable bits.
3482 3483 With --debug, print file revision hashes.
3483 3484
3484 3485 If option --all is specified, the list of all files from all revisions
3485 3486 is printed. This includes deleted and renamed files.
3486 3487
3487 3488 Returns 0 on success.
3488 3489 """
3489 3490 opts = pycompat.byteskwargs(opts)
3490 3491 fm = ui.formatter('manifest', opts)
3491 3492
3492 3493 if opts.get('all'):
3493 3494 if rev or node:
3494 3495 raise error.Abort(_("can't specify a revision with --all"))
3495 3496
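# --all lists every file that has ever been tracked: walk the store for
# filelog indexes ("data/<path>.i") and strip the prefix and suffix to
# recover the repository-relative paths.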
3496 3497 res = []
3497 3498 prefix = "data/"
3498 3499 suffix = ".i"
3499 3500 plen = len(prefix)
3500 3501 slen = len(suffix)
3501 3502 with repo.lock():
3502 3503 for fn, b, size in repo.store.datafiles():
3503 3504 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3504 3505 res.append(fn[plen:-slen])
3505 3506 ui.pager('manifest')
3506 3507 for f in res:
3507 3508 fm.startitem()
3508 3509 fm.write("path", '%s\n', f)
3509 3510 fm.end()
3510 3511 return
3511 3512
3512 3513 if rev and node:
3513 3514 raise error.Abort(_("please specify just one revision"))
3514 3515
3515 3516 if not node:
3516 3517 node = rev
3517 3518
3518 3519 char = {'l': '@', 'x': '*', '': '', 't': 'd'}
3519 3520 mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
3520 3521 if node:
3521 3522 repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
3522 3523 ctx = scmutil.revsingle(repo, node)
3523 3524 mf = ctx.manifest()
3524 3525 ui.pager('manifest')
3525 3526 for f in ctx:
3526 3527 fm.startitem()
3527 3528 fl = ctx[f].flags()
3528 3529 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3529 3530 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3530 3531 fm.write('path', '%s\n', f)
3531 3532 fm.end()
3532 3533
3533 3534 @command('^merge',
3534 3535 [('f', 'force', None,
3535 3536 _('force a merge including outstanding changes (DEPRECATED)')),
3536 3537 ('r', 'rev', '', _('revision to merge'), _('REV')),
3537 3538 ('P', 'preview', None,
3538 3539 _('review revisions to merge (no merge is performed)')),
3539 3540 ('', 'abort', None, _('abort the ongoing merge')),
3540 3541 ] + mergetoolopts,
3541 3542 _('[-P] [[-r] REV]'))
3542 3543 def merge(ui, repo, node=None, **opts):
3543 3544 """merge another revision into working directory
3544 3545
3545 3546 The current working directory is updated with all changes made in
3546 3547 the requested revision since the last common predecessor revision.
3547 3548
3548 3549 Files that changed between either parent are marked as changed for
3549 3550 the next commit and a commit must be performed before any further
3550 3551 updates to the repository are allowed. The next commit will have
3551 3552 two parents.
3552 3553
3553 3554 ``--tool`` can be used to specify the merge tool used for file
3554 3555 merges. It overrides the HGMERGE environment variable and your
3555 3556 configuration files. See :hg:`help merge-tools` for options.
3556 3557
3557 3558 If no revision is specified, the working directory's parent is a
3558 3559 head revision, and the current branch contains exactly one other
3559 3560 head, then that other head is merged by default. Otherwise, an
3560 3561 explicit revision with which to merge must be provided.
3561 3562
3562 3563 See :hg:`help resolve` for information on handling file conflicts.
3563 3564
3564 3565 To undo an uncommitted merge, use :hg:`merge --abort` which
3565 3566 will check out a clean copy of the original merge parent, losing
3566 3567 all changes.
3567 3568
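For example, to merge the only other head of the current branch, or to
abort a merge that has not yet been committed::

hg merge
hg merge --abort
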
3568 3569 Returns 0 on success, 1 if there are unresolved files.
3569 3570 """
3570 3571
3571 3572 opts = pycompat.byteskwargs(opts)
3572 3573 abort = opts.get('abort')
3573 3574 if abort and repo.dirstate.p2() == nullid:
3574 3575 cmdutil.wrongtooltocontinue(repo, _('merge'))
3575 3576 if abort:
3576 3577 if node:
3577 3578 raise error.Abort(_("cannot specify a node with --abort"))
3578 3579 if opts.get('rev'):
3579 3580 raise error.Abort(_("cannot specify both --rev and --abort"))
3580 3581 if opts.get('preview'):
3581 3582 raise error.Abort(_("cannot specify --preview with --abort"))
3582 3583 if opts.get('rev') and node:
3583 3584 raise error.Abort(_("please specify just one revision"))
3584 3585 if not node:
3585 3586 node = opts.get('rev')
3586 3587
3587 3588 if node:
3588 3589 node = scmutil.revsingle(repo, node).node()
3589 3590
3590 3591 if not node and not abort:
3591 3592 node = repo[destutil.destmerge(repo)].node()
3592 3593
3593 3594 if opts.get('preview'):
3594 3595 # find nodes that are ancestors of p2 but not of p1
3595 3596 p1 = repo.lookup('.')
3596 3597 p2 = repo.lookup(node)
3597 3598 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3598 3599
3599 3600 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3600 3601 for node in nodes:
3601 3602 displayer.show(repo[node])
3602 3603 displayer.close()
3603 3604 return 0
3604 3605
3605 3606 try:
3606 3607 # ui.forcemerge is an internal variable, do not document
3607 3608 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3608 3609 force = opts.get('force')
3609 3610 labels = ['working copy', 'merge rev']
3610 3611 return hg.merge(repo, node, force=force, mergeforce=force,
3611 3612 labels=labels, abort=abort)
3612 3613 finally:
3613 3614 ui.setconfig('ui', 'forcemerge', '', 'merge')
3614 3615
3615 3616 @command('outgoing|out',
3616 3617 [('f', 'force', None, _('run even when the destination is unrelated')),
3617 3618 ('r', 'rev', [],
3618 3619 _('a changeset intended to be included in the destination'), _('REV')),
3619 3620 ('n', 'newest-first', None, _('show newest record first')),
3620 3621 ('B', 'bookmarks', False, _('compare bookmarks')),
3621 3622 ('b', 'branch', [], _('a specific branch you would like to push'),
3622 3623 _('BRANCH')),
3623 3624 ] + logopts + remoteopts + subrepoopts,
3624 3625 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3625 3626 def outgoing(ui, repo, dest=None, **opts):
3626 3627 """show changesets not found in the destination
3627 3628
3628 3629 Show changesets not found in the specified destination repository
3629 3630 or the default push location. These are the changesets that would
3630 3631 be pushed if a push was requested.
3631 3632
3632 3633 See pull for details of valid destination formats.
3633 3634
3634 3635 .. container:: verbose
3635 3636
3636 3637 With -B/--bookmarks, the result of bookmark comparison between
3637 3638 local and remote repositories is displayed. With -v/--verbose,
3638 3639 status is also displayed for each bookmark, as shown below::
3639 3640
3640 3641 BM1 01234567890a added
3641 3642 BM2 deleted
3642 3643 BM3 234567890abc advanced
3643 3644 BM4 34567890abcd diverged
3644 3645 BM5 4567890abcde changed
3645 3646
3646 3647 The action taken when pushing depends on the
3647 3648 status of each bookmark:
3648 3649
3649 3650 :``added``: push with ``-B`` will create it
3650 3651 :``deleted``: push with ``-B`` will delete it
3651 3652 :``advanced``: push will update it
3652 3653 :``diverged``: push with ``-B`` will update it
3653 3654 :``changed``: push with ``-B`` will update it
3654 3655
3655 3656 From the point of view of pushing behavior, bookmarks
3656 3657 existing only in the remote repository are treated as
3657 3658 ``deleted``, even if they were in fact added remotely.
3658 3659
3659 3660 Returns 0 if there are outgoing changes, 1 otherwise.
3660 3661 """
3661 3662 opts = pycompat.byteskwargs(opts)
3662 3663 if opts.get('graph'):
3663 3664 logcmdutil.checkunsupportedgraphflags([], opts)
3664 3665 o, other = hg._outgoing(ui, repo, dest, opts)
3665 3666 if not o:
3666 3667 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3667 3668 return
3668 3669
3669 3670 revdag = logcmdutil.graphrevs(repo, o, opts)
3670 3671 ui.pager('outgoing')
3671 3672 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
3672 3673 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3673 3674 graphmod.asciiedges)
3674 3675 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3675 3676 return 0
3676 3677
3677 3678 if opts.get('bookmarks'):
3678 3679 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3679 3680 dest, branches = hg.parseurl(dest, opts.get('branch'))
3680 3681 other = hg.peer(repo, opts, dest)
3681 3682 if 'bookmarks' not in other.listkeys('namespaces'):
3682 3683 ui.warn(_("remote doesn't support bookmarks\n"))
3683 3684 return 0
3684 3685 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3685 3686 ui.pager('outgoing')
3686 3687 return bookmarks.outgoing(ui, repo, other)
3687 3688
3688 3689 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3689 3690 try:
3690 3691 return hg.outgoing(ui, repo, dest, opts)
3691 3692 finally:
3692 3693 del repo._subtoppath
3693 3694
3694 3695 @command('parents',
3695 3696 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3696 3697 ] + templateopts,
3697 3698 _('[-r REV] [FILE]'),
3698 3699 inferrepo=True)
3699 3700 def parents(ui, repo, file_=None, **opts):
3700 3701 """show the parents of the working directory or revision (DEPRECATED)
3701 3702
3702 3703 Print the working directory's parent revisions. If a revision is
3703 3704 given via -r/--rev, the parent of that revision will be printed.
3704 3705 If a file argument is given, the revision in which the file was
3705 3706 last changed (before the working directory revision or the
3706 3707 argument to --rev if given) is printed.
3707 3708
3708 3709 This command is equivalent to::
3709 3710
3710 3711 hg log -r "p1()+p2()" or
3711 3712 hg log -r "p1(REV)+p2(REV)" or
3712 3713 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3713 3714 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3714 3715
3715 3716 See :hg:`summary` and :hg:`help revsets` for related information.
3716 3717
3717 3718 Returns 0 on success.
3718 3719 """
3719 3720
3720 3721 opts = pycompat.byteskwargs(opts)
3721 3722 rev = opts.get('rev')
3722 3723 if rev:
3723 3724 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3724 3725 ctx = scmutil.revsingle(repo, rev, None)
3725 3726
3726 3727 if file_:
3727 3728 m = scmutil.match(ctx, (file_,), opts)
3728 3729 if m.anypats() or len(m.files()) != 1:
3729 3730 raise error.Abort(_('can only specify an explicit filename'))
3730 3731 file_ = m.files()[0]
3731 3732 filenodes = []
3732 3733 for cp in ctx.parents():
3733 3734 if not cp:
3734 3735 continue
3735 3736 try:
3736 3737 filenodes.append(cp.filenode(file_))
3737 3738 except error.LookupError:
3738 3739 pass
3739 3740 if not filenodes:
3740 3741 raise error.Abort(_("'%s' not found in manifest!") % file_)
3741 3742 p = []
3742 3743 for fn in filenodes:
3743 3744 fctx = repo.filectx(file_, fileid=fn)
3744 3745 p.append(fctx.node())
3745 3746 else:
3746 3747 p = [cp.node() for cp in ctx.parents()]
3747 3748
3748 3749 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3749 3750 for n in p:
3750 3751 if n != nullid:
3751 3752 displayer.show(repo[n])
3752 3753 displayer.close()
3753 3754
3754 3755 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
3755 3756 cmdtype=readonly)
3756 3757 def paths(ui, repo, search=None, **opts):
3757 3758 """show aliases for remote repositories
3758 3759
3759 3760 Show definition of symbolic path name NAME. If no name is given,
3760 3761 show definition of all available names.
3761 3762
3762 3763 Option -q/--quiet suppresses all output when searching for NAME
3763 3764 and shows only the path names when listing all definitions.
3764 3765
3765 3766 Path names are defined in the [paths] section of your
3766 3767 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3767 3768 repository, ``.hg/hgrc`` is used, too.
3768 3769
3769 3770 The path names ``default`` and ``default-push`` have a special
3770 3771 meaning. When performing a push or pull operation, they are used
3771 3772 as fallbacks if no location is specified on the command-line.
3772 3773 When ``default-push`` is set, it will be used for push and
3773 3774 ``default`` will be used for pull; otherwise ``default`` is used
3774 3775 as the fallback for both. When cloning a repository, the clone
3775 3776 source is written as ``default`` in ``.hg/hgrc``.
3776 3777
3777 3778 .. note::
3778 3779
3779 3780 ``default`` and ``default-push`` apply to all inbound (e.g.
3780 3781 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3781 3782 and :hg:`bundle`) operations.
3782 3783
3783 3784 See :hg:`help urls` for more information.
3784 3785
3785 3786 Returns 0 on success.
3786 3787 """
3787 3788
3788 3789 opts = pycompat.byteskwargs(opts)
3789 3790 ui.pager('paths')
3790 3791 if search:
3791 3792 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3792 3793 if name == search]
3793 3794 else:
3794 3795 pathitems = sorted(ui.paths.iteritems())
3795 3796
3796 3797 fm = ui.formatter('paths', opts)
3797 3798 if fm.isplain():
3798 3799 hidepassword = util.hidepassword
3799 3800 else:
3800 3801 hidepassword = bytes
3801 3802 if ui.quiet:
3802 3803 namefmt = '%s\n'
3803 3804 else:
3804 3805 namefmt = '%s = '
3805 3806 showsubopts = not search and not ui.quiet
3806 3807
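# Default output is "name = url" plus any sub-options; --quiet prints
# just the names, while an exact name search prints only the URL.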
3807 3808 for name, path in pathitems:
3808 3809 fm.startitem()
3809 3810 fm.condwrite(not search, 'name', namefmt, name)
3810 3811 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3811 3812 for subopt, value in sorted(path.suboptions.items()):
3812 3813 assert subopt not in ('name', 'url')
3813 3814 if showsubopts:
3814 3815 fm.plain('%s:%s = ' % (name, subopt))
3815 3816 fm.condwrite(showsubopts, subopt, '%s\n', value)
3816 3817
3817 3818 fm.end()
3818 3819
3819 3820 if search and not pathitems:
3820 3821 if not ui.quiet:
3821 3822 ui.warn(_("not found!\n"))
3822 3823 return 1
3823 3824 else:
3824 3825 return 0
3825 3826
3826 3827 @command('phase',
3827 3828 [('p', 'public', False, _('set changeset phase to public')),
3828 3829 ('d', 'draft', False, _('set changeset phase to draft')),
3829 3830 ('s', 'secret', False, _('set changeset phase to secret')),
3830 3831 ('f', 'force', False, _('allow to move boundary backward')),
3831 3832 ('r', 'rev', [], _('target revision'), _('REV')),
3832 3833 ],
3833 3834 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3834 3835 def phase(ui, repo, *revs, **opts):
3835 3836 """set or show the current phase name
3836 3837
3837 3838 With no argument, show the phase name of the current revision(s).
3838 3839
3839 3840 With one of -p/--public, -d/--draft or -s/--secret, change the
3840 3841 phase value of the specified revisions.
3841 3842
3842 3843 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3843 3844 lower phase to a higher phase. Phases are ordered as follows::
3844 3845
3845 3846 public < draft < secret
3846 3847
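For example, to show the phase of the working directory parent, or to
make a draft changeset public (``REV`` is a placeholder)::

hg phase -r .
hg phase --public -r REV
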
3847 3848 Returns 0 on success, 1 if some phases could not be changed.
3848 3849
3849 3850 (For more information about the phases concept, see :hg:`help phases`.)
3850 3851 """
3851 3852 opts = pycompat.byteskwargs(opts)
3852 3853 # search for a unique phase argument
3853 3854 targetphase = None
3854 3855 for idx, name in enumerate(phases.phasenames):
3855 3856 if opts[name]:
3856 3857 if targetphase is not None:
3857 3858 raise error.Abort(_('only one phase can be specified'))
3858 3859 targetphase = idx
3859 3860
3860 3861 # look for specified revision
3861 3862 revs = list(revs)
3862 3863 revs.extend(opts['rev'])
3863 3864 if not revs:
3864 3865 # display both parents as the second parent phase can influence
3865 3866 # the phase of a merge commit
3866 3867 revs = [c.rev() for c in repo[None].parents()]
3867 3868
3868 3869 revs = scmutil.revrange(repo, revs)
3869 3870
3870 3871 ret = 0
3871 3872 if targetphase is None:
3872 3873 # display
3873 3874 for r in revs:
3874 3875 ctx = repo[r]
3875 3876 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3876 3877 else:
3877 3878 with repo.lock(), repo.transaction("phase") as tr:
3878 3879 # set phase
3879 3880 if not revs:
3880 3881 raise error.Abort(_('empty revision set'))
3881 3882 nodes = [repo[r].node() for r in revs]
3882 3883 # moving revision from public to draft may hide them
3883 3884 # We have to check result on an unfiltered repository
3884 3885 unfi = repo.unfiltered()
3885 3886 getphase = unfi._phasecache.phase
3886 3887 olddata = [getphase(unfi, r) for r in unfi]
3887 3888 phases.advanceboundary(repo, tr, targetphase, nodes)
3888 3889 if opts['force']:
3889 3890 phases.retractboundary(repo, tr, targetphase, nodes)
3890 3891 getphase = unfi._phasecache.phase
3891 3892 newdata = [getphase(unfi, r) for r in unfi]
3892 3893 changes = sum(newdata[r] != olddata[r] for r in unfi)
3893 3894 cl = unfi.changelog
3894 3895 rejected = [n for n in nodes
3895 3896 if newdata[cl.rev(n)] < targetphase]
3896 3897 if rejected:
3897 3898 ui.warn(_('cannot move %i changesets to a higher '
3898 3899 'phase, use --force\n') % len(rejected))
3899 3900 ret = 1
3900 3901 if changes:
3901 3902 msg = _('phase changed for %i changesets\n') % changes
3902 3903 if ret:
3903 3904 ui.status(msg)
3904 3905 else:
3905 3906 ui.note(msg)
3906 3907 else:
3907 3908 ui.warn(_('no phases changed\n'))
3908 3909 return ret
3909 3910
3910 3911 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3911 3912 """Run after a changegroup has been added via pull/unbundle
3912 3913
3913 3914 This takes arguments below:
3914 3915
3915 3916 :modheads: change of heads by pull/unbundle
3916 3917 :optupdate: updating working directory is needed or not
3917 3918 :checkout: update destination revision (or None to default destination)
3918 3919 :brev: a name, which might be a bookmark to be activated after updating
3919 3920 """
3920 3921 if modheads == 0:
3921 3922 return
3922 3923 if optupdate:
3923 3924 try:
3924 3925 return hg.updatetotally(ui, repo, checkout, brev)
3925 3926 except error.UpdateAbort as inst:
3926 3927 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
3927 3928 hint = inst.hint
3928 3929 raise error.UpdateAbort(msg, hint=hint)
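# More than one head was added: suggest inspecting or merging the heads;
# otherwise just remind the user how to check out a working copy.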
3929 3930 if modheads > 1:
3930 3931 currentbranchheads = len(repo.branchheads())
3931 3932 if currentbranchheads == modheads:
3932 3933 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3933 3934 elif currentbranchheads > 1:
3934 3935 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3935 3936 "merge)\n"))
3936 3937 else:
3937 3938 ui.status(_("(run 'hg heads' to see heads)\n"))
3938 3939 elif not ui.configbool('commands', 'update.requiredest'):
3939 3940 ui.status(_("(run 'hg update' to get a working copy)\n"))
3940 3941
3941 3942 @command('^pull',
3942 3943 [('u', 'update', None,
3943 3944 _('update to new branch head if new descendants were pulled')),
3944 3945 ('f', 'force', None, _('run even when remote repository is unrelated')),
3945 3946 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3946 3947 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3947 3948 ('b', 'branch', [], _('a specific branch you would like to pull'),
3948 3949 _('BRANCH')),
3949 3950 ] + remoteopts,
3950 3951 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3951 3952 def pull(ui, repo, source="default", **opts):
3952 3953 """pull changes from the specified source
3953 3954
3954 3955 Pull changes from a remote repository to a local one.
3955 3956
3956 3957 This finds all changes from the repository at the specified path
3957 3958 or URL and adds them to a local repository (the current one unless
3958 3959 -R is specified). By default, this does not update the copy of the
3959 3960 project in the working directory.
3960 3961
3961 3962 Use :hg:`incoming` if you want to see what would have been added
3962 3963 by a pull at the time you issued this command. If you then decide
3963 3964 to add those changes to the repository, you should use :hg:`pull
3964 3965 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3965 3966
3966 3967 If SOURCE is omitted, the 'default' path will be used.
3967 3968 See :hg:`help urls` for more information.
3968 3969
3969 3970 Specifying bookmark as ``.`` is equivalent to specifying the active
3970 3971 bookmark's name.
3971 3972
3972 3973 Returns 0 on success, 1 if an update had unresolved files.
3973 3974 """
3974 3975
3975 3976 opts = pycompat.byteskwargs(opts)
3976 3977 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3977 3978 msg = _('update destination required by configuration')
3978 3979 hint = _('use hg pull followed by hg update DEST')
3979 3980 raise error.Abort(msg, hint=hint)
3980 3981
3981 3982 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3982 3983 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3983 3984 other = hg.peer(repo, opts, source)
3984 3985 try:
3985 3986 revs, checkout = hg.addbranchrevs(repo, other, branches,
3986 3987 opts.get('rev'))
3987 3988
3988 3989
3989 3990 pullopargs = {}
3990 3991 if opts.get('bookmark'):
3991 3992 if not revs:
3992 3993 revs = []
3993 3994 # The list of bookmarks used here is not the one used to actually
3994 3995 # update the bookmark name. This can result in the revision pulled
3995 3996 # not ending up with the name of the bookmark because of a race
3996 3997 # condition on the server. (See issue 4689 for details)
3997 3998 remotebookmarks = other.listkeys('bookmarks')
3998 3999 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
3999 4000 pullopargs['remotebookmarks'] = remotebookmarks
4000 4001 for b in opts['bookmark']:
4001 4002 b = repo._bookmarks.expandname(b)
4002 4003 if b not in remotebookmarks:
4003 4004 raise error.Abort(_('remote bookmark %s not found!') % b)
4004 4005 revs.append(hex(remotebookmarks[b]))
4005 4006
4006 4007 if revs:
4007 4008 try:
4008 4009 # When 'rev' is a bookmark name, we cannot guarantee that it
4009 4010 # will be updated with that name because of a race condition
4010 4011 # server side. (See issue 4689 for details)
4011 4012 oldrevs = revs
4012 4013 revs = [] # actually, nodes
4013 4014 for r in oldrevs:
4014 4015 node = other.lookup(r)
4015 4016 revs.append(node)
4016 4017 if r == checkout:
4017 4018 checkout = node
4018 4019 except error.CapabilityError:
4019 4020 err = _("other repository doesn't support revision lookup, "
4020 4021 "so a rev cannot be specified.")
4021 4022 raise error.Abort(err)
4022 4023
4023 4024 wlock = util.nullcontextmanager()
4024 4025 if opts.get('update'):
4025 4026 wlock = repo.wlock()
4026 4027 with wlock:
4027 4028 pullopargs.update(opts.get('opargs', {}))
4028 4029 modheads = exchange.pull(repo, other, heads=revs,
4029 4030 force=opts.get('force'),
4030 4031 bookmarks=opts.get('bookmark', ()),
4031 4032 opargs=pullopargs).cgresult
4032 4033
4033 4034 # brev is a name, which might be a bookmark to be activated at
4034 4035 # the end of the update. In other words, it is an explicit
4035 4036 # destination of the update
4036 4037 brev = None
4037 4038
4038 4039 if checkout:
4039 4040 checkout = "%d" % repo.changelog.rev(checkout)
4040 4041
4041 4042 # order below depends on implementation of
4042 4043 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4043 4044 # because 'checkout' is determined without it.
4044 4045 if opts.get('rev'):
4045 4046 brev = opts['rev'][0]
4046 4047 elif opts.get('branch'):
4047 4048 brev = opts['branch'][0]
4048 4049 else:
4049 4050 brev = branches[0]
4050 4051 repo._subtoppath = source
4051 4052 try:
4052 4053 ret = postincoming(ui, repo, modheads, opts.get('update'),
4053 4054 checkout, brev)
4054 4055
4055 4056 finally:
4056 4057 del repo._subtoppath
4057 4058
4058 4059 finally:
4059 4060 other.close()
4060 4061 return ret
4061 4062
4062 4063 @command('^push',
4063 4064 [('f', 'force', None, _('force push')),
4064 4065 ('r', 'rev', [],
4065 4066 _('a changeset intended to be included in the destination'),
4066 4067 _('REV')),
4067 4068 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4068 4069 ('b', 'branch', [],
4069 4070 _('a specific branch you would like to push'), _('BRANCH')),
4070 4071 ('', 'new-branch', False, _('allow pushing a new branch')),
4071 4072 ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
4072 4073 ] + remoteopts,
4073 4074 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4074 4075 def push(ui, repo, dest=None, **opts):
4075 4076 """push changes to the specified destination
4076 4077
4077 4078 Push changesets from the local repository to the specified
4078 4079 destination.
4079 4080
4080 4081 This operation is symmetrical to pull: it is identical to a pull
4081 4082 in the destination repository from the current one.
4082 4083
4083 4084 By default, push will not allow creation of new heads at the
4084 4085 destination, since multiple heads would make it unclear which head
4085 4086 to use. In this situation, it is recommended to pull and merge
4086 4087 before pushing.
4087 4088
4088 4089 Use --new-branch if you want to allow push to create a new named
4089 4090 branch that is not present at the destination. This allows you to
4090 4091 only create a new branch without forcing other changes.
4091 4092
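For example, to push while permitting branches that do not yet exist in
the destination to be created::

hg push --new-branch
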
4092 4093 .. note::
4093 4094
4094 4095 Extra care should be taken with the -f/--force option,
4095 4096 which will push all new heads on all branches, an action which will
4096 4097 almost always cause confusion for collaborators.
4097 4098
4098 4099 If -r/--rev is used, the specified revision and all its ancestors
4099 4100 will be pushed to the remote repository.
4100 4101
4101 4102 If -B/--bookmark is used, the specified bookmarked revision, its
4102 4103 ancestors, and the bookmark will be pushed to the remote
4103 4104 repository. Specifying ``.`` is equivalent to specifying the active
4104 4105 bookmark's name.
4105 4106
4106 4107 Please see :hg:`help urls` for important details about ``ssh://``
4107 4108 URLs. If DESTINATION is omitted, a default path will be used.
4108 4109
4109 4110 .. container:: verbose
4110 4111
4111 4112 The --pushvars option sends strings to the server that become
4112 4113 environment variables prepended with ``HG_USERVAR_``. For example,
4113 4114 ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
4114 4115 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
4115 4116
4116 4117 pushvars can enable user-overridable hooks as well as set debug
4117 4118 levels. One example is a hook that blocks commits containing
4118 4119 conflict markers but lets the user override it when the file uses
4119 4120 conflict markers for testing purposes or its format contains
4120 4121 strings that look like conflict markers.
4121 4122
4122 4123 By default, servers will ignore `--pushvars`. To enable it add the
4123 4124 following to your configuration file::
4124 4125
4125 4126 [push]
4126 4127 pushvars.server = true
4127 4128
4128 4129 Returns 0 if push was successful, 1 if nothing to push.
4129 4130 """
4130 4131
4131 4132 opts = pycompat.byteskwargs(opts)
4132 4133 if opts.get('bookmark'):
4133 4134 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4134 4135 for b in opts['bookmark']:
4135 4136 # translate -B options to -r so changesets get pushed
4136 4137 b = repo._bookmarks.expandname(b)
4137 4138 if b in repo._bookmarks:
4138 4139 opts.setdefault('rev', []).append(b)
4139 4140 else:
4140 4141 # if we try to push a deleted bookmark, translate it to null
4141 4142 # this lets simultaneous -r, -b options continue working
4142 4143 opts.setdefault('rev', []).append("null")
4143 4144
4144 4145 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4145 4146 if not path:
4146 4147 raise error.Abort(_('default repository not configured!'),
4147 4148 hint=_("see 'hg help config.paths'"))
4148 4149 dest = path.pushloc or path.loc
4149 4150 branches = (path.branch, opts.get('branch') or [])
4150 4151 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4151 4152 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4152 4153 other = hg.peer(repo, opts, dest)
4153 4154
4154 4155 if revs:
4155 4156 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4156 4157 if not revs:
4157 4158 raise error.Abort(_("specified revisions evaluate to an empty set"),
4158 4159 hint=_("use different revision arguments"))
4159 4160 elif path.pushrev:
4160 4161 # It doesn't make any sense to specify ancestor revisions. So limit
4161 4162 # to DAG heads to make discovery simpler.
4162 4163 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4163 4164 revs = scmutil.revrange(repo, [expr])
4164 4165 revs = [repo[rev].node() for rev in revs]
4165 4166 if not revs:
4166 4167 raise error.Abort(_('default push revset for path evaluates to an '
4167 4168 'empty set'))
4168 4169
4169 4170 repo._subtoppath = dest
4170 4171 try:
4171 4172 # push subrepos depth-first for coherent ordering
4172 4173 c = repo['.']
4173 4174 subs = c.substate # only repos that are committed
4174 4175 for s in sorted(subs):
4175 4176 result = c.sub(s).push(opts)
4176 4177 if result == 0:
4177 4178 return not result
4178 4179 finally:
4179 4180 del repo._subtoppath
4180 4181
4181 4182 opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
4182 4183 opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
4183 4184
4184 4185 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4185 4186 newbranch=opts.get('new_branch'),
4186 4187 bookmarks=opts.get('bookmark', ()),
4187 4188 opargs=opargs)
4188 4189
4189 4190 result = not pushop.cgresult
4190 4191
4191 4192 if pushop.bkresult is not None:
4192 4193 if pushop.bkresult == 2:
4193 4194 result = 2
4194 4195 elif not result and pushop.bkresult:
4195 4196 result = 2
4196 4197
4197 4198 return result
4198 4199
4199 4200 @command('recover', [])
4200 4201 def recover(ui, repo):
4201 4202 """roll back an interrupted transaction
4202 4203
4203 4204 Recover from an interrupted commit or pull.
4204 4205
4205 4206 This command tries to fix the repository status after an
4206 4207 interrupted operation. It should only be necessary when Mercurial
4207 4208 suggests it.
4208 4209
4209 4210 Returns 0 if successful, 1 if nothing to recover or verify fails.
4210 4211 """
4211 4212 if repo.recover():
4212 4213 return hg.verify(repo)
4213 4214 return 1
4214 4215
4215 4216 @command('^remove|rm',
4216 4217 [('A', 'after', None, _('record delete for missing files')),
4217 4218 ('f', 'force', None,
4218 4219 _('forget added files, delete modified files')),
4219 4220 ] + subrepoopts + walkopts + dryrunopts,
4220 4221 _('[OPTION]... FILE...'),
4221 4222 inferrepo=True)
4222 4223 def remove(ui, repo, *pats, **opts):
4223 4224 """remove the specified files on the next commit
4224 4225
4225 4226 Schedule the indicated files for removal from the current branch.
4226 4227
4227 4228 This command schedules the files to be removed at the next commit.
4228 4229 To undo a remove before that, see :hg:`revert`. To undo added
4229 4230 files, see :hg:`forget`.
4230 4231
4231 4232 .. container:: verbose
4232 4233
4233 4234 -A/--after can be used to remove only files that have already
4234 4235 been deleted, -f/--force can be used to force deletion, and -Af
4235 4236 can be used to remove files from the next revision without
4236 4237 deleting them from the working directory.
4237 4238
4238 4239 The following table details the behavior of remove for different
4239 4240 file states (columns) and option combinations (rows). The file
4240 4241 states are Added [A], Clean [C], Modified [M] and Missing [!]
4241 4242 (as reported by :hg:`status`). The actions are Warn, Remove
4242 4243 (from branch) and Delete (from disk):
4243 4244
4244 4245 ========= == == == ==
4245 4246 opt/state A C M !
4246 4247 ========= == == == ==
4247 4248 none W RD W R
4248 4249 -f R RD RD R
4249 4250 -A W W W R
4250 4251 -Af R R R R
4251 4252 ========= == == == ==
4252 4253
4253 4254 .. note::
4254 4255
4255 4256 :hg:`remove` never deletes files in Added [A] state from the
4256 4257 working directory, not even if ``--force`` is specified.
4257 4258
4258 4259 Returns 0 on success, 1 if any warnings encountered.
4259 4260 """
4260 4261
4261 4262 opts = pycompat.byteskwargs(opts)
4262 4263 after, force = opts.get('after'), opts.get('force')
4263 4264 dryrun = opts.get('dry_run')
4264 4265 if not pats and not after:
4265 4266 raise error.Abort(_('no files specified'))
4266 4267
4267 4268 m = scmutil.match(repo[None], pats, opts)
4268 4269 subrepos = opts.get('subrepos')
4269 4270 return cmdutil.remove(ui, repo, m, "", after, force, subrepos,
4270 4271 dryrun=dryrun)
4271 4272
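# Editor's note: a standalone quick-reference sketch (not Mercurial code) that
# encodes the option/state table from the docstring above. W = warn,
# R = remove from the branch, D = delete from disk.
REMOVE_BEHAVIOR = {
    # (after, force): {file state: action}
    (False, False): {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},   # no options
    (False, True):  {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},   # -f
    (True,  False): {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},   # -A
    (True,  True):  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},   # -Af
}

assert REMOVE_BEHAVIOR[(False, True)]['M'] == 'RD'  # -f deletes modified files
assert REMOVE_BEHAVIOR[(True, True)]['A'] == 'R'    # -Af never deletes from disk
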
4272 4273 @command('rename|move|mv',
4273 4274 [('A', 'after', None, _('record a rename that has already occurred')),
4274 4275 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4275 4276 ] + walkopts + dryrunopts,
4276 4277 _('[OPTION]... SOURCE... DEST'))
4277 4278 def rename(ui, repo, *pats, **opts):
4278 4279 """rename files; equivalent of copy + remove
4279 4280
4280 4281 Mark dest as copies of sources; mark sources for deletion. If dest
4281 4282 is a directory, copies are put in that directory. If dest is a
4282 4283 file, there can only be one source.
4283 4284
4284 4285 By default, this command copies the contents of files as they
4285 4286 exist in the working directory. If invoked with -A/--after, the
4286 4287 operation is recorded, but no copying is performed.
4287 4288
4288 4289 This command takes effect at the next commit. To undo a rename
4289 4290 before that, see :hg:`revert`.
4290 4291
4291 4292 Returns 0 on success, 1 if errors are encountered.
4292 4293 """
4293 4294 opts = pycompat.byteskwargs(opts)
4294 4295 with repo.wlock(False):
4295 4296 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4296 4297
4297 4298 @command('resolve',
4298 4299 [('a', 'all', None, _('select all unresolved files')),
4299 4300 ('l', 'list', None, _('list state of files needing merge')),
4300 4301 ('m', 'mark', None, _('mark files as resolved')),
4301 4302 ('u', 'unmark', None, _('mark files as unresolved')),
4302 4303 ('n', 'no-status', None, _('hide status prefix'))]
4303 4304 + mergetoolopts + walkopts + formatteropts,
4304 4305 _('[OPTION]... [FILE]...'),
4305 4306 inferrepo=True)
4306 4307 def resolve(ui, repo, *pats, **opts):
4307 4308 """redo merges or set/view the merge status of files
4308 4309
4309 4310 Merges with unresolved conflicts are often the result of
4310 4311 non-interactive merging using the ``internal:merge`` configuration
4311 4312 setting, or a command-line merge tool like ``diff3``. The resolve
4312 4313 command is used to manage the files involved in a merge, after
4313 4314 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4314 4315 working directory must have two parents). See :hg:`help
4315 4316 merge-tools` for information on configuring merge tools.
4316 4317
4317 4318 The resolve command can be used in the following ways:
4318 4319
4319 4320 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4320 4321 files, discarding any previous merge attempts. Re-merging is not
4321 4322 performed for files already marked as resolved. Use ``--all/-a``
4322 4323 to select all unresolved files. ``--tool`` can be used to specify
4323 4324 the merge tool used for the given files. It overrides the HGMERGE
4324 4325 environment variable and your configuration files. Previous file
4325 4326 contents are saved with a ``.orig`` suffix.
4326 4327
4327 4328 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4328 4329 (e.g. after having manually fixed up the files). The default is
4329 4330 to mark all unresolved files.
4330 4331
4331 4332 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4332 4333 default is to mark all resolved files.
4333 4334
4334 4335 - :hg:`resolve -l`: list files which had or still have conflicts.
4335 4336 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4336 4337 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4337 4338 the list. See :hg:`help filesets` for details.
4338 4339
4339 4340 .. note::
4340 4341
4341 4342 Mercurial will not let you commit files with unresolved merge
4342 4343 conflicts. You must use :hg:`resolve -m ...` before you can
4343 4344 commit after a conflicting merge.
4344 4345
4345 4346 Returns 0 on success, 1 if any files fail a resolve attempt.
4346 4347 """
4347 4348
4348 4349 opts = pycompat.byteskwargs(opts)
4349 4350 flaglist = 'all mark unmark list no_status'.split()
4350 4351 all, mark, unmark, show, nostatus = \
4351 4352 [opts.get(o) for o in flaglist]
4352 4353
4353 4354 if (show and (mark or unmark)) or (mark and unmark):
4354 4355 raise error.Abort(_("too many options specified"))
4355 4356 if pats and all:
4356 4357 raise error.Abort(_("can't specify --all and patterns"))
4357 4358 if not (all or pats or show or mark or unmark):
4358 4359 raise error.Abort(_('no files or directories specified'),
4359 4360 hint=('use --all to re-merge all unresolved files'))
4360 4361
4361 4362 if show:
4362 4363 ui.pager('resolve')
4363 4364 fm = ui.formatter('resolve', opts)
4364 4365 ms = mergemod.mergestate.read(repo)
4365 4366 m = scmutil.match(repo[None], pats, opts)
4366 4367
4367 4368 # Labels and keys based on merge state. Unresolved path conflicts show
4368 4369 # as 'P'. Resolved path conflicts show as 'R', the same as normal
4369 4370 # resolved conflicts.
4370 4371 mergestateinfo = {
4371 4372 mergemod.MERGE_RECORD_UNRESOLVED: ('resolve.unresolved', 'U'),
4372 4373 mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
4373 4374 mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
4374 4375 mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
4375 4376 mergemod.MERGE_RECORD_DRIVER_RESOLVED: ('resolve.driverresolved',
4376 4377 'D'),
4377 4378 }
4378 4379
4379 4380 for f in ms:
4380 4381 if not m(f):
4381 4382 continue
4382 4383
4383 4384 label, key = mergestateinfo[ms[f]]
4384 4385 fm.startitem()
4385 4386 fm.condwrite(not nostatus, 'status', '%s ', key, label=label)
4386 4387 fm.write('path', '%s\n', f, label=label)
4387 4388 fm.end()
4388 4389 return 0
4389 4390
4390 4391 with repo.wlock():
4391 4392 ms = mergemod.mergestate.read(repo)
4392 4393
4393 4394 if not (ms.active() or repo.dirstate.p2() != nullid):
4394 4395 raise error.Abort(
4395 4396 _('resolve command not applicable when not merging'))
4396 4397
4397 4398 wctx = repo[None]
4398 4399
4399 4400 if (ms.mergedriver
4400 4401 and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED):
4401 4402 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4402 4403 ms.commit()
4403 4404 # allow mark and unmark to go through
4404 4405 if not mark and not unmark and not proceed:
4405 4406 return 1
4406 4407
4407 4408 m = scmutil.match(wctx, pats, opts)
4408 4409 ret = 0
4409 4410 didwork = False
4410 4411 runconclude = False
4411 4412
4412 4413 tocomplete = []
4413 4414 for f in ms:
4414 4415 if not m(f):
4415 4416 continue
4416 4417
4417 4418 didwork = True
4418 4419
4419 4420 # don't let driver-resolved files be marked, and run the conclude
4420 4421 # step if asked to resolve
4421 4422 if ms[f] == mergemod.MERGE_RECORD_DRIVER_RESOLVED:
4422 4423 exact = m.exact(f)
4423 4424 if mark:
4424 4425 if exact:
4425 4426 ui.warn(_('not marking %s as it is driver-resolved\n')
4426 4427 % f)
4427 4428 elif unmark:
4428 4429 if exact:
4429 4430 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4430 4431 % f)
4431 4432 else:
4432 4433 runconclude = True
4433 4434 continue
4434 4435
4435 4436 # path conflicts must be resolved manually
4436 4437 if ms[f] in (mergemod.MERGE_RECORD_UNRESOLVED_PATH,
4437 4438 mergemod.MERGE_RECORD_RESOLVED_PATH):
4438 4439 if mark:
4439 4440 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED_PATH)
4440 4441 elif unmark:
4441 4442 ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
4442 4443 elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
4443 4444 ui.warn(_('%s: path conflict must be resolved manually\n')
4444 4445 % f)
4445 4446 continue
4446 4447
4447 4448 if mark:
4448 4449 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
4449 4450 elif unmark:
4450 4451 ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED)
4451 4452 else:
4452 4453 # backup pre-resolve (merge uses .orig for its own purposes)
4453 4454 a = repo.wjoin(f)
4454 4455 try:
4455 4456 util.copyfile(a, a + ".resolve")
4456 4457 except (IOError, OSError) as inst:
4457 4458 if inst.errno != errno.ENOENT:
4458 4459 raise
4459 4460
4460 4461 try:
4461 4462 # preresolve file
4462 4463 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4463 4464 'resolve')
4464 4465 complete, r = ms.preresolve(f, wctx)
4465 4466 if not complete:
4466 4467 tocomplete.append(f)
4467 4468 elif r:
4468 4469 ret = 1
4469 4470 finally:
4470 4471 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4471 4472 ms.commit()
4472 4473
4473 4474 # replace filemerge's .orig file with our resolve file, but only
4474 4475 # for merges that are complete
4475 4476 if complete:
4476 4477 try:
4477 4478 util.rename(a + ".resolve",
4478 4479 scmutil.origpath(ui, repo, a))
4479 4480 except OSError as inst:
4480 4481 if inst.errno != errno.ENOENT:
4481 4482 raise
4482 4483
4483 4484 for f in tocomplete:
4484 4485 try:
4485 4486 # resolve file
4486 4487 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4487 4488 'resolve')
4488 4489 r = ms.resolve(f, wctx)
4489 4490 if r:
4490 4491 ret = 1
4491 4492 finally:
4492 4493 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4493 4494 ms.commit()
4494 4495
4495 4496 # replace filemerge's .orig file with our resolve file
4496 4497 a = repo.wjoin(f)
4497 4498 try:
4498 4499 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4499 4500 except OSError as inst:
4500 4501 if inst.errno != errno.ENOENT:
4501 4502 raise
4502 4503
4503 4504 ms.commit()
4504 4505 ms.recordactions()
4505 4506
4506 4507 if not didwork and pats:
4507 4508 hint = None
4508 4509 if not any([p for p in pats if p.find(':') >= 0]):
4509 4510 pats = ['path:%s' % p for p in pats]
4510 4511 m = scmutil.match(wctx, pats, opts)
4511 4512 for f in ms:
4512 4513 if not m(f):
4513 4514 continue
4514 4515 flags = ''.join(['-%s ' % o[0:1] for o in flaglist
4515 4516 if opts.get(o)])
4516 4517 hint = _("(try: hg resolve %s%s)\n") % (
4517 4518 flags,
4518 4519 ' '.join(pats))
4519 4520 break
4520 4521 ui.warn(_("arguments do not match paths that need resolving\n"))
4521 4522 if hint:
4522 4523 ui.warn(hint)
4523 4524 elif ms.mergedriver and ms.mdstate() != 's':
4524 4525 # run conclude step when either a driver-resolved file is requested
4525 4526 # or there are no driver-resolved files
4526 4527 # we can't use 'ret' to determine whether any files are unresolved
4527 4528 # because we might not have tried to resolve some
4528 4529 if ((runconclude or not list(ms.driverresolved()))
4529 4530 and not list(ms.unresolved())):
4530 4531 proceed = mergemod.driverconclude(repo, ms, wctx)
4531 4532 ms.commit()
4532 4533 if not proceed:
4533 4534 return 1
4534 4535
4535 4536 # Nudge users into finishing an unfinished operation
4536 4537 unresolvedf = list(ms.unresolved())
4537 4538 driverresolvedf = list(ms.driverresolved())
4538 4539 if not unresolvedf and not driverresolvedf:
4539 4540 ui.status(_('(no more unresolved files)\n'))
4540 4541 cmdutil.checkafterresolved(repo)
4541 4542 elif not unresolvedf:
4542 4543 ui.status(_('(no more unresolved files -- '
4543 4544 'run "hg resolve --all" to conclude)\n'))
4544 4545
4545 4546 return ret
4546 4547
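# Editor's note: a standalone sketch (not Mercurial code; names are invented
# for illustration) of the status letters printed by `hg resolve -l`, mirroring
# the mergestateinfo table above: unresolved files show as 'U', resolved files
# and resolved path conflicts as 'R', unresolved path conflicts as 'P', and
# driver-resolved files as 'D'.
RESOLVE_LETTERS = {
    'unresolved': 'U',
    'resolved': 'R',
    'unresolved path conflict': 'P',
    'resolved path conflict': 'R',
    'driver resolved': 'D',
}

def _resolveline(state, path, nostatus=False):
    # mirrors fm.condwrite(not nostatus, ...) followed by fm.write('path', ...)
    prefix = '' if nostatus else RESOLVE_LETTERS[state] + ' '
    return prefix + path + '\n'

assert _resolveline('unresolved', 'src/foo.c') == 'U src/foo.c\n'
assert _resolveline('resolved', 'README', nostatus=True) == 'README\n'
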
4547 4548 @command('revert',
4548 4549 [('a', 'all', None, _('revert all changes when no arguments given')),
4549 4550 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4550 4551 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4551 4552 ('C', 'no-backup', None, _('do not save backup copies of files')),
4552 4553 ('i', 'interactive', None, _('interactively select the changes')),
4553 4554 ] + walkopts + dryrunopts,
4554 4555 _('[OPTION]... [-r REV] [NAME]...'))
4555 4556 def revert(ui, repo, *pats, **opts):
4556 4557 """restore files to their checkout state
4557 4558
4558 4559 .. note::
4559 4560
4560 4561 To check out earlier revisions, you should use :hg:`update REV`.
4561 4562 To cancel an uncommitted merge (and lose your changes),
4562 4563 use :hg:`merge --abort`.
4563 4564
4564 4565 With no revision specified, revert the specified files or directories
4565 4566 to the contents they had in the parent of the working directory.
4566 4567 This restores the contents of files to an unmodified
4567 4568 state and unschedules adds, removes, copies, and renames. If the
4568 4569 working directory has two parents, you must explicitly specify a
4569 4570 revision.
4570 4571
4571 4572 Using the -r/--rev or -d/--date options, revert the given files or
4572 4573 directories to their states as of a specific revision. Because
4573 4574 revert does not change the working directory parents, this will
4574 4575 cause these files to appear modified. This can be helpful to "back
4575 4576 out" some or all of an earlier change. See :hg:`backout` for a
4576 4577 related method.
4577 4578
4578 4579 Modified files are saved with a .orig suffix before reverting.
4579 4580 To disable these backups, use --no-backup. It is possible to store
4580 4581 the backup files in a custom directory relative to the root of the
4581 4582 repository by setting the ``ui.origbackuppath`` configuration
4582 4583 option.
4583 4584
4584 4585 See :hg:`help dates` for a list of formats valid for -d/--date.
4585 4586
4586 4587 See :hg:`help backout` for a way to reverse the effect of an
4587 4588 earlier changeset.
4588 4589
4589 4590 Returns 0 on success.
4590 4591 """
4591 4592
4592 4593 opts = pycompat.byteskwargs(opts)
4593 4594 if opts.get("date"):
4594 4595 if opts.get("rev"):
4595 4596 raise error.Abort(_("you can't specify a revision and a date"))
4596 4597 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4597 4598
4598 4599 parent, p2 = repo.dirstate.parents()
4599 4600 if not opts.get('rev') and p2 != nullid:
4600 4601 # revert after merge is a trap for new users (issue2915)
4601 4602 raise error.Abort(_('uncommitted merge with no revision specified'),
4602 4603 hint=_("use 'hg update' or see 'hg help revert'"))
4603 4604
4604 4605 rev = opts.get('rev')
4605 4606 if rev:
4606 4607 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
4607 4608 ctx = scmutil.revsingle(repo, rev)
4608 4609
4609 4610 if (not (pats or opts.get('include') or opts.get('exclude') or
4610 4611 opts.get('all') or opts.get('interactive'))):
4611 4612 msg = _("no files or directories specified")
4612 4613 if p2 != nullid:
4613 4614 hint = _("uncommitted merge, use --all to discard all changes,"
4614 4615 " or 'hg update -C .' to abort the merge")
4615 4616 raise error.Abort(msg, hint=hint)
4616 4617 dirty = any(repo.status())
4617 4618 node = ctx.node()
4618 4619 if node != parent:
4619 4620 if dirty:
4620 4621 hint = _("uncommitted changes, use --all to discard all"
4621 4622 " changes, or 'hg update %s' to update") % ctx.rev()
4622 4623 else:
4623 4624 hint = _("use --all to revert all files,"
4624 4625 " or 'hg update %s' to update") % ctx.rev()
4625 4626 elif dirty:
4626 4627 hint = _("uncommitted changes, use --all to discard all changes")
4627 4628 else:
4628 4629 hint = _("use --all to revert all files")
4629 4630 raise error.Abort(msg, hint=hint)
4630 4631
4631 4632 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
4632 4633 **pycompat.strkwargs(opts))
4633 4634
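# Editor's note: a standalone sketch (not Mercurial code; the helper and its
# arguments are invented for illustration) of how the hint for the
# "no files or directories specified" abort above is chosen. `merging`,
# `dirty`, `differs` and `rev` stand in for the p2, repo.status() and
# node != parent checks made by the real code.
def _reverthint(merging, dirty, differs, rev):
    if merging:
        return ("uncommitted merge, use --all to discard all changes,"
                " or 'hg update -C .' to abort the merge")
    if differs:                       # reverting to a non-parent revision
        if dirty:
            return ("uncommitted changes, use --all to discard all changes,"
                    " or 'hg update %s' to update" % rev)
        return ("use --all to revert all files,"
                " or 'hg update %s' to update" % rev)
    if dirty:
        return "uncommitted changes, use --all to discard all changes"
    return "use --all to revert all files"

assert _reverthint(False, False, False, None) == "use --all to revert all files"
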
4634 4635 @command('rollback', dryrunopts +
4635 4636 [('f', 'force', False, _('ignore safety measures'))])
4636 4637 def rollback(ui, repo, **opts):
4637 4638 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4638 4639
4639 4640 Please use :hg:`commit --amend` instead of rollback to correct
4640 4641 mistakes in the last commit.
4641 4642
4642 4643 This command should be used with care. There is only one level of
4643 4644 rollback, and there is no way to undo a rollback. It will also
4644 4645 restore the dirstate at the time of the last transaction, losing
4645 4646 any dirstate changes since that time. This command does not alter
4646 4647 the working directory.
4647 4648
4648 4649 Transactions are used to encapsulate the effects of all commands
4649 4650 that create new changesets or propagate existing changesets into a
4650 4651 repository.
4651 4652
4652 4653 .. container:: verbose
4653 4654
4654 4655 For example, the following commands are transactional, and their
4655 4656 effects can be rolled back:
4656 4657
4657 4658 - commit
4658 4659 - import
4659 4660 - pull
4660 4661 - push (with this repository as the destination)
4661 4662 - unbundle
4662 4663
4663 4664 To avoid permanent data loss, rollback will refuse to roll back a
4664 4665 commit transaction if it isn't checked out. Use --force to
4665 4666 override this protection.
4666 4667
4667 4668 The rollback command can be entirely disabled by setting the
4668 4669 ``ui.rollback`` configuration setting to false. If you're here
4669 4670 because you want to use rollback and it's disabled, you can
4670 4671 re-enable the command by setting ``ui.rollback`` to true.
4671 4672
4672 4673 This command is not intended for use on public repositories. Once
4673 4674 changes are visible for pull by other users, rolling a transaction
4674 4675 back locally is ineffective (someone else may already have pulled
4675 4676 the changes). Furthermore, a race is possible with readers of the
4676 4677 repository; for example an in-progress pull from the repository
4677 4678 may fail if a rollback is performed.
4678 4679
4679 4680 Returns 0 on success, 1 if no rollback data is available.
4680 4681 """
4681 4682 if not ui.configbool('ui', 'rollback'):
4682 4683 raise error.Abort(_('rollback is disabled because it is unsafe'),
4683 4684 hint=('see `hg help -v rollback` for information'))
4684 4685 return repo.rollback(dryrun=opts.get(r'dry_run'),
4685 4686 force=opts.get(r'force'))
4686 4687
4687 4688 @command('root', [], cmdtype=readonly)
4688 4689 def root(ui, repo):
4689 4690 """print the root (top) of the current working directory
4690 4691
4691 4692 Print the root directory of the current repository.
4692 4693
4693 4694 Returns 0 on success.
4694 4695 """
4695 4696 ui.write(repo.root + "\n")
4696 4697
4697 4698 @command('^serve',
4698 4699 [('A', 'accesslog', '', _('name of access log file to write to'),
4699 4700 _('FILE')),
4700 4701 ('d', 'daemon', None, _('run server in background')),
4701 4702 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4702 4703 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4703 4704 # use string type so we can check whether something was passed
4704 4705 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4705 4706 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4706 4707 _('ADDR')),
4707 4708 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4708 4709 _('PREFIX')),
4709 4710 ('n', 'name', '',
4710 4711 _('name to show in web pages (default: working directory)'), _('NAME')),
4711 4712 ('', 'web-conf', '',
4712 4713 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4713 4714 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4714 4715 _('FILE')),
4715 4716 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4716 4717 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4717 4718 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4718 4719 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4719 4720 ('', 'style', '', _('template style to use'), _('STYLE')),
4720 4721 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4721 4722 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4722 4723 + subrepoopts,
4723 4724 _('[OPTION]...'),
4724 4725 optionalrepo=True)
4725 4726 def serve(ui, repo, **opts):
4726 4727 """start stand-alone webserver
4727 4728
4728 4729 Start a local HTTP repository browser and pull server. You can use
4729 4730 this for ad-hoc sharing and browsing of repositories. It is
4730 4731 recommended to use a real web server to serve a repository for
4731 4732 longer periods of time.
4732 4733
4733 4734 Please note that the server does not implement access control.
4734 4735 This means that, by default, anybody can read from the server and
4735 4736 nobody can write to it. Set the ``web.allow-push``
4736 4737 option to ``*`` to allow everybody to push to the server. You
4737 4738 should use a real web server if you need to authenticate users.
4738 4739
4739 4740 By default, the server logs accesses to stdout and errors to
4740 4741 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4741 4742 files.
4742 4743
4743 4744 To have the server choose a free port number to listen on, specify
4744 4745 a port number of 0; in this case, the server will print the port
4745 4746 number it uses.
4746 4747
4747 4748 Returns 0 on success.
4748 4749 """
4749 4750
4750 4751 opts = pycompat.byteskwargs(opts)
4751 4752 if opts["stdio"] and opts["cmdserver"]:
4752 4753 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4753 4754
4754 4755 if opts["stdio"]:
4755 4756 if repo is None:
4756 4757 raise error.RepoError(_("there is no Mercurial repository here"
4757 4758 " (.hg not found)"))
4758 4759 s = wireprotoserver.sshserver(ui, repo)
4759 4760 s.serve_forever()
4760 4761
4761 4762 service = server.createservice(ui, repo, opts)
4762 4763 return server.runservice(opts, initfn=service.init, runfn=service.run)
4763 4764
4764 4765 @command('^status|st',
4765 4766 [('A', 'all', None, _('show status of all files')),
4766 4767 ('m', 'modified', None, _('show only modified files')),
4767 4768 ('a', 'added', None, _('show only added files')),
4768 4769 ('r', 'removed', None, _('show only removed files')),
4769 4770 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4770 4771 ('c', 'clean', None, _('show only files without changes')),
4771 4772 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4772 4773 ('i', 'ignored', None, _('show only ignored files')),
4773 4774 ('n', 'no-status', None, _('hide status prefix')),
4774 4775 ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
4775 4776 ('C', 'copies', None, _('show source of copied files')),
4776 4777 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4777 4778 ('', 'rev', [], _('show difference from revision'), _('REV')),
4778 4779 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4779 4780 ] + walkopts + subrepoopts + formatteropts,
4780 4781 _('[OPTION]... [FILE]...'),
4781 4782 inferrepo=True, cmdtype=readonly)
4782 4783 def status(ui, repo, *pats, **opts):
4783 4784 """show changed files in the working directory
4784 4785
4785 4786 Show status of files in the repository. If names are given, only
4786 4787 files that match are shown. Files that are clean or ignored or
4787 4788 the source of a copy/move operation are not listed unless
4788 4789 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4789 4790 Unless options described with "show only ..." are given, the
4790 4791 options -mardu are used.
4791 4792
4792 4793 Option -q/--quiet hides untracked (unknown and ignored) files
4793 4794 unless explicitly requested with -u/--unknown or -i/--ignored.
4794 4795
4795 4796 .. note::
4796 4797
4797 4798 :hg:`status` may appear to disagree with diff if permissions have
4798 4799 changed or a merge has occurred. The standard diff format does
4799 4800 not report permission changes and diff only reports changes
4800 4801 relative to one merge parent.
4801 4802
4802 4803 If one revision is given, it is used as the base revision.
4803 4804 If two revisions are given, the differences between them are
4804 4805 shown. The --change option can also be used as a shortcut to list
4805 4806 the changed files of a revision from its first parent.
4806 4807
4807 4808 The codes used to show the status of files are::
4808 4809
4809 4810 M = modified
4810 4811 A = added
4811 4812 R = removed
4812 4813 C = clean
4813 4814 ! = missing (deleted by non-hg command, but still tracked)
4814 4815 ? = not tracked
4815 4816 I = ignored
4816 4817 = origin of the previous file (with --copies)
4817 4818
4818 4819 .. container:: verbose
4819 4820
4820 4821 The -t/--terse option abbreviates the output by showing only the directory
4821 4822 name if all the files in it share the same status. The option takes an
4822 4823 argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
4823 4824 for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
4824 4825 for 'ignored' and 'c' for 'clean'.
4825 4826
4826 4827 It abbreviates only those statuses which are passed. Note that clean and
4827 4828 ignored files are not displayed with '--terse ic' unless the -c/--clean
4828 4829 and -i/--ignored options are also used.
4829 4830
4830 4831 The -v/--verbose option shows information when the repository is in an
4831 4832 unfinished state such as a merge, shelve, or rebase. You can have this
4832 4833 behavior turned on by default by enabling the ``commands.status.verbose`` option.
4833 4834
4834 4835 You can skip displaying some of these states by setting
4835 4836 ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
4836 4837 'histedit', 'merge', 'rebase', or 'unshelve'.
4837 4838
4838 4839 Examples:
4839 4840
4840 4841 - show changes in the working directory relative to a
4841 4842 changeset::
4842 4843
4843 4844 hg status --rev 9353
4844 4845
4845 4846 - show changes in the working directory relative to the
4846 4847 current directory (see :hg:`help patterns` for more information)::
4847 4848
4848 4849 hg status re:
4849 4850
4850 4851 - show all changes including copies in an existing changeset::
4851 4852
4852 4853 hg status --copies --change 9353
4853 4854
4854 4855 - get a NUL separated list of added files, suitable for xargs::
4855 4856
4856 4857 hg status -an0
4857 4858
4858 4859 - show more information about the repository status, abbreviating
4859 4860 added, removed, modified, deleted, and untracked paths::
4860 4861
4861 4862 hg status -v -t mardu
4862 4863
4863 4864 Returns 0 on success.
4864 4865
4865 4866 """
4866 4867
4867 4868 opts = pycompat.byteskwargs(opts)
4868 4869 revs = opts.get('rev')
4869 4870 change = opts.get('change')
4870 4871 terse = opts.get('terse')
4871 4872
4872 4873 if revs and change:
4873 4874 msg = _('cannot specify --rev and --change at the same time')
4874 4875 raise error.Abort(msg)
4875 4876 elif revs and terse:
4876 4877 msg = _('cannot use --terse with --rev')
4877 4878 raise error.Abort(msg)
4878 4879 elif change:
4879 4880 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
4880 4881 ctx2 = scmutil.revsingle(repo, change, None)
4881 4882 ctx1 = ctx2.p1()
4882 4883 else:
4883 4884 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
4884 4885 ctx1, ctx2 = scmutil.revpair(repo, revs)
4885 4886
4886 4887 if pats or ui.configbool('commands', 'status.relative'):
4887 4888 cwd = repo.getcwd()
4888 4889 else:
4889 4890 cwd = ''
4890 4891
4891 4892 if opts.get('print0'):
4892 4893 end = '\0'
4893 4894 else:
4894 4895 end = '\n'
4895 4896 copy = {}
4896 4897 states = 'modified added removed deleted unknown ignored clean'.split()
4897 4898 show = [k for k in states if opts.get(k)]
4898 4899 if opts.get('all'):
4899 4900 show += ui.quiet and (states[:4] + ['clean']) or states
4900 4901
4901 4902 if not show:
4902 4903 if ui.quiet:
4903 4904 show = states[:4]
4904 4905 else:
4905 4906 show = states[:5]
4906 4907
4907 4908 m = scmutil.match(ctx2, pats, opts)
4908 4909 if terse:
4909 4910 # we need to compute clean and unknown files to produce the terse output
4910 4911 stat = repo.status(ctx1.node(), ctx2.node(), m,
4911 4912 'ignored' in show or 'i' in terse,
4912 4913 True, True, opts.get('subrepos'))
4913 4914
4914 4915 stat = cmdutil.tersedir(stat, terse)
4915 4916 else:
4916 4917 stat = repo.status(ctx1.node(), ctx2.node(), m,
4917 4918 'ignored' in show, 'clean' in show,
4918 4919 'unknown' in show, opts.get('subrepos'))
4919 4920
4920 4921 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4921 4922
4922 4923 if (opts.get('all') or opts.get('copies')
4923 4924 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4924 4925 copy = copies.pathcopies(ctx1, ctx2, m)
4925 4926
4926 4927 ui.pager('status')
4927 4928 fm = ui.formatter('status', opts)
4928 4929 fmt = '%s' + end
4929 4930 showchar = not opts.get('no_status')
4930 4931
4931 4932 for state, char, files in changestates:
4932 4933 if state in show:
4933 4934 label = 'status.' + state
4934 4935 for f in files:
4935 4936 fm.startitem()
4936 4937 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4937 4938 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4938 4939 if f in copy:
4939 4940 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4940 4941 label='status.copied')
4941 4942
4942 4943 if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
4943 4944 and not ui.plain()):
4944 4945 cmdutil.morestatus(repo, fm)
4945 4946 fm.end()
4946 4947
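# Editor's note: a standalone sketch (not Mercurial code; the helper is
# invented for illustration) of how the function above decides which status
# categories to display, depending on the flags passed and on --quiet.
STATES = 'modified added removed deleted unknown ignored clean'.split()

def _shownstates(opts, quiet=False):
    show = [k for k in STATES if opts.get(k)]
    if opts.get('all'):
        show += (STATES[:4] + ['clean']) if quiet else STATES
    if not show:
        show = STATES[:4] if quiet else STATES[:5]
    return show

# the default output is -mardu; --quiet additionally hides unknown files
assert _shownstates({}) == ['modified', 'added', 'removed', 'deleted', 'unknown']
assert _shownstates({}, quiet=True) == ['modified', 'added', 'removed', 'deleted']
assert 'ignored' in _shownstates({'all': True})
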
4947 4948 @command('^summary|sum',
4948 4949 [('', 'remote', None, _('check for push and pull'))],
4949 4950 '[--remote]', cmdtype=readonly)
4950 4951 def summary(ui, repo, **opts):
4951 4952 """summarize working directory state
4952 4953
4953 4954 This generates a brief summary of the working directory state,
4954 4955 including parents, branch, commit status, phase and available updates.
4955 4956
4956 4957 With the --remote option, this will check the default paths for
4957 4958 incoming and outgoing changes. This can be time-consuming.
4958 4959
4959 4960 Returns 0 on success.
4960 4961 """
4961 4962
4962 4963 opts = pycompat.byteskwargs(opts)
4963 4964 ui.pager('summary')
4964 4965 ctx = repo[None]
4965 4966 parents = ctx.parents()
4966 4967 pnode = parents[0].node()
4967 4968 marks = []
4968 4969
4969 4970 ms = None
4970 4971 try:
4971 4972 ms = mergemod.mergestate.read(repo)
4972 4973 except error.UnsupportedMergeRecords as e:
4973 4974 s = ' '.join(e.recordtypes)
4974 4975 ui.warn(
4975 4976 _('warning: merge state has unsupported record types: %s\n') % s)
4976 4977 unresolved = []
4977 4978 else:
4978 4979 unresolved = list(ms.unresolved())
4979 4980
4980 4981 for p in parents:
4981 4982 # label with log.changeset (instead of log.parent) since this
4982 4983 # shows a working directory parent *changeset*:
4983 4984 # i18n: column positioning for "hg summary"
4984 4985 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4985 4986 label=logcmdutil.changesetlabels(p))
4986 4987 ui.write(' '.join(p.tags()), label='log.tag')
4987 4988 if p.bookmarks():
4988 4989 marks.extend(p.bookmarks())
4989 4990 if p.rev() == -1:
4990 4991 if not len(repo):
4991 4992 ui.write(_(' (empty repository)'))
4992 4993 else:
4993 4994 ui.write(_(' (no revision checked out)'))
4994 4995 if p.obsolete():
4995 4996 ui.write(_(' (obsolete)'))
4996 4997 if p.isunstable():
4997 4998 instabilities = (ui.label(instability, 'trouble.%s' % instability)
4998 4999 for instability in p.instabilities())
4999 5000 ui.write(' ('
5000 5001 + ', '.join(instabilities)
5001 5002 + ')')
5002 5003 ui.write('\n')
5003 5004 if p.description():
5004 5005 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5005 5006 label='log.summary')
5006 5007
5007 5008 branch = ctx.branch()
5008 5009 bheads = repo.branchheads(branch)
5009 5010 # i18n: column positioning for "hg summary"
5010 5011 m = _('branch: %s\n') % branch
5011 5012 if branch != 'default':
5012 5013 ui.write(m, label='log.branch')
5013 5014 else:
5014 5015 ui.status(m, label='log.branch')
5015 5016
5016 5017 if marks:
5017 5018 active = repo._activebookmark
5018 5019 # i18n: column positioning for "hg summary"
5019 5020 ui.write(_('bookmarks:'), label='log.bookmark')
5020 5021 if active is not None:
5021 5022 if active in marks:
5022 5023 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
5023 5024 marks.remove(active)
5024 5025 else:
5025 5026 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
5026 5027 for m in marks:
5027 5028 ui.write(' ' + m, label='log.bookmark')
5028 5029 ui.write('\n', label='log.bookmark')
5029 5030
5030 5031 status = repo.status(unknown=True)
5031 5032
5032 5033 c = repo.dirstate.copies()
5033 5034 copied, renamed = [], []
5034 5035 for d, s in c.iteritems():
5035 5036 if s in status.removed:
5036 5037 status.removed.remove(s)
5037 5038 renamed.append(d)
5038 5039 else:
5039 5040 copied.append(d)
5040 5041 if d in status.added:
5041 5042 status.added.remove(d)
5042 5043
5043 5044 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5044 5045
5045 5046 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5046 5047 (ui.label(_('%d added'), 'status.added'), status.added),
5047 5048 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5048 5049 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5049 5050 (ui.label(_('%d copied'), 'status.copied'), copied),
5050 5051 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5051 5052 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5052 5053 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5053 5054 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5054 5055 t = []
5055 5056 for l, s in labels:
5056 5057 if s:
5057 5058 t.append(l % len(s))
5058 5059
5059 5060 t = ', '.join(t)
5060 5061 cleanworkdir = False
5061 5062
5062 5063 if repo.vfs.exists('graftstate'):
5063 5064 t += _(' (graft in progress)')
5064 5065 if repo.vfs.exists('updatestate'):
5065 5066 t += _(' (interrupted update)')
5066 5067 elif len(parents) > 1:
5067 5068 t += _(' (merge)')
5068 5069 elif branch != parents[0].branch():
5069 5070 t += _(' (new branch)')
5070 5071 elif (parents[0].closesbranch() and
5071 5072 pnode in repo.branchheads(branch, closed=True)):
5072 5073 t += _(' (head closed)')
5073 5074 elif not (status.modified or status.added or status.removed or renamed or
5074 5075 copied or subs):
5075 5076 t += _(' (clean)')
5076 5077 cleanworkdir = True
5077 5078 elif pnode not in bheads:
5078 5079 t += _(' (new branch head)')
5079 5080
5080 5081 if parents:
5081 5082 pendingphase = max(p.phase() for p in parents)
5082 5083 else:
5083 5084 pendingphase = phases.public
5084 5085
5085 5086 if pendingphase > phases.newcommitphase(ui):
5086 5087 t += ' (%s)' % phases.phasenames[pendingphase]
5087 5088
5088 5089 if cleanworkdir:
5089 5090 # i18n: column positioning for "hg summary"
5090 5091 ui.status(_('commit: %s\n') % t.strip())
5091 5092 else:
5092 5093 # i18n: column positioning for "hg summary"
5093 5094 ui.write(_('commit: %s\n') % t.strip())
5094 5095
5095 5096 # all ancestors of branch heads - all ancestors of parent = new csets
5096 5097 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5097 5098 bheads))
5098 5099
5099 5100 if new == 0:
5100 5101 # i18n: column positioning for "hg summary"
5101 5102 ui.status(_('update: (current)\n'))
5102 5103 elif pnode not in bheads:
5103 5104 # i18n: column positioning for "hg summary"
5104 5105 ui.write(_('update: %d new changesets (update)\n') % new)
5105 5106 else:
5106 5107 # i18n: column positioning for "hg summary"
5107 5108 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5108 5109 (new, len(bheads)))
5109 5110
5110 5111 t = []
5111 5112 draft = len(repo.revs('draft()'))
5112 5113 if draft:
5113 5114 t.append(_('%d draft') % draft)
5114 5115 secret = len(repo.revs('secret()'))
5115 5116 if secret:
5116 5117 t.append(_('%d secret') % secret)
5117 5118
5118 5119 if draft or secret:
5119 5120 ui.status(_('phases: %s\n') % ', '.join(t))
5120 5121
5121 5122 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5122 5123 for trouble in ("orphan", "contentdivergent", "phasedivergent"):
5123 5124 numtrouble = len(repo.revs(trouble + "()"))
5124 5125 # We write all the possibilities to ease translation
5125 5126 troublemsg = {
5126 5127 "orphan": _("orphan: %d changesets"),
5127 5128 "contentdivergent": _("content-divergent: %d changesets"),
5128 5129 "phasedivergent": _("phase-divergent: %d changesets"),
5129 5130 }
5130 5131 if numtrouble > 0:
5131 5132 ui.status(troublemsg[trouble] % numtrouble + "\n")
5132 5133
5133 5134 cmdutil.summaryhooks(ui, repo)
5134 5135
5135 5136 if opts.get('remote'):
5136 5137 needsincoming, needsoutgoing = True, True
5137 5138 else:
5138 5139 needsincoming, needsoutgoing = False, False
5139 5140 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5140 5141 if i:
5141 5142 needsincoming = True
5142 5143 if o:
5143 5144 needsoutgoing = True
5144 5145 if not needsincoming and not needsoutgoing:
5145 5146 return
5146 5147
5147 5148 def getincoming():
5148 5149 source, branches = hg.parseurl(ui.expandpath('default'))
5149 5150 sbranch = branches[0]
5150 5151 try:
5151 5152 other = hg.peer(repo, {}, source)
5152 5153 except error.RepoError:
5153 5154 if opts.get('remote'):
5154 5155 raise
5155 5156 return source, sbranch, None, None, None
5156 5157 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5157 5158 if revs:
5158 5159 revs = [other.lookup(rev) for rev in revs]
5159 5160 ui.debug('comparing with %s\n' % util.hidepassword(source))
5160 5161 repo.ui.pushbuffer()
5161 5162 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5162 5163 repo.ui.popbuffer()
5163 5164 return source, sbranch, other, commoninc, commoninc[1]
5164 5165
5165 5166 if needsincoming:
5166 5167 source, sbranch, sother, commoninc, incoming = getincoming()
5167 5168 else:
5168 5169 source = sbranch = sother = commoninc = incoming = None
5169 5170
5170 5171 def getoutgoing():
5171 5172 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5172 5173 dbranch = branches[0]
5173 5174 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5174 5175 if source != dest:
5175 5176 try:
5176 5177 dother = hg.peer(repo, {}, dest)
5177 5178 except error.RepoError:
5178 5179 if opts.get('remote'):
5179 5180 raise
5180 5181 return dest, dbranch, None, None
5181 5182 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5182 5183 elif sother is None:
5183 5184 # there is no explicit destination peer, but the source one is invalid
5184 5185 return dest, dbranch, None, None
5185 5186 else:
5186 5187 dother = sother
5187 5188 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5188 5189 common = None
5189 5190 else:
5190 5191 common = commoninc
5191 5192 if revs:
5192 5193 revs = [repo.lookup(rev) for rev in revs]
5193 5194 repo.ui.pushbuffer()
5194 5195 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5195 5196 commoninc=common)
5196 5197 repo.ui.popbuffer()
5197 5198 return dest, dbranch, dother, outgoing
5198 5199
5199 5200 if needsoutgoing:
5200 5201 dest, dbranch, dother, outgoing = getoutgoing()
5201 5202 else:
5202 5203 dest = dbranch = dother = outgoing = None
5203 5204
5204 5205 if opts.get('remote'):
5205 5206 t = []
5206 5207 if incoming:
5207 5208 t.append(_('1 or more incoming'))
5208 5209 o = outgoing.missing
5209 5210 if o:
5210 5211 t.append(_('%d outgoing') % len(o))
5211 5212 other = dother or sother
5212 5213 if 'bookmarks' in other.listkeys('namespaces'):
5213 5214 counts = bookmarks.summary(repo, other)
5214 5215 if counts[0] > 0:
5215 5216 t.append(_('%d incoming bookmarks') % counts[0])
5216 5217 if counts[1] > 0:
5217 5218 t.append(_('%d outgoing bookmarks') % counts[1])
5218 5219
5219 5220 if t:
5220 5221 # i18n: column positioning for "hg summary"
5221 5222 ui.write(_('remote: %s\n') % (', '.join(t)))
5222 5223 else:
5223 5224 # i18n: column positioning for "hg summary"
5224 5225 ui.status(_('remote: (synced)\n'))
5225 5226
5226 5227 cmdutil.summaryremotehooks(ui, repo, opts,
5227 5228 ((source, sbranch, sother, commoninc),
5228 5229 (dest, dbranch, dother, outgoing)))
5229 5230
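# Editor's note: a standalone sketch (not Mercurial code; the helper is
# invented for illustration) of the "update:" line computed above. `new` counts
# the changesets that are ancestors of the branch heads but not of the working
# directory parents, and `onhead` says whether the first parent is currently a
# branch head.
def _updateline(new, onhead, nheads):
    if new == 0:
        return 'update: (current)'
    if not onhead:
        return 'update: %d new changesets (update)' % new
    return 'update: %d new changesets, %d branch heads (merge)' % (new, nheads)

assert _updateline(0, True, 1) == 'update: (current)'
assert _updateline(3, False, 1) == 'update: 3 new changesets (update)'
assert _updateline(2, True, 2) == 'update: 2 new changesets, 2 branch heads (merge)'
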
5230 5231 @command('tag',
5231 5232 [('f', 'force', None, _('force tag')),
5232 5233 ('l', 'local', None, _('make the tag local')),
5233 5234 ('r', 'rev', '', _('revision to tag'), _('REV')),
5234 5235 ('', 'remove', None, _('remove a tag')),
5235 5236 # -l/--local is already there, so commitopts cannot be used
5236 5237 ('e', 'edit', None, _('invoke editor on commit messages')),
5237 5238 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5238 5239 ] + commitopts2,
5239 5240 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5240 5241 def tag(ui, repo, name1, *names, **opts):
5241 5242 """add one or more tags for the current or given revision
5242 5243
5243 5244 Name a particular revision using <name>.
5244 5245
5245 5246 Tags are used to name particular revisions of the repository and are
5246 5247 very useful to compare different revisions, to go back to significant
5247 5248 earlier versions or to mark branch points as releases, etc. Changing
5248 5249 an existing tag is normally disallowed; use -f/--force to override.
5249 5250
5250 5251 If no revision is given, the parent of the working directory is
5251 5252 used.
5252 5253
5253 5254 To facilitate version control, distribution, and merging of tags,
5254 5255 they are stored as a file named ".hgtags" which is managed similarly
5255 5256 to other project files and can be hand-edited if necessary. This
5256 5257 also means that tagging creates a new commit. The file
5257 5258 ".hg/localtags" is used for local tags (not shared among
5258 5259 repositories).
5259 5260
5260 5261 Tag commits are usually made at the head of a branch. If the parent
5261 5262 of the working directory is not a branch head, :hg:`tag` aborts; use
5262 5263 -f/--force to force the tag commit to be based on a non-head
5263 5264 changeset.
5264 5265
5265 5266 See :hg:`help dates` for a list of formats valid for -d/--date.
5266 5267
5267 5268 Since tag names have priority over branch names during revision
5268 5269 lookup, using an existing branch name as a tag name is discouraged.
5269 5270
5270 5271 Returns 0 on success.
5271 5272 """
5272 5273 opts = pycompat.byteskwargs(opts)
5273 5274 wlock = lock = None
5274 5275 try:
5275 5276 wlock = repo.wlock()
5276 5277 lock = repo.lock()
5277 5278 rev_ = "."
5278 5279 names = [t.strip() for t in (name1,) + names]
5279 5280 if len(names) != len(set(names)):
5280 5281 raise error.Abort(_('tag names must be unique'))
5281 5282 for n in names:
5282 5283 scmutil.checknewlabel(repo, n, 'tag')
5283 5284 if not n:
5284 5285 raise error.Abort(_('tag names cannot consist entirely of '
5285 5286 'whitespace'))
5286 5287 if opts.get('rev') and opts.get('remove'):
5287 5288 raise error.Abort(_("--rev and --remove are incompatible"))
5288 5289 if opts.get('rev'):
5289 5290 rev_ = opts['rev']
5290 5291 message = opts.get('message')
5291 5292 if opts.get('remove'):
5292 5293 if opts.get('local'):
5293 5294 expectedtype = 'local'
5294 5295 else:
5295 5296 expectedtype = 'global'
5296 5297
5297 5298 for n in names:
5298 5299 if not repo.tagtype(n):
5299 5300 raise error.Abort(_("tag '%s' does not exist") % n)
5300 5301 if repo.tagtype(n) != expectedtype:
5301 5302 if expectedtype == 'global':
5302 5303 raise error.Abort(_("tag '%s' is not a global tag") % n)
5303 5304 else:
5304 5305 raise error.Abort(_("tag '%s' is not a local tag") % n)
5305 5306 rev_ = 'null'
5306 5307 if not message:
5307 5308 # we don't translate commit messages
5308 5309 message = 'Removed tag %s' % ', '.join(names)
5309 5310 elif not opts.get('force'):
5310 5311 for n in names:
5311 5312 if n in repo.tags():
5312 5313 raise error.Abort(_("tag '%s' already exists "
5313 5314 "(use -f to force)") % n)
5314 5315 if not opts.get('local'):
5315 5316 p1, p2 = repo.dirstate.parents()
5316 5317 if p2 != nullid:
5317 5318 raise error.Abort(_('uncommitted merge'))
5318 5319 bheads = repo.branchheads()
5319 5320 if not opts.get('force') and bheads and p1 not in bheads:
5320 5321 raise error.Abort(_('working directory is not at a branch head '
5321 5322 '(use -f to force)'))
5322 5323 node = scmutil.revsingle(repo, rev_).node()
5323 5324
5324 5325 if not message:
5325 5326 # we don't translate commit messages
5326 5327 message = ('Added tag %s for changeset %s' %
5327 5328 (', '.join(names), short(node)))
5328 5329
5329 5330 date = opts.get('date')
5330 5331 if date:
5331 5332 date = dateutil.parsedate(date)
5332 5333
5333 5334 if opts.get('remove'):
5334 5335 editform = 'tag.remove'
5335 5336 else:
5336 5337 editform = 'tag.add'
5337 5338 editor = cmdutil.getcommiteditor(editform=editform,
5338 5339 **pycompat.strkwargs(opts))
5339 5340
5340 5341 # don't allow tagging the null rev
5341 5342 if (not opts.get('remove') and
5342 5343 scmutil.revsingle(repo, rev_).rev() == nullrev):
5343 5344 raise error.Abort(_("cannot tag null revision"))
5344 5345
5345 5346 tagsmod.tag(repo, names, node, message, opts.get('local'),
5346 5347 opts.get('user'), date, editor=editor)
5347 5348 finally:
5348 5349 release(lock, wlock)
5349 5350
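# Editor's note: a standalone sketch (not Mercurial code; the helper is
# invented for illustration) of the default commit messages chosen above when
# no -m/--message is given. `shortnode` stands in for the abbreviated changeset
# hash produced by short(node); the values below are made-up examples.
def _defaulttagmessage(names, removing, shortnode=None):
    if removing:
        return 'Removed tag %s' % ', '.join(names)
    return 'Added tag %s for changeset %s' % (', '.join(names), shortnode)

assert _defaulttagmessage(['1.0'], removing=True) == 'Removed tag 1.0'
assert (_defaulttagmessage(['1.0', 'stable'], removing=False,
                           shortnode='abc123def456')
        == 'Added tag 1.0, stable for changeset abc123def456')
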
5350 5351 @command('tags', formatteropts, '', cmdtype=readonly)
5351 5352 def tags(ui, repo, **opts):
5352 5353 """list repository tags
5353 5354
5354 5355 This lists both regular and local tags. When the -v/--verbose
5355 5356 switch is used, a third column "local" is printed for local tags.
5356 5357 When the -q/--quiet switch is used, only the tag name is printed.
5357 5358
5358 5359 Returns 0 on success.
5359 5360 """
5360 5361
5361 5362 opts = pycompat.byteskwargs(opts)
5362 5363 ui.pager('tags')
5363 5364 fm = ui.formatter('tags', opts)
5364 5365 hexfunc = fm.hexfunc
5365 5366 tagtype = ""
5366 5367
5367 5368 for t, n in reversed(repo.tagslist()):
5368 5369 hn = hexfunc(n)
5369 5370 label = 'tags.normal'
5370 5371 tagtype = ''
5371 5372 if repo.tagtype(t) == 'local':
5372 5373 label = 'tags.local'
5373 5374 tagtype = 'local'
5374 5375
5375 5376 fm.startitem()
5376 5377 fm.write('tag', '%s', t, label=label)
5377 5378 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5378 5379 fm.condwrite(not ui.quiet, 'rev node', fmt,
5379 5380 repo.changelog.rev(n), hn, label=label)
5380 5381 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5381 5382 tagtype, label=label)
5382 5383 fm.plain('\n')
5383 5384 fm.end()
5384 5385
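# Editor's note: a standalone sketch (not Mercurial code; the helper is
# invented for illustration) of the column layout used above: the tag name is
# padded to 30 display columns, then a right-aligned revision number and the
# node hash follow. len() stands in for encoding.colwidth(), which the real
# code uses so that wide characters still line up.
def _tagline(tag, rev, node):
    pad = ' ' * (30 - len(tag))
    return tag + pad + (' %5d:%s' % (rev, node))

line = _tagline('tip', 42, '1234567890ab')
assert line.endswith(' 42:1234567890ab')
assert line.index(':') == 36            # fixed column regardless of tag length
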
5385 5386 @command('tip',
5386 5387 [('p', 'patch', None, _('show patch')),
5387 5388 ('g', 'git', None, _('use git extended diff format')),
5388 5389 ] + templateopts,
5389 5390 _('[-p] [-g]'))
5390 5391 def tip(ui, repo, **opts):
5391 5392 """show the tip revision (DEPRECATED)
5392 5393
5393 5394 The tip revision (usually just called the tip) is the changeset
5394 5395 most recently added to the repository (and therefore the most
5395 5396 recently changed head).
5396 5397
5397 5398 If you have just made a commit, that commit will be the tip. If
5398 5399 you have just pulled changes from another repository, the tip of
5399 5400 that repository becomes the current tip. The "tip" tag is special
5400 5401 and cannot be renamed or assigned to a different changeset.
5401 5402
5402 5403 This command is deprecated, please use :hg:`heads` instead.
5403 5404
5404 5405 Returns 0 on success.
5405 5406 """
5406 5407 opts = pycompat.byteskwargs(opts)
5407 5408 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5408 5409 displayer.show(repo['tip'])
5409 5410 displayer.close()
5410 5411
5411 5412 @command('unbundle',
5412 5413 [('u', 'update', None,
5413 5414 _('update to new branch head if changesets were unbundled'))],
5414 5415 _('[-u] FILE...'))
5415 5416 def unbundle(ui, repo, fname1, *fnames, **opts):
5416 5417 """apply one or more bundle files
5417 5418
5418 5419 Apply one or more bundle files generated by :hg:`bundle`.
5419 5420
5420 5421 Returns 0 on success, 1 if an update has unresolved files.
5421 5422 """
5422 5423 fnames = (fname1,) + fnames
5423 5424
5424 5425 with repo.lock():
5425 5426 for fname in fnames:
5426 5427 f = hg.openpath(ui, fname)
5427 5428 gen = exchange.readbundle(ui, f, fname)
5428 5429 if isinstance(gen, streamclone.streamcloneapplier):
5429 5430 raise error.Abort(
5430 5431 _('packed bundles cannot be applied with '
5431 5432 '"hg unbundle"'),
5432 5433 hint=_('use "hg debugapplystreamclonebundle"'))
5433 5434 url = 'bundle:' + fname
5434 5435 try:
5435 5436 txnname = 'unbundle'
5436 5437 if not isinstance(gen, bundle2.unbundle20):
5437 5438 txnname = 'unbundle\n%s' % util.hidepassword(url)
5438 5439 with repo.transaction(txnname) as tr:
5439 5440 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5440 5441 url=url)
5441 5442 except error.BundleUnknownFeatureError as exc:
5442 5443 raise error.Abort(
5443 5444 _('%s: unknown bundle feature, %s') % (fname, exc),
5444 5445 hint=_("see https://mercurial-scm.org/"
5445 5446 "wiki/BundleFeature for more "
5446 5447 "information"))
5447 5448 modheads = bundle2.combinechangegroupresults(op)
5448 5449
5449 5450 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5450 5451
5451 5452 @command('^update|up|checkout|co',
5452 5453 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5453 5454 ('c', 'check', None, _('require clean working directory')),
5454 5455 ('m', 'merge', None, _('merge uncommitted changes')),
5455 5456 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5456 5457 ('r', 'rev', '', _('revision'), _('REV'))
5457 5458 ] + mergetoolopts,
5458 5459 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5459 5460 def update(ui, repo, node=None, **opts):
5460 5461 """update working directory (or switch revisions)
5461 5462
5462 5463 Update the repository's working directory to the specified
5463 5464 changeset. If no changeset is specified, update to the tip of the
5464 5465 current named branch and move the active bookmark (see :hg:`help
5465 5466 bookmarks`).
5466 5467
5467 5468 Update sets the working directory's parent revision to the specified
5468 5469 changeset (see :hg:`help parents`).
5469 5470
5470 5471 If the changeset is not a descendant or ancestor of the working
5471 5472 directory's parent and there are uncommitted changes, the update is
5472 5473 aborted. With the -c/--check option, the working directory is checked
5473 5474 for uncommitted changes; if none are found, the working directory is
5474 5475 updated to the specified changeset.
5475 5476
5476 5477 .. container:: verbose
5477 5478
5478 5479 The -C/--clean, -c/--check, and -m/--merge options control what
5479 5480 happens if the working directory contains uncommitted changes.
5480 5481 At most one of them can be specified.
5481 5482
5482 5483 1. If no option is specified, and if
5483 5484 the requested changeset is an ancestor or descendant of
5484 5485 the working directory's parent, the uncommitted changes
5485 5486 are merged into the requested changeset and the merged
5486 5487 result is left uncommitted. If the requested changeset is
5487 5488 not an ancestor or descendant (that is, it is on another
5488 5489 branch), the update is aborted and the uncommitted changes
5489 5490 are preserved.
5490 5491
5491 5492 2. With the -m/--merge option, the update is allowed even if the
5492 5493 requested changeset is not an ancestor or descendant of
5493 5494 the working directory's parent.
5494 5495
5495 5496 3. With the -c/--check option, the update is aborted and the
5496 5497 uncommitted changes are preserved.
5497 5498
5498 5499 4. With the -C/--clean option, uncommitted changes are discarded and
5499 5500 the working directory is updated to the requested changeset.
5500 5501
5501 5502 To cancel an uncommitted merge (and lose your changes), use
5502 5503 :hg:`merge --abort`.
5503 5504
5504 5505 Use null as the changeset to remove the working directory (like
5505 5506 :hg:`clone -U`).
5506 5507
5507 5508 If you want to revert just one file to an older revision, use
5508 5509 :hg:`revert [-r REV] NAME`.
5509 5510
5510 5511 See :hg:`help dates` for a list of formats valid for -d/--date.
5511 5512
5512 5513 Returns 0 on success, 1 if there are unresolved files.
5513 5514 """
5514 5515 rev = opts.get(r'rev')
5515 5516 date = opts.get(r'date')
5516 5517 clean = opts.get(r'clean')
5517 5518 check = opts.get(r'check')
5518 5519 merge = opts.get(r'merge')
5519 5520 if rev and node:
5520 5521 raise error.Abort(_("please specify just one revision"))
5521 5522
5522 5523 if ui.configbool('commands', 'update.requiredest'):
5523 5524 if not node and not rev and not date:
5524 5525 raise error.Abort(_('you must specify a destination'),
5525 5526 hint=_('for example: hg update ".::"'))
5526 5527
5527 5528 if rev is None or rev == '':
5528 5529 rev = node
5529 5530
5530 5531 if date and rev is not None:
5531 5532 raise error.Abort(_("you can't specify a revision and a date"))
5532 5533
5533 5534 if len([x for x in (clean, check, merge) if x]) > 1:
5534 5535 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5535 5536 "or -m/--merge"))
5536 5537
5537 5538 updatecheck = None
5538 5539 if check:
5539 5540 updatecheck = 'abort'
5540 5541 elif merge:
5541 5542 updatecheck = 'none'
5542 5543
5543 5544 with repo.wlock():
5544 5545 cmdutil.clearunfinished(repo)
5545 5546
5546 5547 if date:
5547 5548 rev = cmdutil.finddate(ui, repo, date)
5548 5549
5549 5550 # if we defined a bookmark, we have to remember the original name
5550 5551 brev = rev
5551 5552 if rev:
5552 5553 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5553 5554 ctx = scmutil.revsingle(repo, rev, rev)
5554 5555 rev = ctx.rev()
5555 5556 if ctx.hidden():
5556 5557 ctxstr = ctx.hex()[:12]
5557 5558 ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
5558 5559
5559 5560 if ctx.obsolete():
5560 5561 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5561 5562 ui.warn("(%s)\n" % obsfatemsg)
5562 5563
5563 5564 repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
5564 5565
5565 5566 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5566 5567 updatecheck=updatecheck)
5567 5568
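# Editor's note: a standalone sketch (not Mercurial code; the helper is
# invented for illustration, and ValueError stands in for error.Abort) of how
# the mutually exclusive -C/-c/-m flags above are validated and mapped to the
# updatecheck mode handed to hg.updatetotally().
def _updatecheckmode(clean, check, merge):
    if len([x for x in (clean, check, merge) if x]) > 1:
        raise ValueError('can only specify one of -C/--clean, -c/--check, '
                         'or -m/--merge')
    if check:
        return 'abort'   # abort rather than update over uncommitted changes
    if merge:
        return 'none'    # allow the update even across branches
    return None          # no explicit check requested (covers --clean too)

assert _updatecheckmode(False, True, False) == 'abort'
assert _updatecheckmode(False, False, True) == 'none'
assert _updatecheckmode(True, False, False) is None
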
5568 5569 @command('verify', [])
5569 5570 def verify(ui, repo):
5570 5571 """verify the integrity of the repository
5571 5572
5572 5573 Verify the integrity of the current repository.
5573 5574
5574 5575 This will perform an extensive check of the repository's
5575 5576 integrity, validating the hashes and checksums of each entry in
5576 5577 the changelog, manifest, and tracked files, as well as the
5577 5578 integrity of their crosslinks and indices.
5578 5579
5579 5580 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5580 5581 for more information about recovery from corruption of the
5581 5582 repository.
5582 5583
5583 5584 Returns 0 on success, 1 if errors are encountered.
5584 5585 """
5585 5586 return hg.verify(repo)
5586 5587
5587 5588 @command('version', [] + formatteropts, norepo=True, cmdtype=readonly)
5588 5589 def version_(ui, **opts):
5589 5590 """output version and copyright information"""
5590 5591 opts = pycompat.byteskwargs(opts)
5591 5592 if ui.verbose:
5592 5593 ui.pager('version')
5593 5594 fm = ui.formatter("version", opts)
5594 5595 fm.startitem()
5595 5596 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5596 5597 util.version())
5597 5598 license = _(
5598 5599 "(see https://mercurial-scm.org for more information)\n"
5599 5600 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5600 5601 "This is free software; see the source for copying conditions. "
5601 5602 "There is NO\nwarranty; "
5602 5603 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5603 5604 )
5604 5605 if not ui.quiet:
5605 5606 fm.plain(license)
5606 5607
5607 5608 if ui.verbose:
5608 5609 fm.plain(_("\nEnabled extensions:\n\n"))
5609 5610 # format names and versions into columns
5610 5611 names = []
5611 5612 vers = []
5612 5613 isinternals = []
5613 5614 for name, module in extensions.extensions():
5614 5615 names.append(name)
5615 5616 vers.append(extensions.moduleversion(module) or None)
5616 5617 isinternals.append(extensions.ismoduleinternal(module))
5617 5618 fn = fm.nested("extensions")
5618 5619 if names:
5619 5620 namefmt = " %%-%ds " % max(len(n) for n in names)
5620 5621 places = [_("external"), _("internal")]
5621 5622 for n, v, p in zip(names, vers, isinternals):
5622 5623 fn.startitem()
5623 5624 fn.condwrite(ui.verbose, "name", namefmt, n)
5624 5625 if ui.verbose:
5625 5626 fn.plain("%s " % places[p])
5626 5627 fn.data(bundled=p)
5627 5628 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5628 5629 if ui.verbose:
5629 5630 fn.plain("\n")
5630 5631 fn.end()
5631 5632 fm.end()
5632 5633
5633 5634 def loadcmdtable(ui, name, cmdtable):
5634 5635 """Load command functions from specified cmdtable
5635 5636 """
5636 5637 overrides = [cmd for cmd in cmdtable if cmd in table]
5637 5638 if overrides:
5638 5639 ui.warn(_("extension '%s' overrides commands: %s\n")
5639 5640 % (name, " ".join(overrides)))
5640 5641 table.update(cmdtable)
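For context on what loadcmdtable consumes: an extension typically builds its cmdtable with registrar.command, as in the hedged sketch below. The 'hello' command name, synopsis, and message are made up for illustration; only the registrar API itself is taken as given.

    from mercurial import registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('hello', [], 'hg hello')
    def hello(ui, repo, **opts):
        """print a greeting (illustrative only)"""
        ui.write("hello from an extension\n")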
@@ -1,1436 +1,1434 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 from .utils import (
45 45 procutil,
46 46 stringutil,
47 47 )
48 48
49 49 if pycompat.iswindows:
50 50 from . import scmwindows as scmplatform
51 51 else:
52 52 from . import scmposix as scmplatform
53 53
54 54 termsize = scmplatform.termsize
55 55
56 56 class status(tuple):
57 57 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 58 and 'ignored' properties are only relevant to the working copy.
59 59 '''
60 60
61 61 __slots__ = ()
62 62
63 63 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 64 clean):
65 65 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 66 ignored, clean))
67 67
68 68 @property
69 69 def modified(self):
70 70 '''files that have been modified'''
71 71 return self[0]
72 72
73 73 @property
74 74 def added(self):
75 75 '''files that have been added'''
76 76 return self[1]
77 77
78 78 @property
79 79 def removed(self):
80 80 '''files that have been removed'''
81 81 return self[2]
82 82
83 83 @property
84 84 def deleted(self):
85 85 '''files that are in the dirstate, but have been deleted from the
86 86 working copy (aka "missing")
87 87 '''
88 88 return self[3]
89 89
90 90 @property
91 91 def unknown(self):
92 92 '''files not in the dirstate that are not ignored'''
93 93 return self[4]
94 94
95 95 @property
96 96 def ignored(self):
97 97 '''files not in the dirstate that are ignored (by _dirignore())'''
98 98 return self[5]
99 99
100 100 @property
101 101 def clean(self):
102 102 '''files that have not been modified'''
103 103 return self[6]
104 104
105 105 def __repr__(self, *args, **kwargs):
106 106 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
107 107 'unknown=%r, ignored=%r, clean=%r>') % self)
108 108
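A minimal usage sketch of the status class above, with made-up file lists: callers read the named properties instead of indexing the tuple positionally, while tuple behaviour is preserved.

    st = status(modified=['a.txt'], added=['b.txt'], removed=[],
                deleted=[], unknown=['junk.tmp'], ignored=[], clean=[])
    assert st.modified == ['a.txt']
    assert st[1] == st.added        # still usable as a plain 7-tuple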
109 109 def itersubrepos(ctx1, ctx2):
110 110 """find subrepos in ctx1 or ctx2"""
111 111 # Create a (subpath, ctx) mapping where we prefer subpaths from
112 112 # ctx1. The subpaths from ctx2 are important when the .hgsub file
113 113 # has been modified (in ctx2) but not yet committed (in ctx1).
114 114 subpaths = dict.fromkeys(ctx2.substate, ctx2)
115 115 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
116 116
117 117 missing = set()
118 118
119 119 for subpath in ctx2.substate:
120 120 if subpath not in ctx1.substate:
121 121 del subpaths[subpath]
122 122 missing.add(subpath)
123 123
124 124 for subpath, ctx in sorted(subpaths.iteritems()):
125 125 yield subpath, ctx.sub(subpath)
126 126
127 127 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
128 128 # status and diff will have an accurate result when it does
129 129 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
130 130 # against itself.
131 131 for subpath in missing:
132 132 yield subpath, ctx2.nullsub(subpath, ctx1)
133 133
134 134 def nochangesfound(ui, repo, excluded=None):
135 135 '''Report no changes for push/pull, excluded is None or a list of
136 136 nodes excluded from the push/pull.
137 137 '''
138 138 secretlist = []
139 139 if excluded:
140 140 for n in excluded:
141 141 ctx = repo[n]
142 142 if ctx.phase() >= phases.secret and not ctx.extinct():
143 143 secretlist.append(n)
144 144
145 145 if secretlist:
146 146 ui.status(_("no changes found (ignored %d secret changesets)\n")
147 147 % len(secretlist))
148 148 else:
149 149 ui.status(_("no changes found\n"))
150 150
151 151 def callcatch(ui, func):
152 152 """call func() with global exception handling
153 153
154 154 return func() if no exception happens. otherwise do some error handling
155 155 and return an exit code accordingly. does not handle all exceptions.
156 156 """
157 157 try:
158 158 try:
159 159 return func()
160 160 except: # re-raises
161 161 ui.traceback()
162 162 raise
163 163 # Global exception handling, alphabetically
164 164 # Mercurial-specific first, followed by built-in and library exceptions
165 165 except error.LockHeld as inst:
166 166 if inst.errno == errno.ETIMEDOUT:
167 167 reason = _('timed out waiting for lock held by %r') % inst.locker
168 168 else:
169 169 reason = _('lock held by %r') % inst.locker
170 170 ui.warn(_("abort: %s: %s\n")
171 171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
172 172 if not inst.locker:
173 173 ui.warn(_("(lock might be very busy)\n"))
174 174 except error.LockUnavailable as inst:
175 175 ui.warn(_("abort: could not lock %s: %s\n") %
176 176 (inst.desc or stringutil.forcebytestr(inst.filename),
177 177 encoding.strtolocal(inst.strerror)))
178 178 except error.OutOfBandError as inst:
179 179 if inst.args:
180 180 msg = _("abort: remote error:\n")
181 181 else:
182 182 msg = _("abort: remote error\n")
183 183 ui.warn(msg)
184 184 if inst.args:
185 185 ui.warn(''.join(inst.args))
186 186 if inst.hint:
187 187 ui.warn('(%s)\n' % inst.hint)
188 188 except error.RepoError as inst:
189 189 ui.warn(_("abort: %s!\n") % inst)
190 190 if inst.hint:
191 191 ui.warn(_("(%s)\n") % inst.hint)
192 192 except error.ResponseError as inst:
193 193 ui.warn(_("abort: %s") % inst.args[0])
194 194 msg = inst.args[1]
195 195 if isinstance(msg, type(u'')):
196 196 msg = pycompat.sysbytes(msg)
197 197 if not isinstance(msg, bytes):
198 198 ui.warn(" %r\n" % (msg,))
199 199 elif not msg:
200 200 ui.warn(_(" empty string\n"))
201 201 else:
202 202 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
203 203 except error.CensoredNodeError as inst:
204 204 ui.warn(_("abort: file censored %s!\n") % inst)
205 205 except error.RevlogError as inst:
206 206 ui.warn(_("abort: %s!\n") % inst)
207 207 except error.InterventionRequired as inst:
208 208 ui.warn("%s\n" % inst)
209 209 if inst.hint:
210 210 ui.warn(_("(%s)\n") % inst.hint)
211 211 return 1
212 212 except error.WdirUnsupported:
213 213 ui.warn(_("abort: working directory revision cannot be specified\n"))
214 214 except error.Abort as inst:
215 215 ui.warn(_("abort: %s\n") % inst)
216 216 if inst.hint:
217 217 ui.warn(_("(%s)\n") % inst.hint)
218 218 except ImportError as inst:
219 219 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
220 220 m = stringutil.forcebytestr(inst).split()[-1]
221 221 if m in "mpatch bdiff".split():
222 222 ui.warn(_("(did you forget to compile extensions?)\n"))
223 223 elif m in "zlib".split():
224 224 ui.warn(_("(is your Python install correct?)\n"))
225 225 except IOError as inst:
226 226 if util.safehasattr(inst, "code"):
227 227 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
228 228 elif util.safehasattr(inst, "reason"):
229 229 try: # usually it is in the form (errno, strerror)
230 230 reason = inst.reason.args[1]
231 231 except (AttributeError, IndexError):
232 232 # it might be anything, for example a string
233 233 reason = inst.reason
234 234 if isinstance(reason, unicode):
235 235 # SSLError of Python 2.7.9 contains a unicode
236 236 reason = encoding.unitolocal(reason)
237 237 ui.warn(_("abort: error: %s\n") % reason)
238 238 elif (util.safehasattr(inst, "args")
239 239 and inst.args and inst.args[0] == errno.EPIPE):
240 240 pass
241 241 elif getattr(inst, "strerror", None):
242 242 if getattr(inst, "filename", None):
243 243 ui.warn(_("abort: %s: %s\n") % (
244 244 encoding.strtolocal(inst.strerror),
245 245 stringutil.forcebytestr(inst.filename)))
246 246 else:
247 247 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
248 248 else:
249 249 raise
250 250 except OSError as inst:
251 251 if getattr(inst, "filename", None) is not None:
252 252 ui.warn(_("abort: %s: '%s'\n") % (
253 253 encoding.strtolocal(inst.strerror),
254 254 stringutil.forcebytestr(inst.filename)))
255 255 else:
256 256 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
257 257 except MemoryError:
258 258 ui.warn(_("abort: out of memory\n"))
259 259 except SystemExit as inst:
260 260 # Commands shouldn't sys.exit directly, but give a return code.
261 261 # Just in case, catch this and pass the exit code to the caller.

262 262 return inst.code
263 263 except socket.error as inst:
264 264 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
265 265
266 266 return -1
267 267
268 268 def checknewlabel(repo, lbl, kind):
269 269 # Do not use the "kind" parameter in ui output.
270 270 # It makes strings difficult to translate.
271 271 if lbl in ['tip', '.', 'null']:
272 272 raise error.Abort(_("the name '%s' is reserved") % lbl)
273 273 for c in (':', '\0', '\n', '\r'):
274 274 if c in lbl:
275 275 raise error.Abort(
276 276 _("%r cannot be used in a name") % pycompat.bytestr(c))
277 277 try:
278 278 int(lbl)
279 279 raise error.Abort(_("cannot use an integer as a name"))
280 280 except ValueError:
281 281 pass
282 282 if lbl.strip() != lbl:
283 283 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284 284
285 285 def checkfilename(f):
286 286 '''Check that the filename f is an acceptable filename for a tracked file'''
287 287 if '\r' in f or '\n' in f:
288 288 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289 289
290 290 def checkportable(ui, f):
291 291 '''Check if filename f is portable and warn or abort depending on config'''
292 292 checkfilename(f)
293 293 abort, warn = checkportabilityalert(ui)
294 294 if abort or warn:
295 295 msg = util.checkwinfilename(f)
296 296 if msg:
297 297 msg = "%s: %s" % (msg, procutil.shellquote(f))
298 298 if abort:
299 299 raise error.Abort(msg)
300 300 ui.warn(_("warning: %s\n") % msg)
301 301
302 302 def checkportabilityalert(ui):
303 303 '''check if the user's config requests nothing, a warning, or abort for
304 304 non-portable filenames'''
305 305 val = ui.config('ui', 'portablefilenames')
306 306 lval = val.lower()
307 307 bval = stringutil.parsebool(val)
308 308 abort = pycompat.iswindows or lval == 'abort'
309 309 warn = bval or lval == 'warn'
310 310 if bval is None and not (warn or abort or lval == 'ignore'):
311 311 raise error.ConfigError(
312 312 _("ui.portablefilenames value is invalid ('%s')") % val)
313 313 return abort, warn
314 314
315 315 class casecollisionauditor(object):
316 316 def __init__(self, ui, abort, dirstate):
317 317 self._ui = ui
318 318 self._abort = abort
319 319 allfiles = '\0'.join(dirstate._map)
320 320 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
321 321 self._dirstate = dirstate
322 322 # The purpose of _newfiles is so that we don't complain about
323 323 # case collisions if someone were to call this object with the
324 324 # same filename twice.
325 325 self._newfiles = set()
326 326
327 327 def __call__(self, f):
328 328 if f in self._newfiles:
329 329 return
330 330 fl = encoding.lower(f)
331 331 if fl in self._loweredfiles and f not in self._dirstate:
332 332 msg = _('possible case-folding collision for %s') % f
333 333 if self._abort:
334 334 raise error.Abort(msg)
335 335 self._ui.warn(_("warning: %s\n") % msg)
336 336 self._loweredfiles.add(fl)
337 337 self._newfiles.add(f)
338 338
339 339 def filteredhash(repo, maxrev):
340 340 """build hash of filtered revisions in the current repoview.
341 341
342 342 Multiple caches perform up-to-date validation by checking that the
343 343 tiprev and tipnode stored in the cache file match the current repository.
344 344 However, this is not sufficient for validating repoviews because the set
345 345 of revisions in the view may change without the repository tiprev and
346 346 tipnode changing.
347 347
348 348 This function hashes all the revs filtered from the view and returns
349 349 that SHA-1 digest.
350 350 """
351 351 cl = repo.changelog
352 352 if not cl.filteredrevs:
353 353 return None
354 354 key = None
355 355 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
356 356 if revs:
357 357 s = hashlib.sha1()
358 358 for rev in revs:
359 359 s.update('%d;' % rev)
360 360 key = s.digest()
361 361 return key
362 362
363 363 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
364 364 '''yield every hg repository under path, always recursively.
365 365 The recurse flag will only control recursion into repo working dirs'''
366 366 def errhandler(err):
367 367 if err.filename == path:
368 368 raise err
369 369 samestat = getattr(os.path, 'samestat', None)
370 370 if followsym and samestat is not None:
371 371 def adddir(dirlst, dirname):
372 372 dirstat = os.stat(dirname)
373 373 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
374 374 if not match:
375 375 dirlst.append(dirstat)
376 376 return not match
377 377 else:
378 378 followsym = False
379 379
380 380 if (seen_dirs is None) and followsym:
381 381 seen_dirs = []
382 382 adddir(seen_dirs, path)
383 383 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
384 384 dirs.sort()
385 385 if '.hg' in dirs:
386 386 yield root # found a repository
387 387 qroot = os.path.join(root, '.hg', 'patches')
388 388 if os.path.isdir(os.path.join(qroot, '.hg')):
389 389 yield qroot # we have a patch queue repo here
390 390 if recurse:
391 391 # avoid recursing inside the .hg directory
392 392 dirs.remove('.hg')
393 393 else:
394 394 dirs[:] = [] # don't descend further
395 395 elif followsym:
396 396 newdirs = []
397 397 for d in dirs:
398 398 fname = os.path.join(root, d)
399 399 if adddir(seen_dirs, fname):
400 400 if os.path.islink(fname):
401 401 for hgname in walkrepos(fname, True, seen_dirs):
402 402 yield hgname
403 403 else:
404 404 newdirs.append(d)
405 405 dirs[:] = newdirs
406 406
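A hedged example of walkrepos in action; the root path is hypothetical.

    # Print every repository (and patch queue repository) found below a root.
    for repopath in walkrepos('/srv/hg', followsym=True):
        print(repopath)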
407 407 def binnode(ctx):
408 408 """Return binary node id for a given basectx"""
409 409 node = ctx.node()
410 410 if node is None:
411 411 return wdirid
412 412 return node
413 413
414 414 def intrev(ctx):
415 415 """Return integer for a given basectx that can be used in comparison or
416 416 arithmetic operation"""
417 417 rev = ctx.rev()
418 418 if rev is None:
419 419 return wdirrev
420 420 return rev
421 421
422 422 def formatchangeid(ctx):
423 423 """Format changectx as '{rev}:{node|formatnode}', which is the default
424 424 template provided by logcmdutil.changesettemplater"""
425 425 repo = ctx.repo()
426 426 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
427 427
428 428 def formatrevnode(ui, rev, node):
429 429 """Format given revision and node depending on the current verbosity"""
430 430 if ui.debugflag:
431 431 hexfunc = hex
432 432 else:
433 433 hexfunc = short
434 434 return '%d:%s' % (rev, hexfunc(node))
435 435
436 436 def revsingle(repo, revspec, default='.', localalias=None):
437 437 if not revspec and revspec != 0:
438 438 return repo[default]
439 439
440 440 l = revrange(repo, [revspec], localalias=localalias)
441 441 if not l:
442 442 raise error.Abort(_('empty revision set'))
443 443 return repo[l.last()]
444 444
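A short sketch of how revsingle resolves user input to a single changectx; 'tip' and the default '.' are ordinary revset symbols.

    ctx = revsingle(repo, 'tip')    # last revision of the revset -> changectx
    ctx = revsingle(repo, None)     # empty spec falls back to default '.'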
445 445 def _pairspec(revspec):
446 446 tree = revsetlang.parse(revspec)
447 447 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
448 448
449 449 def revpairnodes(repo, revs):
450 450 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
451 451 ctx1, ctx2 = revpair(repo, revs)
452 452 return ctx1.node(), ctx2.node()
453 453
454 454 def revpair(repo, revs):
455 455 if not revs:
456 456 return repo['.'], repo[None]
457 457
458 458 l = revrange(repo, revs)
459 459
460 460 if not l:
461 461 first = second = None
462 462 elif l.isascending():
463 463 first = l.min()
464 464 second = l.max()
465 465 elif l.isdescending():
466 466 first = l.max()
467 467 second = l.min()
468 468 else:
469 469 first = l.first()
470 470 second = l.last()
471 471
472 472 if first is None:
473 473 raise error.Abort(_('empty revision range'))
474 474 if (first == second and len(revs) >= 2
475 475 and not all(revrange(repo, [r]) for r in revs)):
476 476 raise error.Abort(_('empty revision on one side of range'))
477 477
478 478 # if top-level is range expression, the result must always be a pair
479 479 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
480 480 return repo[first], repo[None]
481 481
482 482 return repo[first], repo[second]
483 483
484 484 def revrange(repo, specs, localalias=None):
485 485 """Execute 1 to many revsets and return the union.
486 486
487 487 This is the preferred mechanism for executing revsets using user-specified
488 488 config options, such as revset aliases.
489 489
490 490 The revsets specified by ``specs`` will be executed via a chained ``OR``
491 491 expression. If ``specs`` is empty, an empty result is returned.
492 492
493 493 ``specs`` can contain integers, in which case they are assumed to be
494 494 revision numbers.
495 495
496 496 It is assumed the revsets are already formatted. If you have arguments
497 497 that need to be expanded in the revset, call ``revsetlang.formatspec()``
498 498 and pass the result as an element of ``specs``.
499 499
500 500 Specifying a single revset is allowed.
501 501
502 502 Returns a ``revset.abstractsmartset`` which is a list-like interface over
503 503 integer revisions.
504 504 """
505 505 allspecs = []
506 506 for spec in specs:
507 507 if isinstance(spec, int):
508 508 spec = revsetlang.formatspec('rev(%d)', spec)
509 509 allspecs.append(spec)
510 510 return repo.anyrevs(allspecs, user=True, localalias=localalias)
511 511
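A hedged sketch of the calling convention documented above: pre-format any arguments with revsetlang.formatspec(), then pass each spec as an element of the list. The 'default' branch name is an assumption for illustration.

    spec = revsetlang.formatspec('heads(branch(%s))', 'default')  # safe quoting
    revs = revrange(repo, [spec, 'tip'])      # union of the two revsets
    for rev in revs:                          # abstractsmartset iterates revs
        repo.ui.write("%d\n" % rev)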
512 512 def meaningfulparents(repo, ctx):
513 513 """Return list of meaningful (or all if debug) parentrevs for rev.
514 514
515 515 For merges (two non-nullrev revisions) both parents are meaningful.
516 516 Otherwise the first parent revision is considered meaningful if it
517 517 is not the preceding revision.
518 518 """
519 519 parents = ctx.parents()
520 520 if len(parents) > 1:
521 521 return parents
522 522 if repo.ui.debugflag:
523 523 return [parents[0], repo['null']]
524 524 if parents[0].rev() >= intrev(ctx) - 1:
525 525 return []
526 526 return parents
527 527
528 528 def expandpats(pats):
529 529 '''Expand bare globs when running on windows.
530 530 On posix we assume it has already been done by sh.'''
531 531 if not util.expandglobs:
532 532 return list(pats)
533 533 ret = []
534 534 for kindpat in pats:
535 535 kind, pat = matchmod._patsplit(kindpat, None)
536 536 if kind is None:
537 537 try:
538 538 globbed = glob.glob(pat)
539 539 except re.error:
540 540 globbed = [pat]
541 541 if globbed:
542 542 ret.extend(globbed)
543 543 continue
544 544 ret.append(kindpat)
545 545 return ret
546 546
547 547 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
548 548 badfn=None):
549 549 '''Return a matcher and the patterns that were used.
550 550 The matcher will warn about bad matches, unless an alternate badfn callback
551 551 is provided.'''
552 552 if pats == ("",):
553 553 pats = []
554 554 if opts is None:
555 555 opts = {}
556 556 if not globbed and default == 'relpath':
557 557 pats = expandpats(pats or [])
558 558
559 559 def bad(f, msg):
560 560 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
561 561
562 562 if badfn is None:
563 563 badfn = bad
564 564
565 565 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
566 566 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
567 567
568 568 if m.always():
569 569 pats = []
570 570 return m, pats
571 571
572 572 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
573 573 badfn=None):
574 574 '''Return a matcher that will warn about bad matches.'''
575 575 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
576 576
577 577 def matchall(repo):
578 578 '''Return a matcher that will efficiently match everything.'''
579 579 return matchmod.always(repo.root, repo.getcwd())
580 580
581 581 def matchfiles(repo, files, badfn=None):
582 582 '''Return a matcher that will efficiently match exactly these files.'''
583 583 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
584 584
585 585 def parsefollowlinespattern(repo, rev, pat, msg):
586 586 """Return a file name from `pat` pattern suitable for usage in followlines
587 587 logic.
588 588 """
589 589 if not matchmod.patkind(pat):
590 590 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
591 591 else:
592 592 ctx = repo[rev]
593 593 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
594 594 files = [f for f in ctx if m(f)]
595 595 if len(files) != 1:
596 596 raise error.ParseError(msg)
597 597 return files[0]
598 598
599 599 def origpath(ui, repo, filepath):
600 600 '''customize where .orig files are created
601 601
602 602 Fetch user defined path from config file: [ui] origbackuppath = <path>
603 603 Fall back to default (filepath with .orig suffix) if not specified
604 604 '''
605 605 origbackuppath = ui.config('ui', 'origbackuppath')
606 606 if not origbackuppath:
607 607 return filepath + ".orig"
608 608
609 609 # Convert filepath from an absolute path into a path inside the repo.
610 610 filepathfromroot = util.normpath(os.path.relpath(filepath,
611 611 start=repo.root))
612 612
613 613 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
614 614 origbackupdir = origvfs.dirname(filepathfromroot)
615 615 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
616 616 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
617 617
618 618 # Remove any files that conflict with the backup file's path
619 619 for f in reversed(list(util.finddirs(filepathfromroot))):
620 620 if origvfs.isfileorlink(f):
621 621 ui.note(_('removing conflicting file: %s\n')
622 622 % origvfs.join(f))
623 623 origvfs.unlink(f)
624 624 break
625 625
626 626 origvfs.makedirs(origbackupdir)
627 627
628 628 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
629 629 ui.note(_('removing conflicting directory: %s\n')
630 630 % origvfs.join(filepathfromroot))
631 631 origvfs.rmtree(filepathfromroot, forcibly=True)
632 632
633 633 return origvfs.join(filepathfromroot)
634 634
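An illustrative call, with hypothetical paths and config value: when the hgrc sets "[ui] origbackuppath = .hg/origbackups", backups are redirected under that directory; without it, the traditional .orig suffix is used.

    backup = origpath(ui, repo, repo.wjoin('src/module.py'))
    # with origbackuppath set:   <repo>/.hg/origbackups/src/module.py
    # without it:                <repo>/src/module.py.orig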
635 635 class _containsnode(object):
636 636 """proxy __contains__(node) to container.__contains__ which accepts revs"""
637 637
638 638 def __init__(self, repo, revcontainer):
639 639 self._torev = repo.changelog.rev
640 640 self._revcontains = revcontainer.__contains__
641 641
642 642 def __contains__(self, node):
643 643 return self._revcontains(self._torev(node))
644 644
645 645 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
646 646 """do common cleanups when old nodes are replaced by new nodes
647 647
648 648 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
649 649 (we might also want to move working directory parent in the future)
650 650
651 651 By default, bookmark moves are calculated automatically from 'replacements',
652 652 but 'moves' can be used to override that. Also, 'moves' may include
653 653 additional bookmark moves that should not have associated obsmarkers.
654 654
655 655 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
656 656 have replacements. operation is a string, like "rebase".
657 657
658 658 metadata is a dictionary containing metadata to be stored in obsmarkers if
659 659 obsolescence is enabled.
660 660 """
661 661 if not replacements and not moves:
662 662 return
663 663
664 664 # translate mapping's other forms
665 665 if not util.safehasattr(replacements, 'items'):
666 666 replacements = {n: () for n in replacements}
667 667
668 668 # Calculate bookmark movements
669 669 if moves is None:
670 670 moves = {}
671 671 # Unfiltered repo is needed since nodes in replacements might be hidden.
672 672 unfi = repo.unfiltered()
673 673 for oldnode, newnodes in replacements.items():
674 674 if oldnode in moves:
675 675 continue
676 676 if len(newnodes) > 1:
677 677 # usually a split, take the one with biggest rev number
678 678 newnode = next(unfi.set('max(%ln)', newnodes)).node()
679 679 elif len(newnodes) == 0:
680 680 # move bookmark backwards
681 681 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
682 682 list(replacements)))
683 683 if roots:
684 684 newnode = roots[0].node()
685 685 else:
686 686 newnode = nullid
687 687 else:
688 688 newnode = newnodes[0]
689 689 moves[oldnode] = newnode
690 690
691 691 with repo.transaction('cleanup') as tr:
692 692 # Move bookmarks
693 693 bmarks = repo._bookmarks
694 694 bmarkchanges = []
695 695 allnewnodes = [n for ns in replacements.values() for n in ns]
696 696 for oldnode, newnode in moves.items():
697 697 oldbmarks = repo.nodebookmarks(oldnode)
698 698 if not oldbmarks:
699 699 continue
700 700 from . import bookmarks # avoid import cycle
701 701 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
702 702 (util.rapply(pycompat.maybebytestr, oldbmarks),
703 703 hex(oldnode), hex(newnode)))
704 704 # Delete divergent bookmarks being parents of related newnodes
705 705 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
706 706 allnewnodes, newnode, oldnode)
707 707 deletenodes = _containsnode(repo, deleterevs)
708 708 for name in oldbmarks:
709 709 bmarkchanges.append((name, newnode))
710 710 for b in bookmarks.divergent2delete(repo, deletenodes, name):
711 711 bmarkchanges.append((b, None))
712 712
713 713 if bmarkchanges:
714 714 bmarks.applychanges(repo, tr, bmarkchanges)
715 715
716 716 # Obsolete or strip nodes
717 717 if obsolete.isenabled(repo, obsolete.createmarkersopt):
718 718 # If a node is already obsoleted, and we want to obsolete it
719 719 # without a successor, skip that obsolete request since it's
720 720 # unnecessary. That's the "if s or not isobs(n)" check below.
721 721 # Also sort the nodes in topological order; that might be useful for
722 722 # some obsstore logic.
723 723 # NOTE: the filtering and sorting might belong to createmarkers.
724 724 isobs = unfi.obsstore.successors.__contains__
725 725 torev = unfi.changelog.rev
726 726 sortfunc = lambda ns: torev(ns[0])
727 727 rels = [(unfi[n], tuple(unfi[m] for m in s))
728 728 for n, s in sorted(replacements.items(), key=sortfunc)
729 729 if s or not isobs(n)]
730 730 if rels:
731 731 obsolete.createmarkers(repo, rels, operation=operation,
732 732 metadata=metadata)
733 733 else:
734 734 from . import repair # avoid import cycle
735 735 tostrip = list(replacements)
736 736 if tostrip:
737 737 repair.delayedstrip(repo.ui, repo, tostrip, operation)
738 738
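A hedged sketch of the common call pattern, where oldnode and newnode are hypothetical binary node ids of a rewritten changeset: cleanupnodes then either creates an obsmarker or schedules a strip, and moves any bookmarks accordingly.

    replacements = {oldnode: [newnode]}        # old changeset -> its successor
    cleanupnodes(repo, replacements, operation='rebase')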
739 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
739 def addremove(repo, matcher, prefix, opts=None):
740 740 if opts is None:
741 741 opts = {}
742 742 m = matcher
743 if dry_run is None:
744 743 dry_run = opts.get('dry_run')
745 if similarity is None:
746 744 similarity = float(opts.get('similarity') or 0)
747 745
748 746 ret = 0
749 747 join = lambda f: os.path.join(prefix, f)
750 748
751 749 wctx = repo[None]
752 750 for subpath in sorted(wctx.substate):
753 751 submatch = matchmod.subdirmatcher(subpath, m)
754 752 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
755 753 sub = wctx.sub(subpath)
756 754 try:
757 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
755 if sub.addremove(submatch, prefix, opts):
758 756 ret = 1
759 757 except error.LookupError:
760 758 repo.ui.status(_("skipping missing subrepository: %s\n")
761 759 % join(subpath))
762 760
763 761 rejected = []
764 762 def badfn(f, msg):
765 763 if f in m.files():
766 764 m.bad(f, msg)
767 765 rejected.append(f)
768 766
769 767 badmatch = matchmod.badmatch(m, badfn)
770 768 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
771 769 badmatch)
772 770
773 771 unknownset = set(unknown + forgotten)
774 772 toprint = unknownset.copy()
775 773 toprint.update(deleted)
776 774 for abs in sorted(toprint):
777 775 if repo.ui.verbose or not m.exact(abs):
778 776 if abs in unknownset:
779 777 status = _('adding %s\n') % m.uipath(abs)
780 778 else:
781 779 status = _('removing %s\n') % m.uipath(abs)
782 780 repo.ui.status(status)
783 781
784 782 renames = _findrenames(repo, m, added + unknown, removed + deleted,
785 783 similarity)
786 784
787 785 if not dry_run:
788 786 _markchanges(repo, unknown + forgotten, deleted, renames)
789 787
790 788 for f in rejected:
791 789 if f in m.files():
792 790 return 1
793 791 return ret
794 792
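After this change the extra knobs travel inside opts rather than as separate keyword arguments. A hedged sketch of the new calling convention follows; judging from how the values are read back above, 'similarity' is expected here as a 0-1 fraction.

    opts = {'dry_run': True, 'similarity': 0.5}     # assumed fraction, not percent
    ret = addremove(repo, matchall(repo), '', opts)  # nonzero if a listed file was rejected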
795 793 def marktouched(repo, files, similarity=0.0):
796 794 '''Assert that files have somehow been operated upon. files are relative to
797 795 the repo root.'''
798 796 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
799 797 rejected = []
800 798
801 799 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
802 800
803 801 if repo.ui.verbose:
804 802 unknownset = set(unknown + forgotten)
805 803 toprint = unknownset.copy()
806 804 toprint.update(deleted)
807 805 for abs in sorted(toprint):
808 806 if abs in unknownset:
809 807 status = _('adding %s\n') % abs
810 808 else:
811 809 status = _('removing %s\n') % abs
812 810 repo.ui.status(status)
813 811
814 812 renames = _findrenames(repo, m, added + unknown, removed + deleted,
815 813 similarity)
816 814
817 815 _markchanges(repo, unknown + forgotten, deleted, renames)
818 816
819 817 for f in rejected:
820 818 if f in m.files():
821 819 return 1
822 820 return 0
823 821
824 822 def _interestingfiles(repo, matcher):
825 823 '''Walk dirstate with matcher, looking for files that addremove would care
826 824 about.
827 825
828 826 This is different from dirstate.status because it doesn't care about
829 827 whether files are modified or clean.'''
830 828 added, unknown, deleted, removed, forgotten = [], [], [], [], []
831 829 audit_path = pathutil.pathauditor(repo.root, cached=True)
832 830
833 831 ctx = repo[None]
834 832 dirstate = repo.dirstate
835 833 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
836 834 unknown=True, ignored=False, full=False)
837 835 for abs, st in walkresults.iteritems():
838 836 dstate = dirstate[abs]
839 837 if dstate == '?' and audit_path.check(abs):
840 838 unknown.append(abs)
841 839 elif dstate != 'r' and not st:
842 840 deleted.append(abs)
843 841 elif dstate == 'r' and st:
844 842 forgotten.append(abs)
845 843 # for finding renames
846 844 elif dstate == 'r' and not st:
847 845 removed.append(abs)
848 846 elif dstate == 'a':
849 847 added.append(abs)
850 848
851 849 return added, unknown, deleted, removed, forgotten
852 850
853 851 def _findrenames(repo, matcher, added, removed, similarity):
854 852 '''Find renames from removed files to added ones.'''
855 853 renames = {}
856 854 if similarity > 0:
857 855 for old, new, score in similar.findrenames(repo, added, removed,
858 856 similarity):
859 857 if (repo.ui.verbose or not matcher.exact(old)
860 858 or not matcher.exact(new)):
861 859 repo.ui.status(_('recording removal of %s as rename to %s '
862 860 '(%d%% similar)\n') %
863 861 (matcher.rel(old), matcher.rel(new),
864 862 score * 100))
865 863 renames[new] = old
866 864 return renames
867 865
868 866 def _markchanges(repo, unknown, deleted, renames):
869 867 '''Marks the files in unknown as added, the files in deleted as removed,
870 868 and the files in renames as copied.'''
871 869 wctx = repo[None]
872 870 with repo.wlock():
873 871 wctx.forget(deleted)
874 872 wctx.add(unknown)
875 873 for new, old in renames.iteritems():
876 874 wctx.copy(old, new)
877 875
878 876 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
879 877 """Update the dirstate to reflect the intent of copying src to dst. For
880 878 different reasons it might not end with dst being marked as copied from src.
881 879 """
882 880 origsrc = repo.dirstate.copied(src) or src
883 881 if dst == origsrc: # copying back a copy?
884 882 if repo.dirstate[dst] not in 'mn' and not dryrun:
885 883 repo.dirstate.normallookup(dst)
886 884 else:
887 885 if repo.dirstate[origsrc] == 'a' and origsrc == src:
888 886 if not ui.quiet:
889 887 ui.warn(_("%s has not been committed yet, so no copy "
890 888 "data will be stored for %s.\n")
891 889 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
892 890 if repo.dirstate[dst] in '?r' and not dryrun:
893 891 wctx.add([dst])
894 892 elif not dryrun:
895 893 wctx.copy(origsrc, dst)
896 894
897 895 def readrequires(opener, supported):
898 896 '''Reads and parses .hg/requires and checks if all entries found
899 897 are in the list of supported features.'''
900 898 requirements = set(opener.read("requires").splitlines())
901 899 missings = []
902 900 for r in requirements:
903 901 if r not in supported:
904 902 if not r or not r[0:1].isalnum():
905 903 raise error.RequirementError(_(".hg/requires file is corrupt"))
906 904 missings.append(r)
907 905 missings.sort()
908 906 if missings:
909 907 raise error.RequirementError(
910 908 _("repository requires features unknown to this Mercurial: %s")
911 909 % " ".join(missings),
912 910 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
913 911 " for more information"))
914 912 return requirements
915 913
916 914 def writerequires(opener, requirements):
917 915 with opener('requires', 'w') as fp:
918 916 for r in sorted(requirements):
919 917 fp.write("%s\n" % r)
920 918
921 919 class filecachesubentry(object):
922 920 def __init__(self, path, stat):
923 921 self.path = path
924 922 self.cachestat = None
925 923 self._cacheable = None
926 924
927 925 if stat:
928 926 self.cachestat = filecachesubentry.stat(self.path)
929 927
930 928 if self.cachestat:
931 929 self._cacheable = self.cachestat.cacheable()
932 930 else:
933 931 # None means we don't know yet
934 932 self._cacheable = None
935 933
936 934 def refresh(self):
937 935 if self.cacheable():
938 936 self.cachestat = filecachesubentry.stat(self.path)
939 937
940 938 def cacheable(self):
941 939 if self._cacheable is not None:
942 940 return self._cacheable
943 941
944 942 # we don't know yet, assume it is for now
945 943 return True
946 944
947 945 def changed(self):
948 946 # no point in going further if we can't cache it
949 947 if not self.cacheable():
950 948 return True
951 949
952 950 newstat = filecachesubentry.stat(self.path)
953 951
954 952 # we may not know if it's cacheable yet, check again now
955 953 if newstat and self._cacheable is None:
956 954 self._cacheable = newstat.cacheable()
957 955
958 956 # check again
959 957 if not self._cacheable:
960 958 return True
961 959
962 960 if self.cachestat != newstat:
963 961 self.cachestat = newstat
964 962 return True
965 963 else:
966 964 return False
967 965
968 966 @staticmethod
969 967 def stat(path):
970 968 try:
971 969 return util.cachestat(path)
972 970 except OSError as e:
973 971 if e.errno != errno.ENOENT:
974 972 raise
975 973
976 974 class filecacheentry(object):
977 975 def __init__(self, paths, stat=True):
978 976 self._entries = []
979 977 for path in paths:
980 978 self._entries.append(filecachesubentry(path, stat))
981 979
982 980 def changed(self):
983 981 '''true if any entry has changed'''
984 982 for entry in self._entries:
985 983 if entry.changed():
986 984 return True
987 985 return False
988 986
989 987 def refresh(self):
990 988 for entry in self._entries:
991 989 entry.refresh()
992 990
993 991 class filecache(object):
994 992 '''A property like decorator that tracks files under .hg/ for updates.
995 993
996 994 Records stat info when called in _filecache.
997 995
998 996 On subsequent calls, compares old stat info with new info, and recreates the
999 997 object when any of the files changes, updating the new stat info in
1000 998 _filecache.
1001 999
1002 1000 Mercurial either atomically renames or appends to files under .hg,
1003 1001 so to ensure the cache is reliable we need the filesystem to be able
1004 1002 to tell us if a file has been replaced. If it can't, we fallback to
1005 1003 recreating the object on every call (essentially the same behavior as
1006 1004 propertycache).
1007 1005
1008 1006 '''
1009 1007 def __init__(self, *paths):
1010 1008 self.paths = paths
1011 1009
1012 1010 def join(self, obj, fname):
1013 1011 """Used to compute the runtime path of a cached file.
1014 1012
1015 1013 Users should subclass filecache and provide their own version of this
1016 1014 function to call the appropriate join function on 'obj' (an instance
1017 1015 of the class that its member function was decorated).
1018 1016 """
1019 1017 raise NotImplementedError
1020 1018
1021 1019 def __call__(self, func):
1022 1020 self.func = func
1023 1021 self.name = func.__name__.encode('ascii')
1024 1022 return self
1025 1023
1026 1024 def __get__(self, obj, type=None):
1027 1025 # if accessed on the class, return the descriptor itself.
1028 1026 if obj is None:
1029 1027 return self
1030 1028 # do we need to check if the file changed?
1031 1029 if self.name in obj.__dict__:
1032 1030 assert self.name in obj._filecache, self.name
1033 1031 return obj.__dict__[self.name]
1034 1032
1035 1033 entry = obj._filecache.get(self.name)
1036 1034
1037 1035 if entry:
1038 1036 if entry.changed():
1039 1037 entry.obj = self.func(obj)
1040 1038 else:
1041 1039 paths = [self.join(obj, path) for path in self.paths]
1042 1040
1043 1041 # We stat -before- creating the object so our cache doesn't lie if
1044 1042 # a writer modified between the time we read and stat
1045 1043 entry = filecacheentry(paths, True)
1046 1044 entry.obj = self.func(obj)
1047 1045
1048 1046 obj._filecache[self.name] = entry
1049 1047
1050 1048 obj.__dict__[self.name] = entry.obj
1051 1049 return entry.obj
1052 1050
1053 1051 def __set__(self, obj, value):
1054 1052 if self.name not in obj._filecache:
1055 1053 # we add an entry for the missing value because X in __dict__
1056 1054 # implies X in _filecache
1057 1055 paths = [self.join(obj, path) for path in self.paths]
1058 1056 ce = filecacheentry(paths, False)
1059 1057 obj._filecache[self.name] = ce
1060 1058 else:
1061 1059 ce = obj._filecache[self.name]
1062 1060
1063 1061 ce.obj = value # update cached copy
1064 1062 obj.__dict__[self.name] = value # update copy returned by obj.x
1065 1063
1066 1064 def __delete__(self, obj):
1067 1065 try:
1068 1066 del obj.__dict__[self.name]
1069 1067 except KeyError:
1070 1068 raise AttributeError(self.name)
1071 1069
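A hedged sketch of the subclassing pattern described in the docstring: the subclass' join() maps the tracked filename to a real path on the owning object, and the decorated property is recomputed only when that file changes. The vfs attribute and the parsing helper are assumptions for illustration.

    class repofilecache(filecache):
        def join(self, obj, fname):
            return obj.vfs.join(fname)          # resolve against .hg/

    class fakerepo(object):
        def __init__(self, vfs):
            self.vfs = vfs
            self._filecache = {}                # required by the descriptor

        @repofilecache('bookmarks')
        def bookmarks(self):
            # hypothetical parser; only rerun when .hg/bookmarks changes
            return parsebookmarks(self.vfs.read('bookmarks'))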
1072 1070 def extdatasource(repo, source):
1073 1071 """Gather a map of rev -> value dict from the specified source
1074 1072
1075 1073 A source spec is treated as a URL, with a special case shell: type
1076 1074 for parsing the output from a shell command.
1077 1075
1078 1076 The data is parsed as a series of newline-separated records where
1079 1077 each record is a revision specifier optionally followed by a space
1080 1078 and a freeform string value. If the revision is known locally, it
1081 1079 is converted to a rev, otherwise the record is skipped.
1082 1080
1083 1081 Note that both key and value are treated as UTF-8 and converted to
1084 1082 the local encoding. This allows uniformity between local and
1085 1083 remote data sources.
1086 1084 """
1087 1085
1088 1086 spec = repo.ui.config("extdata", source)
1089 1087 if not spec:
1090 1088 raise error.Abort(_("unknown extdata source '%s'") % source)
1091 1089
1092 1090 data = {}
1093 1091 src = proc = None
1094 1092 try:
1095 1093 if spec.startswith("shell:"):
1096 1094 # external commands should be run relative to the repo root
1097 1095 cmd = spec[6:]
1098 1096 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1099 1097 close_fds=procutil.closefds,
1100 1098 stdout=subprocess.PIPE, cwd=repo.root)
1101 1099 src = proc.stdout
1102 1100 else:
1103 1101 # treat as a URL or file
1104 1102 src = url.open(repo.ui, spec)
1105 1103 for l in src:
1106 1104 if " " in l:
1107 1105 k, v = l.strip().split(" ", 1)
1108 1106 else:
1109 1107 k, v = l.strip(), ""
1110 1108
1111 1109 k = encoding.tolocal(k)
1112 1110 try:
1113 1111 data[repo[k].rev()] = encoding.tolocal(v)
1114 1112 except (error.LookupError, error.RepoLookupError):
1115 1113 pass # we ignore data for nodes that don't exist locally
1116 1114 finally:
1117 1115 if proc:
1118 1116 proc.communicate()
1119 1117 if src:
1120 1118 src.close()
1121 1119 if proc and proc.returncode != 0:
1122 1120 raise error.Abort(_("extdata command '%s' failed: %s")
1123 1121 % (cmd, procutil.explainexit(proc.returncode)[0]))
1124 1122
1125 1123 return data
1126 1124
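A hedged example of consuming this: with a config entry such as "[extdata] bugzilla = shell:cat .hg/bugmap", where .hg/bugmap holds lines of the form "<revspec> <value>", the call returns a rev -> value mapping (the source name and file are made up).

    data = extdatasource(repo, 'bugzilla')
    # e.g. {0: 'issue1234', 5: 'issue9999'}; revisions unknown locally are skipped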
1127 1125 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1128 1126 if lock is None:
1129 1127 raise error.LockInheritanceContractViolation(
1130 1128 'lock can only be inherited while held')
1131 1129 if environ is None:
1132 1130 environ = {}
1133 1131 with lock.inherit() as locker:
1134 1132 environ[envvar] = locker
1135 1133 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1136 1134
1137 1135 def wlocksub(repo, cmd, *args, **kwargs):
1138 1136 """run cmd as a subprocess that allows inheriting repo's wlock
1139 1137
1140 1138 This can only be called while the wlock is held. This takes all the
1141 1139 arguments that ui.system does, and returns the exit code of the
1142 1140 subprocess."""
1143 1141 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1144 1142 **kwargs)
1145 1143
1146 1144 def gdinitconfig(ui):
1147 1145 """helper function to know if a repo should be created as general delta
1148 1146 """
1149 1147 # experimental config: format.generaldelta
1150 1148 return (ui.configbool('format', 'generaldelta')
1151 1149 or ui.configbool('format', 'usegeneraldelta'))
1152 1150
1153 1151 def gddeltaconfig(ui):
1154 1152 """helper function to know if incoming delta should be optimised
1155 1153 """
1156 1154 # experimental config: format.generaldelta
1157 1155 return ui.configbool('format', 'generaldelta')
1158 1156
1159 1157 class simplekeyvaluefile(object):
1160 1158 """A simple file with key=value lines
1161 1159
1162 1160 Keys must be alphanumerics and start with a letter, values must not
1163 1161 contain '\n' characters"""
1164 1162 firstlinekey = '__firstline'
1165 1163
1166 1164 def __init__(self, vfs, path, keys=None):
1167 1165 self.vfs = vfs
1168 1166 self.path = path
1169 1167
1170 1168 def read(self, firstlinenonkeyval=False):
1171 1169 """Read the contents of a simple key-value file
1172 1170
1173 1171 'firstlinenonkeyval' indicates whether the first line of file should
1174 1172 be treated as a key-value pair or returned fully under the
1175 1173 __firstline key."""
1176 1174 lines = self.vfs.readlines(self.path)
1177 1175 d = {}
1178 1176 if firstlinenonkeyval:
1179 1177 if not lines:
1180 1178 e = _("empty simplekeyvalue file")
1181 1179 raise error.CorruptedState(e)
1182 1180 # we don't want to include '\n' in the __firstline
1183 1181 d[self.firstlinekey] = lines[0][:-1]
1184 1182 del lines[0]
1185 1183
1186 1184 try:
1187 1185 # the 'if line.strip()' part prevents us from failing on empty
1188 1186 # lines which only contain '\n' therefore are not skipped
1189 1187 # by 'if line'
1190 1188 updatedict = dict(line[:-1].split('=', 1) for line in lines
1191 1189 if line.strip())
1192 1190 if self.firstlinekey in updatedict:
1193 1191 e = _("%r can't be used as a key")
1194 1192 raise error.CorruptedState(e % self.firstlinekey)
1195 1193 d.update(updatedict)
1196 1194 except ValueError as e:
1197 1195 raise error.CorruptedState(str(e))
1198 1196 return d
1199 1197
1200 1198 def write(self, data, firstline=None):
1201 1199 """Write key=>value mapping to a file
1202 1200 data is a dict. Keys must be alphanumerical and start with a letter.
1203 1201 Values must not contain newline characters.
1204 1202
1205 1203 If 'firstline' is not None, it is written to file before
1206 1204 everything else, as it is, not in a key=value form"""
1207 1205 lines = []
1208 1206 if firstline is not None:
1209 1207 lines.append('%s\n' % firstline)
1210 1208
1211 1209 for k, v in data.items():
1212 1210 if k == self.firstlinekey:
1213 1211 e = "key name '%s' is reserved" % self.firstlinekey
1214 1212 raise error.ProgrammingError(e)
1215 1213 if not k[0:1].isalpha():
1216 1214 e = "keys must start with a letter in a key-value file"
1217 1215 raise error.ProgrammingError(e)
1218 1216 if not k.isalnum():
1219 1217 e = "invalid key name in a simple key-value file"
1220 1218 raise error.ProgrammingError(e)
1221 1219 if '\n' in v:
1222 1220 e = "invalid value in a simple key-value file"
1223 1221 raise error.ProgrammingError(e)
1224 1222 lines.append("%s=%s\n" % (k, v))
1225 1223 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1226 1224 fp.write(''.join(lines))
1227 1225
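A hedged round-trip sketch; the file name and keys are made up, only the read/write behaviour shown above is assumed.

    kvfile = simplekeyvaluefile(repo.vfs, 'myextstate')
    kvfile.write({'version': '2', 'name': 'default'}, firstline='2')
    state = kvfile.read(firstlinenonkeyval=True)
    # state == {'__firstline': '2', 'version': '2', 'name': 'default'}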
1228 1226 _reportobsoletedsource = [
1229 1227 'debugobsolete',
1230 1228 'pull',
1231 1229 'push',
1232 1230 'serve',
1233 1231 'unbundle',
1234 1232 ]
1235 1233
1236 1234 _reportnewcssource = [
1237 1235 'pull',
1238 1236 'unbundle',
1239 1237 ]
1240 1238
1241 1239 # a list of (repo, ctx, files) functions called by various commands to allow
1242 1240 # extensions to ensure the corresponding files are available locally, before the
1243 1241 # command uses them.
1244 1242 fileprefetchhooks = util.hooks()
1245 1243
1246 1244 # A marker that tells the evolve extension to suppress its own reporting
1247 1245 _reportstroubledchangesets = True
1248 1246
1249 1247 def registersummarycallback(repo, otr, txnname=''):
1250 1248 """register a callback to issue a summary after the transaction is closed
1251 1249 """
1252 1250 def txmatch(sources):
1253 1251 return any(txnname.startswith(source) for source in sources)
1254 1252
1255 1253 categories = []
1256 1254
1257 1255 def reportsummary(func):
1258 1256 """decorator for report callbacks."""
1259 1257 # The repoview life cycle is shorter than the one of the actual
1260 1258 # underlying repository. So the filtered object can die before the
1261 1259 # weakref is used leading to troubles. We keep a reference to the
1262 1260 # unfiltered object and restore the filtering when retrieving the
1263 1261 # repository through the weakref.
1264 1262 filtername = repo.filtername
1265 1263 reporef = weakref.ref(repo.unfiltered())
1266 1264 def wrapped(tr):
1267 1265 repo = reporef()
1268 1266 if filtername:
1269 1267 repo = repo.filtered(filtername)
1270 1268 func(repo, tr)
1271 1269 newcat = '%02i-txnreport' % len(categories)
1272 1270 otr.addpostclose(newcat, wrapped)
1273 1271 categories.append(newcat)
1274 1272 return wrapped
1275 1273
1276 1274 if txmatch(_reportobsoletedsource):
1277 1275 @reportsummary
1278 1276 def reportobsoleted(repo, tr):
1279 1277 obsoleted = obsutil.getobsoleted(repo, tr)
1280 1278 if obsoleted:
1281 1279 repo.ui.status(_('obsoleted %i changesets\n')
1282 1280 % len(obsoleted))
1283 1281
1284 1282 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1285 1283 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1286 1284 instabilitytypes = [
1287 1285 ('orphan', 'orphan'),
1288 1286 ('phase-divergent', 'phasedivergent'),
1289 1287 ('content-divergent', 'contentdivergent'),
1290 1288 ]
1291 1289
1292 1290 def getinstabilitycounts(repo):
1293 1291 filtered = repo.changelog.filteredrevs
1294 1292 counts = {}
1295 1293 for instability, revset in instabilitytypes:
1296 1294 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1297 1295 filtered)
1298 1296 return counts
1299 1297
1300 1298 oldinstabilitycounts = getinstabilitycounts(repo)
1301 1299 @reportsummary
1302 1300 def reportnewinstabilities(repo, tr):
1303 1301 newinstabilitycounts = getinstabilitycounts(repo)
1304 1302 for instability, revset in instabilitytypes:
1305 1303 delta = (newinstabilitycounts[instability] -
1306 1304 oldinstabilitycounts[instability])
1307 1305 if delta > 0:
1308 1306 repo.ui.warn(_('%i new %s changesets\n') %
1309 1307 (delta, instability))
1310 1308
1311 1309 if txmatch(_reportnewcssource):
1312 1310 @reportsummary
1313 1311 def reportnewcs(repo, tr):
1314 1312 """Report the range of new revisions pulled/unbundled."""
1315 1313 newrevs = tr.changes.get('revs', xrange(0, 0))
1316 1314 if not newrevs:
1317 1315 return
1318 1316
1319 1317 # Compute the bounds of new revisions' range, excluding obsoletes.
1320 1318 unfi = repo.unfiltered()
1321 1319 revs = unfi.revs('%ld and not obsolete()', newrevs)
1322 1320 if not revs:
1323 1321 # Got only obsoletes.
1324 1322 return
1325 1323 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1326 1324
1327 1325 if minrev == maxrev:
1328 1326 revrange = minrev
1329 1327 else:
1330 1328 revrange = '%s:%s' % (minrev, maxrev)
1331 1329 repo.ui.status(_('new changesets %s\n') % revrange)
1332 1330
1333 1331 def nodesummaries(repo, nodes, maxnumnodes=4):
1334 1332 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1335 1333 return ' '.join(short(h) for h in nodes)
1336 1334 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1337 1335 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1338 1336
1339 1337 def enforcesinglehead(repo, tr, desc):
1340 1338 """check that no named branch has multiple heads"""
1341 1339 if desc in ('strip', 'repair'):
1342 1340 # skip the logic during strip
1343 1341 return
1344 1342 visible = repo.filtered('visible')
1345 1343 # possible improvement: we could restrict the check to affected branch
1346 1344 for name, heads in visible.branchmap().iteritems():
1347 1345 if len(heads) > 1:
1348 1346 msg = _('rejecting multiple heads on branch "%s"')
1349 1347 msg %= name
1350 1348 hint = _('%d heads: %s')
1351 1349 hint %= (len(heads), nodesummaries(repo, heads))
1352 1350 raise error.Abort(msg, hint=hint)
1353 1351
1354 1352 def wrapconvertsink(sink):
1355 1353 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1356 1354 before it is used, whether or not the convert extension was formally loaded.
1357 1355 """
1358 1356 return sink
1359 1357
1360 1358 def unhidehashlikerevs(repo, specs, hiddentype):
1361 1359 """parse the user specs and unhide changesets whose hash or revision number
1362 1360 is passed.
1363 1361
1364 1362 hiddentype can be: 1) 'warn': warn while unhiding changesets
1365 1363 2) 'nowarn': don't warn while unhiding changesets
1366 1364
1367 1365 returns a repo object with the required changesets unhidden
1368 1366 """
1369 1367 if not repo.filtername or not repo.ui.configbool('experimental',
1370 1368 'directaccess'):
1371 1369 return repo
1372 1370
1373 1371 if repo.filtername not in ('visible', 'visible-hidden'):
1374 1372 return repo
1375 1373
1376 1374 symbols = set()
1377 1375 for spec in specs:
1378 1376 try:
1379 1377 tree = revsetlang.parse(spec)
1380 1378 except error.ParseError: # will be reported by scmutil.revrange()
1381 1379 continue
1382 1380
1383 1381 symbols.update(revsetlang.gethashlikesymbols(tree))
1384 1382
1385 1383 if not symbols:
1386 1384 return repo
1387 1385
1388 1386 revs = _getrevsfromsymbols(repo, symbols)
1389 1387
1390 1388 if not revs:
1391 1389 return repo
1392 1390
1393 1391 if hiddentype == 'warn':
1394 1392 unfi = repo.unfiltered()
1395 1393 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1396 1394 repo.ui.warn(_("warning: accessing hidden changesets for write "
1397 1395 "operation: %s\n") % revstr)
1398 1396
1399 1397 # we have to use a new filtername to separate branch/tags caches until we can
1400 1398 # disable these caches when revisions are dynamically pinned.
1401 1399 return repo.filtered('visible-hidden', revs)
1402 1400
1403 1401 def _getrevsfromsymbols(repo, symbols):
1404 1402 """parse the list of symbols and returns a set of revision numbers of hidden
1405 1403 changesets present in symbols"""
1406 1404 revs = set()
1407 1405 unfi = repo.unfiltered()
1408 1406 unficl = unfi.changelog
1409 1407 cl = repo.changelog
1410 1408 tiprev = len(unficl)
1411 1409 pmatch = unficl._partialmatch
1412 1410 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1413 1411 for s in symbols:
1414 1412 try:
1415 1413 n = int(s)
1416 1414 if n <= tiprev:
1417 1415 if not allowrevnums:
1418 1416 continue
1419 1417 else:
1420 1418 if n not in cl:
1421 1419 revs.add(n)
1422 1420 continue
1423 1421 except ValueError:
1424 1422 pass
1425 1423
1426 1424 try:
1427 1425 s = pmatch(s)
1428 1426 except (error.LookupError, error.WdirUnsupported):
1429 1427 s = None
1430 1428
1431 1429 if s is not None:
1432 1430 rev = unficl.rev(s)
1433 1431 if rev not in cl:
1434 1432 revs.add(rev)
1435 1433
1436 1434 return revs
@@ -1,1815 +1,1814 b''
1 1 # subrepo.py - sub-repository classes and factory
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22 from .i18n import _
23 23 from . import (
24 24 cmdutil,
25 25 encoding,
26 26 error,
27 27 exchange,
28 28 logcmdutil,
29 29 match as matchmod,
30 30 node,
31 31 pathutil,
32 32 phases,
33 33 pycompat,
34 34 scmutil,
35 35 subrepoutil,
36 36 util,
37 37 vfs as vfsmod,
38 38 )
39 39 from .utils import (
40 40 dateutil,
41 41 procutil,
42 42 stringutil,
43 43 )
44 44
45 45 hg = None
46 46 reporelpath = subrepoutil.reporelpath
47 47 subrelpath = subrepoutil.subrelpath
48 48 _abssource = subrepoutil._abssource
49 49 propertycache = util.propertycache
50 50
51 51 def _expandedabspath(path):
52 52 '''
53 53 get a path or url; if it is a path, expand it and return an absolute path
54 54 '''
55 55 expandedpath = util.urllocalpath(util.expandpath(path))
56 56 u = util.url(expandedpath)
57 57 if not u.scheme:
58 58 path = util.normpath(os.path.abspath(u.path))
59 59 return path
60 60
61 61 def _getstorehashcachename(remotepath):
62 62 '''get a unique filename for the store hash cache of a remote repository'''
63 63 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64 64
65 65 class SubrepoAbort(error.Abort):
66 66 """Exception class used to avoid handling a subrepo error more than once"""
67 67 def __init__(self, *args, **kw):
68 68 self.subrepo = kw.pop(r'subrepo', None)
69 69 self.cause = kw.pop(r'cause', None)
70 70 error.Abort.__init__(self, *args, **kw)
71 71
72 72 def annotatesubrepoerror(func):
73 73 def decoratedmethod(self, *args, **kargs):
74 74 try:
75 75 res = func(self, *args, **kargs)
76 76 except SubrepoAbort as ex:
77 77 # This exception has already been handled
78 78 raise ex
79 79 except error.Abort as ex:
80 80 subrepo = subrelpath(self)
81 81 errormsg = (stringutil.forcebytestr(ex) + ' '
82 82 + _('(in subrepository "%s")') % subrepo)
83 83 # avoid handling this exception by raising a SubrepoAbort exception
84 84 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
85 85 cause=sys.exc_info())
86 86 return res
87 87 return decoratedmethod
88 88
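
annotatesubrepoerror implements a "wrap once" pattern: a plain Abort is re-raised as SubrepoAbort with the subrepository path appended, while an already-wrapped SubrepoAbort passes through untouched, so the annotation is added only at the innermost failing layer. A minimal generic sketch of the same pattern, assuming nothing from Mercurial (all names here are illustrative):

    import functools
    import sys

    class WrappedError(RuntimeError):
        """Error that has already been annotated with its context."""
        def __init__(self, msg, cause=None):
            super(WrappedError, self).__init__(msg)
            self.cause = cause

    def annotate(label):
        def deco(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except WrappedError:
                    raise  # already annotated once, pass through untouched
                except RuntimeError as exc:
                    raise WrappedError('%s (in %s)' % (exc, label),
                                       cause=sys.exc_info())
            return wrapper
        return deco

    @annotate('sub/widget')
    def fails():
        raise RuntimeError('boom')  # surfaces as WrappedError('boom (in sub/widget)')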
89 89 def _updateprompt(ui, sub, dirty, local, remote):
90 90 if dirty:
91 91 msg = (_(' subrepository sources for %s differ\n'
92 92 'use (l)ocal source (%s) or (r)emote source (%s)?'
93 93 '$$ &Local $$ &Remote')
94 94 % (subrelpath(sub), local, remote))
95 95 else:
96 96 msg = (_(' subrepository sources for %s differ (in checked out '
97 97 'version)\n'
98 98 'use (l)ocal source (%s) or (r)emote source (%s)?'
99 99 '$$ &Local $$ &Remote')
100 100 % (subrelpath(sub), local, remote))
101 101 return ui.promptchoice(msg, 0)
102 102
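
The prompt strings above use Mercurial's choice syntax: '$$' separates the question from each choice and '&' marks the response key, with ui.promptchoice returning the index of the chosen option (the second argument being the default). A small stand-alone sketch of splitting that syntax apart (hypothetical helper, not a Mercurial API):

    def splitchoices(prompt):
        """Split an 'msg $$ &Local $$ &Remote' style prompt into (msg, choices)."""
        parts = prompt.split('$$')
        msg = parts[0].rstrip()
        choices = [p.strip().replace('&', '', 1) for p in parts[1:]]
        return msg, choices

    msg, choices = splitchoices(
        ' subrepository sources for sub differ\n'
        'use (l)ocal source (A) or (r)emote source (B)?'
        '$$ &Local $$ &Remote')
    print(choices)  # ['Local', 'Remote']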
103 103 def _sanitize(ui, vfs, ignore):
104 104 for dirname, dirs, names in vfs.walk():
105 105 for i, d in enumerate(dirs):
106 106 if d.lower() == ignore:
107 107 del dirs[i]
108 108 break
109 109 if vfs.basename(dirname).lower() != '.hg':
110 110 continue
111 111 for f in names:
112 112 if f.lower() == 'hgrc':
113 113 ui.warn(_("warning: removing potentially hostile 'hgrc' "
114 114 "in '%s'\n") % vfs.join(dirname))
115 115 vfs.unlink(vfs.reljoin(dirname, f))
116 116
117 117 def _auditsubrepopath(repo, path):
118 118 # auditor doesn't check if the path itself is a symlink
119 119 pathutil.pathauditor(repo.root)(path)
120 120 if repo.wvfs.islink(path):
121 121 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
122 122
123 123 SUBREPO_ALLOWED_DEFAULTS = {
124 124 'hg': True,
125 125 'git': False,
126 126 'svn': False,
127 127 }
128 128
129 129 def _checktype(ui, kind):
130 130 # subrepos.allowed is a master kill switch. If disabled, subrepos are
131 131 # disabled period.
132 132 if not ui.configbool('subrepos', 'allowed', True):
133 133 raise error.Abort(_('subrepos not enabled'),
134 134 hint=_("see 'hg help config.subrepos' for details"))
135 135
136 136 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
137 137 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
138 138 raise error.Abort(_('%s subrepos not allowed') % kind,
139 139 hint=_("see 'hg help config.subrepos' for details"))
140 140
141 141 if kind not in types:
142 142 raise error.Abort(_('unknown subrepo type %s') % kind)
143 143
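
Given the defaults above (hg allowed, git and svn opt-in), a repository that wants git subrepos has to enable them explicitly. A sketch of the corresponding hgrc section, using the config names read by _checktype (values are illustrative):

    [subrepos]
    # master kill switch; defaults to true
    allowed = true
    # per-type opt-in; git and svn default to false
    git:allowed = true
    svn:allowed = false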
144 144 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
145 145 """return instance of the right subrepo class for subrepo in path"""
146 146 # subrepo inherently violates our import layering rules
147 147 # because it wants to make repo objects from deep inside the stack
148 148 # so we manually delay the circular imports to not break
149 149 # scripts that don't use our demand-loading
150 150 global hg
151 151 from . import hg as h
152 152 hg = h
153 153
154 154 repo = ctx.repo()
155 155 _auditsubrepopath(repo, path)
156 156 state = ctx.substate[path]
157 157 _checktype(repo.ui, state[2])
158 158 if allowwdir:
159 159 state = (state[0], ctx.subrev(path), state[2])
160 160 return types[state[2]](ctx, path, state[:2], allowcreate)
161 161
162 162 def nullsubrepo(ctx, path, pctx):
163 163 """return an empty subrepo in pctx for the extant subrepo in ctx"""
164 164 # subrepo inherently violates our import layering rules
165 165 # because it wants to make repo objects from deep inside the stack
166 166 # so we manually delay the circular imports to not break
167 167 # scripts that don't use our demand-loading
168 168 global hg
169 169 from . import hg as h
170 170 hg = h
171 171
172 172 repo = ctx.repo()
173 173 _auditsubrepopath(repo, path)
174 174 state = ctx.substate[path]
175 175 _checktype(repo.ui, state[2])
176 176 subrev = ''
177 177 if state[2] == 'hg':
178 178 subrev = "0" * 40
179 179 return types[state[2]](pctx, path, (state[0], subrev), True)
180 180
181 181 # subrepo classes need to implement the following abstract class:
182 182
183 183 class abstractsubrepo(object):
184 184
185 185 def __init__(self, ctx, path):
186 186 """Initialize abstractsubrepo part
187 187
188 188 	        ``ctx`` is the context referring to this subrepository in the
189 189 	        parent repository.
190 190
191 191 	        ``path`` is the path to this subrepository as seen from the
192 192 	        innermost repository.
193 193 """
194 194 self.ui = ctx.repo().ui
195 195 self._ctx = ctx
196 196 self._path = path
197 197
198 198 def addwebdirpath(self, serverpath, webconf):
199 199 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
200 200
201 201 ``serverpath`` is the path component of the URL for this repo.
202 202
203 203 ``webconf`` is the dictionary of hgwebdir entries.
204 204 """
205 205 pass
206 206
207 207 def storeclean(self, path):
208 208 """
209 209 returns true if the repository has not changed since it was last
210 210 cloned from or pushed to a given repository.
211 211 """
212 212 return False
213 213
214 214 def dirty(self, ignoreupdate=False, missing=False):
215 215 """returns true if the dirstate of the subrepo is dirty or does not
216 216 match current stored state. If ignoreupdate is true, only check
217 217 whether the subrepo has uncommitted changes in its dirstate. If missing
218 218 is true, check for deleted files.
219 219 """
220 220 raise NotImplementedError
221 221
222 222 def dirtyreason(self, ignoreupdate=False, missing=False):
223 223 """return reason string if it is ``dirty()``
224 224
225 225 	        The returned string should contain enough information for the
226 226 	        message of an exception.
227 227
228 228 	        Otherwise, this returns None.
229 229 """
230 230 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
231 231 return _('uncommitted changes in subrepository "%s"'
232 232 ) % subrelpath(self)
233 233
234 234 def bailifchanged(self, ignoreupdate=False, hint=None):
235 235 """raise Abort if subrepository is ``dirty()``
236 236 """
237 237 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
238 238 missing=True)
239 239 if dirtyreason:
240 240 raise error.Abort(dirtyreason, hint=hint)
241 241
242 242 def basestate(self):
243 243 """current working directory base state, disregarding .hgsubstate
244 244 state and working directory modifications"""
245 245 raise NotImplementedError
246 246
247 247 def checknested(self, path):
248 248 """check if path is a subrepository within this repository"""
249 249 return False
250 250
251 251 def commit(self, text, user, date):
252 252 """commit the current changes to the subrepo with the given
253 253 log message. Use given user and date if possible. Return the
254 254 new state of the subrepo.
255 255 """
256 256 raise NotImplementedError
257 257
258 258 def phase(self, state):
259 259 """returns phase of specified state in the subrepository.
260 260 """
261 261 return phases.public
262 262
263 263 def remove(self):
264 264 """remove the subrepo
265 265
266 266 (should verify the dirstate is not dirty first)
267 267 """
268 268 raise NotImplementedError
269 269
270 270 def get(self, state, overwrite=False):
271 271 """run whatever commands are needed to put the subrepo into
272 272 this state
273 273 """
274 274 raise NotImplementedError
275 275
276 276 def merge(self, state):
277 277 """merge currently-saved state with the new state."""
278 278 raise NotImplementedError
279 279
280 280 def push(self, opts):
281 281 """perform whatever action is analogous to 'hg push'
282 282
283 283 This may be a no-op on some systems.
284 284 """
285 285 raise NotImplementedError
286 286
287 287 def add(self, ui, match, prefix, explicitonly, **opts):
288 288 return []
289 289
290 def addremove(self, matcher, prefix, opts, dry_run, similarity):
290 def addremove(self, matcher, prefix, opts):
291 291 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
292 292 return 1
293 293
294 294 def cat(self, match, fm, fntemplate, prefix, **opts):
295 295 return 1
296 296
297 297 def status(self, rev2, **opts):
298 298 return scmutil.status([], [], [], [], [], [], [])
299 299
300 300 def diff(self, ui, diffopts, node2, match, prefix, **opts):
301 301 pass
302 302
303 303 def outgoing(self, ui, dest, opts):
304 304 return 1
305 305
306 306 def incoming(self, ui, source, opts):
307 307 return 1
308 308
309 309 def files(self):
310 310 """return filename iterator"""
311 311 raise NotImplementedError
312 312
313 313 def filedata(self, name, decode):
314 314 """return file data, optionally passed through repo decoders"""
315 315 raise NotImplementedError
316 316
317 317 def fileflags(self, name):
318 318 """return file flags"""
319 319 return ''
320 320
321 321 def getfileset(self, expr):
322 322 """Resolve the fileset expression for this repo"""
323 323 return set()
324 324
325 325 def printfiles(self, ui, m, fm, fmt, subrepos):
326 326 """handle the files command for this subrepo"""
327 327 return 1
328 328
329 329 def archive(self, archiver, prefix, match=None, decode=True):
330 330 if match is not None:
331 331 files = [f for f in self.files() if match(f)]
332 332 else:
333 333 files = self.files()
334 334 total = len(files)
335 335 relpath = subrelpath(self)
336 336 self.ui.progress(_('archiving (%s)') % relpath, 0,
337 337 unit=_('files'), total=total)
338 338 for i, name in enumerate(files):
339 339 flags = self.fileflags(name)
340 340 mode = 'x' in flags and 0o755 or 0o644
341 341 symlink = 'l' in flags
342 342 archiver.addfile(prefix + self._path + '/' + name,
343 343 mode, symlink, self.filedata(name, decode))
344 344 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
345 345 unit=_('files'), total=total)
346 346 self.ui.progress(_('archiving (%s)') % relpath, None)
347 347 return total
348 348
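
The mode computation in the loop above relies on the old and/or idiom: files whose flags contain 'x' get 0o755, everything else 0o644, and 'l' marks symlinks. The same mapping written out plainly (hypothetical helper):

    def archivemode(flags):
        """Map subrepo file flags to (posix mode, is-symlink)."""
        mode = 0o755 if 'x' in flags else 0o644
        return mode, 'l' in flags

    print(archivemode('x'))  # executable file: mode 0o755, not a symlink
    print(archivemode('l'))  # symlink: mode 0o644, symlink flag set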
349 349 def walk(self, match):
350 350 '''
351 351 walk recursively through the directory tree, finding all files
352 352 matched by the match function
353 353 '''
354 354
355 355 def forget(self, match, prefix, dryrun):
356 356 return ([], [])
357 357
358 358 def removefiles(self, matcher, prefix, after, force, subrepos,
359 359 dryrun, warnings):
360 360 """remove the matched files from the subrepository and the filesystem,
361 361 possibly by force and/or after the file has been removed from the
362 362 filesystem. Return 0 on success, 1 on any warning.
363 363 """
364 364 warnings.append(_("warning: removefiles not implemented (%s)")
365 365 % self._path)
366 366 return 1
367 367
368 368 def revert(self, substate, *pats, **opts):
369 369 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
370 370 % (substate[0], substate[2]))
371 371 return []
372 372
373 373 def shortid(self, revid):
374 374 return revid
375 375
376 376 def unshare(self):
377 377 '''
378 378 convert this repository from shared to normal storage.
379 379 '''
380 380
381 381 def verify(self):
382 382 '''verify the integrity of the repository. Return 0 on success or
383 383 warning, 1 on any error.
384 384 '''
385 385 return 0
386 386
387 387 @propertycache
388 388 def wvfs(self):
389 389 """return vfs to access the working directory of this subrepository
390 390 """
391 391 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
392 392
393 393 @propertycache
394 394 def _relpath(self):
395 395 """return path to this subrepository as seen from outermost repository
396 396 """
397 397 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
398 398
399 399 class hgsubrepo(abstractsubrepo):
400 400 def __init__(self, ctx, path, state, allowcreate):
401 401 super(hgsubrepo, self).__init__(ctx, path)
402 402 self._state = state
403 403 r = ctx.repo()
404 404 root = r.wjoin(path)
405 405 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
406 406 self._repo = hg.repository(r.baseui, root, create=create)
407 407
408 408 # Propagate the parent's --hidden option
409 409 if r is r.unfiltered():
410 410 self._repo = self._repo.unfiltered()
411 411
412 412 self.ui = self._repo.ui
413 413 for s, k in [('ui', 'commitsubrepos')]:
414 414 v = r.ui.config(s, k)
415 415 if v:
416 416 self.ui.setconfig(s, k, v, 'subrepo')
417 417 # internal config: ui._usedassubrepo
418 418 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
419 419 self._initrepo(r, state[0], create)
420 420
421 421 @annotatesubrepoerror
422 422 def addwebdirpath(self, serverpath, webconf):
423 423 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
424 424
425 425 def storeclean(self, path):
426 426 with self._repo.lock():
427 427 return self._storeclean(path)
428 428
429 429 def _storeclean(self, path):
430 430 clean = True
431 431 itercache = self._calcstorehash(path)
432 432 for filehash in self._readstorehashcache(path):
433 433 if filehash != next(itercache, None):
434 434 clean = False
435 435 break
436 436 if clean:
437 437 	            # if itercache is not exhausted here, the cached and current
438 438 	            # pull states have different sizes, so the store is not clean
439 439 clean = next(itercache, None) is None
440 440 return clean
441 441
442 442 def _calcstorehash(self, remotepath):
443 443 '''calculate a unique "store hash"
444 444
445 445 	        This method is used to detect when there are changes that may
446 446 require a push to a given remote path.'''
447 447 # sort the files that will be hashed in increasing (likely) file size
448 448 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
449 449 yield '# %s\n' % _expandedabspath(remotepath)
450 450 vfs = self._repo.vfs
451 451 for relname in filelist:
452 452 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
453 453 yield '%s = %s\n' % (relname, filehash)
454 454
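
The "store hash" built above is just a small text document: one header line naming the remote plus one 'relname = sha1' line per tracked store file, and _storeclean above compares those lines one by one against the cached copy. A stand-alone sketch of the same idea with a plain reader callback instead of a repo vfs (names are hypothetical):

    import hashlib

    def calcstorehash(remotepath, readfile):
        """Yield store-hash lines; readfile(relname) returns the file's bytes."""
        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
        yield '# %s\n' % remotepath
        for relname in filelist:
            digest = hashlib.sha1(readfile(relname)).hexdigest()
            yield '%s = %s\n' % (relname, digest)

    def storeclean(cachedlines, currentlines):
        """True when cached and freshly computed hashes match line for line."""
        current = iter(currentlines)
        for line in cachedlines:
            if line != next(current, None):
                return False
        return next(current, None) is None  # also require equal length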
455 455 @propertycache
456 456 def _cachestorehashvfs(self):
457 457 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
458 458
459 459 def _readstorehashcache(self, remotepath):
460 460 '''read the store hash cache for a given remote repository'''
461 461 cachefile = _getstorehashcachename(remotepath)
462 462 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
463 463
464 464 def _cachestorehash(self, remotepath):
465 465 '''cache the current store hash
466 466
467 467 Each remote repo requires its own store hash cache, because a subrepo
468 468 store may be "clean" versus a given remote repo, but not versus another
469 469 '''
470 470 cachefile = _getstorehashcachename(remotepath)
471 471 with self._repo.lock():
472 472 storehash = list(self._calcstorehash(remotepath))
473 473 vfs = self._cachestorehashvfs
474 474 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
475 475
476 476 def _getctx(self):
477 477 '''fetch the context for this subrepo revision, possibly a workingctx
478 478 '''
479 479 if self._ctx.rev() is None:
480 480 return self._repo[None] # workingctx if parent is workingctx
481 481 else:
482 482 rev = self._state[1]
483 483 return self._repo[rev]
484 484
485 485 @annotatesubrepoerror
486 486 def _initrepo(self, parentrepo, source, create):
487 487 self._repo._subparent = parentrepo
488 488 self._repo._subsource = source
489 489
490 490 if create:
491 491 lines = ['[paths]\n']
492 492
493 493 def addpathconfig(key, value):
494 494 if value:
495 495 lines.append('%s = %s\n' % (key, value))
496 496 self.ui.setconfig('paths', key, value, 'subrepo')
497 497
498 498 defpath = _abssource(self._repo, abort=False)
499 499 defpushpath = _abssource(self._repo, True, abort=False)
500 500 addpathconfig('default', defpath)
501 501 if defpath != defpushpath:
502 502 addpathconfig('default-push', defpushpath)
503 503
504 504 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
505 505
506 506 @annotatesubrepoerror
507 507 def add(self, ui, match, prefix, explicitonly, **opts):
508 508 return cmdutil.add(ui, self._repo, match,
509 509 self.wvfs.reljoin(prefix, self._path),
510 510 explicitonly, **opts)
511 511
512 512 @annotatesubrepoerror
513 def addremove(self, m, prefix, opts, dry_run, similarity):
513 def addremove(self, m, prefix, opts):
514 514 # In the same way as sub directories are processed, once in a subrepo,
515 515 	        # always enter any of its subrepos. Don't corrupt the options that will
516 516 # be used to process sibling subrepos however.
517 517 opts = copy.copy(opts)
518 518 opts['subrepos'] = True
519 519 return scmutil.addremove(self._repo, m,
520 self.wvfs.reljoin(prefix, self._path), opts,
521 dry_run, similarity)
520 self.wvfs.reljoin(prefix, self._path), opts)
522 521
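
This hunk drops the separate dry_run and similarity parameters from the subrepo call: the subrepo layer now just forwards the opts dict, and those values presumably travel inside it (e.g. opts['dry_run'], opts['similarity']). A hedged sketch of what a caller now looks like, assuming a Mercurial with the post-change signature (wrapper name is hypothetical):

    # before: scmutil.addremove(repo, matcher, prefix, opts, dry_run, similarity)
    # after:  the extra values ride along inside opts
    from mercurial import scmutil

    def runaddremove(repo, matcher, prefix, opts):
        return scmutil.addremove(repo, matcher, prefix, opts)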
523 522 @annotatesubrepoerror
524 523 def cat(self, match, fm, fntemplate, prefix, **opts):
525 524 rev = self._state[1]
526 525 ctx = self._repo[rev]
527 526 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
528 527 prefix, **opts)
529 528
530 529 @annotatesubrepoerror
531 530 def status(self, rev2, **opts):
532 531 try:
533 532 rev1 = self._state[1]
534 533 ctx1 = self._repo[rev1]
535 534 ctx2 = self._repo[rev2]
536 535 return self._repo.status(ctx1, ctx2, **opts)
537 536 except error.RepoLookupError as inst:
538 537 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
539 538 % (inst, subrelpath(self)))
540 539 return scmutil.status([], [], [], [], [], [], [])
541 540
542 541 @annotatesubrepoerror
543 542 def diff(self, ui, diffopts, node2, match, prefix, **opts):
544 543 try:
545 544 node1 = node.bin(self._state[1])
546 545 # We currently expect node2 to come from substate and be
547 546 # in hex format
548 547 if node2 is not None:
549 548 node2 = node.bin(node2)
550 549 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
551 550 node1, node2, match,
552 551 prefix=posixpath.join(prefix, self._path),
553 552 listsubrepos=True, **opts)
554 553 except error.RepoLookupError as inst:
555 554 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
556 555 % (inst, subrelpath(self)))
557 556
558 557 @annotatesubrepoerror
559 558 def archive(self, archiver, prefix, match=None, decode=True):
560 559 self._get(self._state + ('hg',))
561 560 files = self.files()
562 561 if match:
563 562 files = [f for f in files if match(f)]
564 563 rev = self._state[1]
565 564 ctx = self._repo[rev]
566 565 scmutil.fileprefetchhooks(self._repo, ctx, files)
567 566 total = abstractsubrepo.archive(self, archiver, prefix, match)
568 567 for subpath in ctx.substate:
569 568 s = subrepo(ctx, subpath, True)
570 569 submatch = matchmod.subdirmatcher(subpath, match)
571 570 total += s.archive(archiver, prefix + self._path + '/', submatch,
572 571 decode)
573 572 return total
574 573
575 574 @annotatesubrepoerror
576 575 def dirty(self, ignoreupdate=False, missing=False):
577 576 r = self._state[1]
578 577 if r == '' and not ignoreupdate: # no state recorded
579 578 return True
580 579 w = self._repo[None]
581 580 if r != w.p1().hex() and not ignoreupdate:
582 581 # different version checked out
583 582 return True
584 583 return w.dirty(missing=missing) # working directory changed
585 584
586 585 def basestate(self):
587 586 return self._repo['.'].hex()
588 587
589 588 def checknested(self, path):
590 589 return self._repo._checknested(self._repo.wjoin(path))
591 590
592 591 @annotatesubrepoerror
593 592 def commit(self, text, user, date):
594 593 # don't bother committing in the subrepo if it's only been
595 594 # updated
596 595 if not self.dirty(True):
597 596 return self._repo['.'].hex()
598 597 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
599 598 n = self._repo.commit(text, user, date)
600 599 if not n:
601 600 return self._repo['.'].hex() # different version checked out
602 601 return node.hex(n)
603 602
604 603 @annotatesubrepoerror
605 604 def phase(self, state):
606 605 return self._repo[state or '.'].phase()
607 606
608 607 @annotatesubrepoerror
609 608 def remove(self):
610 609 # we can't fully delete the repository as it may contain
611 610 # local-only history
612 611 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
613 612 hg.clean(self._repo, node.nullid, False)
614 613
615 614 def _get(self, state):
616 615 source, revision, kind = state
617 616 parentrepo = self._repo._subparent
618 617
619 618 if revision in self._repo.unfiltered():
620 619 # Allow shared subrepos tracked at null to setup the sharedpath
621 620 if len(self._repo) != 0 or not parentrepo.shared():
622 621 return True
623 622 self._repo._subsource = source
624 623 srcurl = _abssource(self._repo)
625 624 other = hg.peer(self._repo, {}, srcurl)
626 625 if len(self._repo) == 0:
627 626 # use self._repo.vfs instead of self.wvfs to remove .hg only
628 627 self._repo.vfs.rmtree()
629 628
630 629 # A remote subrepo could be shared if there is a local copy
631 630 # relative to the parent's share source. But clone pooling doesn't
632 631 # assemble the repos in a tree, so that can't be consistently done.
633 632 # A simpler option is for the user to configure clone pooling, and
634 633 # work with that.
635 634 if parentrepo.shared() and hg.islocal(srcurl):
636 635 self.ui.status(_('sharing subrepo %s from %s\n')
637 636 % (subrelpath(self), srcurl))
638 637 shared = hg.share(self._repo._subparent.baseui,
639 638 other, self._repo.root,
640 639 update=False, bookmarks=False)
641 640 self._repo = shared.local()
642 641 else:
643 642 # TODO: find a common place for this and this code in the
644 643 # share.py wrap of the clone command.
645 644 if parentrepo.shared():
646 645 pool = self.ui.config('share', 'pool')
647 646 if pool:
648 647 pool = util.expandpath(pool)
649 648
650 649 shareopts = {
651 650 'pool': pool,
652 651 'mode': self.ui.config('share', 'poolnaming'),
653 652 }
654 653 else:
655 654 shareopts = {}
656 655
657 656 self.ui.status(_('cloning subrepo %s from %s\n')
658 657 % (subrelpath(self), srcurl))
659 658 other, cloned = hg.clone(self._repo._subparent.baseui, {},
660 659 other, self._repo.root,
661 660 update=False, shareopts=shareopts)
662 661 self._repo = cloned.local()
663 662 self._initrepo(parentrepo, source, create=True)
664 663 self._cachestorehash(srcurl)
665 664 else:
666 665 self.ui.status(_('pulling subrepo %s from %s\n')
667 666 % (subrelpath(self), srcurl))
668 667 cleansub = self.storeclean(srcurl)
669 668 exchange.pull(self._repo, other)
670 669 if cleansub:
671 670 # keep the repo clean after pull
672 671 self._cachestorehash(srcurl)
673 672 return False
674 673
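
When the parent repository is itself a share, the clone branch above consults share.pool and share.poolnaming so the subrepo clone can land in the configured clone pool. A sketch of enabling that in the parent's configuration (path and naming mode are illustrative):

    [share]
    pool = /home/user/.cache/hg-share-pool
    poolnaming = identity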
675 674 @annotatesubrepoerror
676 675 def get(self, state, overwrite=False):
677 676 inrepo = self._get(state)
678 677 source, revision, kind = state
679 678 repo = self._repo
680 679 repo.ui.debug("getting subrepo %s\n" % self._path)
681 680 if inrepo:
682 681 urepo = repo.unfiltered()
683 682 ctx = urepo[revision]
684 683 if ctx.hidden():
685 684 urepo.ui.warn(
686 685 _('revision %s in subrepository "%s" is hidden\n') \
687 686 % (revision[0:12], self._path))
688 687 repo = urepo
689 688 hg.updaterepo(repo, revision, overwrite)
690 689
691 690 @annotatesubrepoerror
692 691 def merge(self, state):
693 692 self._get(state)
694 693 cur = self._repo['.']
695 694 dst = self._repo[state[1]]
696 695 anc = dst.ancestor(cur)
697 696
698 697 def mergefunc():
699 698 if anc == cur and dst.branch() == cur.branch():
700 699 self.ui.debug('updating subrepository "%s"\n'
701 700 % subrelpath(self))
702 701 hg.update(self._repo, state[1])
703 702 elif anc == dst:
704 703 self.ui.debug('skipping subrepository "%s"\n'
705 704 % subrelpath(self))
706 705 else:
707 706 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
708 707 hg.merge(self._repo, state[1], remind=False)
709 708
710 709 wctx = self._repo[None]
711 710 if self.dirty():
712 711 if anc != dst:
713 712 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
714 713 mergefunc()
715 714 else:
716 715 mergefunc()
717 716 else:
718 717 mergefunc()
719 718
720 719 @annotatesubrepoerror
721 720 def push(self, opts):
722 721 force = opts.get('force')
723 722 newbranch = opts.get('new_branch')
724 723 ssh = opts.get('ssh')
725 724
726 725 # push subrepos depth-first for coherent ordering
727 726 c = self._repo['.']
728 727 subs = c.substate # only repos that are committed
729 728 for s in sorted(subs):
730 729 if c.sub(s).push(opts) == 0:
731 730 return False
732 731
733 732 dsturl = _abssource(self._repo, True)
734 733 if not force:
735 734 if self.storeclean(dsturl):
736 735 self.ui.status(
737 736 _('no changes made to subrepo %s since last push to %s\n')
738 737 % (subrelpath(self), dsturl))
739 738 return None
740 739 self.ui.status(_('pushing subrepo %s to %s\n') %
741 740 (subrelpath(self), dsturl))
742 741 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
743 742 res = exchange.push(self._repo, other, force, newbranch=newbranch)
744 743
745 744 # the repo is now clean
746 745 self._cachestorehash(dsturl)
747 746 return res.cgresult
748 747
749 748 @annotatesubrepoerror
750 749 def outgoing(self, ui, dest, opts):
751 750 if 'rev' in opts or 'branch' in opts:
752 751 opts = copy.copy(opts)
753 752 opts.pop('rev', None)
754 753 opts.pop('branch', None)
755 754 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
756 755
757 756 @annotatesubrepoerror
758 757 def incoming(self, ui, source, opts):
759 758 if 'rev' in opts or 'branch' in opts:
760 759 opts = copy.copy(opts)
761 760 opts.pop('rev', None)
762 761 opts.pop('branch', None)
763 762 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
764 763
765 764 @annotatesubrepoerror
766 765 def files(self):
767 766 rev = self._state[1]
768 767 ctx = self._repo[rev]
769 768 return ctx.manifest().keys()
770 769
771 770 def filedata(self, name, decode):
772 771 rev = self._state[1]
773 772 data = self._repo[rev][name].data()
774 773 if decode:
775 774 data = self._repo.wwritedata(name, data)
776 775 return data
777 776
778 777 def fileflags(self, name):
779 778 rev = self._state[1]
780 779 ctx = self._repo[rev]
781 780 return ctx.flags(name)
782 781
783 782 @annotatesubrepoerror
784 783 def printfiles(self, ui, m, fm, fmt, subrepos):
785 784 # If the parent context is a workingctx, use the workingctx here for
786 785 # consistency.
787 786 if self._ctx.rev() is None:
788 787 ctx = self._repo[None]
789 788 else:
790 789 rev = self._state[1]
791 790 ctx = self._repo[rev]
792 791 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
793 792
794 793 @annotatesubrepoerror
795 794 def getfileset(self, expr):
796 795 if self._ctx.rev() is None:
797 796 ctx = self._repo[None]
798 797 else:
799 798 rev = self._state[1]
800 799 ctx = self._repo[rev]
801 800
802 801 files = ctx.getfileset(expr)
803 802
804 803 for subpath in ctx.substate:
805 804 sub = ctx.sub(subpath)
806 805
807 806 try:
808 807 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
809 808 except error.LookupError:
810 809 self.ui.status(_("skipping missing subrepository: %s\n")
811 810 % self.wvfs.reljoin(reporelpath(self), subpath))
812 811 return files
813 812
814 813 def walk(self, match):
815 814 ctx = self._repo[None]
816 815 return ctx.walk(match)
817 816
818 817 @annotatesubrepoerror
819 818 def forget(self, match, prefix, dryrun):
820 819 return cmdutil.forget(self.ui, self._repo, match,
821 820 self.wvfs.reljoin(prefix, self._path),
822 821 True, dryrun=dryrun)
823 822
824 823 @annotatesubrepoerror
825 824 def removefiles(self, matcher, prefix, after, force, subrepos,
826 825 dryrun, warnings):
827 826 return cmdutil.remove(self.ui, self._repo, matcher,
828 827 self.wvfs.reljoin(prefix, self._path),
829 828 after, force, subrepos, dryrun)
830 829
831 830 @annotatesubrepoerror
832 831 def revert(self, substate, *pats, **opts):
833 832 # reverting a subrepo is a 2 step process:
834 833 # 1. if the no_backup is not set, revert all modified
835 834 # files inside the subrepo
836 835 # 2. update the subrepo to the revision specified in
837 836 # the corresponding substate dictionary
838 837 self.ui.status(_('reverting subrepo %s\n') % substate[0])
839 838 if not opts.get(r'no_backup'):
840 839 # Revert all files on the subrepo, creating backups
841 840 # Note that this will not recursively revert subrepos
842 841 # We could do it if there was a set:subrepos() predicate
843 842 opts = opts.copy()
844 843 opts[r'date'] = None
845 844 opts[r'rev'] = substate[1]
846 845
847 846 self.filerevert(*pats, **opts)
848 847
849 848 # Update the repo to the revision specified in the given substate
850 849 if not opts.get(r'dry_run'):
851 850 self.get(substate, overwrite=True)
852 851
853 852 def filerevert(self, *pats, **opts):
854 853 ctx = self._repo[opts[r'rev']]
855 854 parents = self._repo.dirstate.parents()
856 855 if opts.get(r'all'):
857 856 pats = ['set:modified()']
858 857 else:
859 858 pats = []
860 859 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
861 860
862 861 def shortid(self, revid):
863 862 return revid[:12]
864 863
865 864 @annotatesubrepoerror
866 865 def unshare(self):
867 866 # subrepo inherently violates our import layering rules
868 867 # because it wants to make repo objects from deep inside the stack
869 868 # so we manually delay the circular imports to not break
870 869 # scripts that don't use our demand-loading
871 870 global hg
872 871 from . import hg as h
873 872 hg = h
874 873
875 874 # Nothing prevents a user from sharing in a repo, and then making that a
876 875 # subrepo. Alternately, the previous unshare attempt may have failed
877 876 # part way through. So recurse whether or not this layer is shared.
878 877 if self._repo.shared():
879 878 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
880 879
881 880 hg.unshare(self.ui, self._repo)
882 881
883 882 def verify(self):
884 883 try:
885 884 rev = self._state[1]
886 885 ctx = self._repo.unfiltered()[rev]
887 886 if ctx.hidden():
888 887 # Since hidden revisions aren't pushed/pulled, it seems worth an
889 888 # explicit warning.
890 889 ui = self._repo.ui
891 890 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
892 891 (self._relpath, node.short(self._ctx.node())))
893 892 return 0
894 893 except error.RepoLookupError:
895 894 # A missing subrepo revision may be a case of needing to pull it, so
896 895 # don't treat this as an error.
897 896 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
898 897 (self._relpath, node.short(self._ctx.node())))
899 898 return 0
900 899
901 900 @propertycache
902 901 def wvfs(self):
903 902 """return own wvfs for efficiency and consistency
904 903 """
905 904 return self._repo.wvfs
906 905
907 906 @propertycache
908 907 def _relpath(self):
909 908 """return path to this subrepository as seen from outermost repository
910 909 """
911 910 # Keep consistent dir separators by avoiding vfs.join(self._path)
912 911 return reporelpath(self._repo)
913 912
914 913 class svnsubrepo(abstractsubrepo):
915 914 def __init__(self, ctx, path, state, allowcreate):
916 915 super(svnsubrepo, self).__init__(ctx, path)
917 916 self._state = state
918 917 self._exe = procutil.findexe('svn')
919 918 if not self._exe:
920 919 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
921 920 % self._path)
922 921
923 922 def _svncommand(self, commands, filename='', failok=False):
924 923 cmd = [self._exe]
925 924 extrakw = {}
926 925 if not self.ui.interactive():
927 926 # Making stdin be a pipe should prevent svn from behaving
928 927 # interactively even if we can't pass --non-interactive.
929 928 extrakw[r'stdin'] = subprocess.PIPE
930 929 # Starting in svn 1.5 --non-interactive is a global flag
931 930 # instead of being per-command, but we need to support 1.4 so
932 931 # we have to be intelligent about what commands take
933 932 # --non-interactive.
934 933 if commands[0] in ('update', 'checkout', 'commit'):
935 934 cmd.append('--non-interactive')
936 935 cmd.extend(commands)
937 936 if filename is not None:
938 937 path = self.wvfs.reljoin(self._ctx.repo().origroot,
939 938 self._path, filename)
940 939 cmd.append(path)
941 940 env = dict(encoding.environ)
942 941 # Avoid localized output, preserve current locale for everything else.
943 942 lc_all = env.get('LC_ALL')
944 943 if lc_all:
945 944 env['LANG'] = lc_all
946 945 del env['LC_ALL']
947 946 env['LC_MESSAGES'] = 'C'
948 947 p = subprocess.Popen(cmd, bufsize=-1, close_fds=procutil.closefds,
949 948 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
950 949 universal_newlines=True, env=env, **extrakw)
951 950 stdout, stderr = p.communicate()
952 951 stderr = stderr.strip()
953 952 if not failok:
954 953 if p.returncode:
955 954 raise error.Abort(stderr or 'exited with code %d'
956 955 % p.returncode)
957 956 if stderr:
958 957 self.ui.warn(stderr + '\n')
959 958 return stdout, stderr
960 959
961 960 @propertycache
962 961 def _svnversion(self):
963 962 output, err = self._svncommand(['--version', '--quiet'], filename=None)
964 963 m = re.search(br'^(\d+)\.(\d+)', output)
965 964 if not m:
966 965 raise error.Abort(_('cannot retrieve svn tool version'))
967 966 return (int(m.group(1)), int(m.group(2)))
968 967
969 968 def _svnmissing(self):
970 969 return not self.wvfs.exists('.svn')
971 970
972 971 def _wcrevs(self):
973 972 # Get the working directory revision as well as the last
974 973 # commit revision so we can compare the subrepo state with
975 974 # both. We used to store the working directory one.
976 975 output, err = self._svncommand(['info', '--xml'])
977 976 doc = xml.dom.minidom.parseString(output)
978 977 entries = doc.getElementsByTagName('entry')
979 978 lastrev, rev = '0', '0'
980 979 if entries:
981 980 rev = str(entries[0].getAttribute('revision')) or '0'
982 981 commits = entries[0].getElementsByTagName('commit')
983 982 if commits:
984 983 lastrev = str(commits[0].getAttribute('revision')) or '0'
985 984 return (lastrev, rev)
986 985
987 986 def _wcrev(self):
988 987 return self._wcrevs()[0]
989 988
990 989 def _wcchanged(self):
991 990 """Return (changes, extchanges, missing) where changes is True
992 991 if the working directory was changed, extchanges is
993 992 True if any of these changes concern an external entry and missing
994 993 is True if any change is a missing entry.
995 994 """
996 995 output, err = self._svncommand(['status', '--xml'])
997 996 externals, changes, missing = [], [], []
998 997 doc = xml.dom.minidom.parseString(output)
999 998 for e in doc.getElementsByTagName('entry'):
1000 999 s = e.getElementsByTagName('wc-status')
1001 1000 if not s:
1002 1001 continue
1003 1002 item = s[0].getAttribute('item')
1004 1003 props = s[0].getAttribute('props')
1005 1004 path = e.getAttribute('path')
1006 1005 if item == 'external':
1007 1006 externals.append(path)
1008 1007 elif item == 'missing':
1009 1008 missing.append(path)
1010 1009 if (item not in ('', 'normal', 'unversioned', 'external')
1011 1010 or props not in ('', 'none', 'normal')):
1012 1011 changes.append(path)
1013 1012 for path in changes:
1014 1013 for ext in externals:
1015 1014 if path == ext or path.startswith(ext + pycompat.ossep):
1016 1015 return True, True, bool(missing)
1017 1016 return bool(changes), False, bool(missing)
1018 1017
1019 1018 @annotatesubrepoerror
1020 1019 def dirty(self, ignoreupdate=False, missing=False):
1021 1020 if self._svnmissing():
1022 1021 return self._state[1] != ''
1023 1022 wcchanged = self._wcchanged()
1024 1023 changed = wcchanged[0] or (missing and wcchanged[2])
1025 1024 if not changed:
1026 1025 if self._state[1] in self._wcrevs() or ignoreupdate:
1027 1026 return False
1028 1027 return True
1029 1028
1030 1029 def basestate(self):
1031 1030 lastrev, rev = self._wcrevs()
1032 1031 if lastrev != rev:
1033 1032 	            # Last committed rev is not the same as rev. We would
1034 1033 	            # like to take lastrev but we do not know if the subrepo
1035 1034 	            # URL exists at lastrev. Test it and fall back to rev if it
1036 1035 	            # is not there.
1037 1036 try:
1038 1037 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1039 1038 return lastrev
1040 1039 except error.Abort:
1041 1040 pass
1042 1041 return rev
1043 1042
1044 1043 @annotatesubrepoerror
1045 1044 def commit(self, text, user, date):
1046 1045 # user and date are out of our hands since svn is centralized
1047 1046 changed, extchanged, missing = self._wcchanged()
1048 1047 if not changed:
1049 1048 return self.basestate()
1050 1049 if extchanged:
1051 1050 # Do not try to commit externals
1052 1051 raise error.Abort(_('cannot commit svn externals'))
1053 1052 if missing:
1054 1053 # svn can commit with missing entries but aborting like hg
1055 1054 # seems a better approach.
1056 1055 raise error.Abort(_('cannot commit missing svn entries'))
1057 1056 commitinfo, err = self._svncommand(['commit', '-m', text])
1058 1057 self.ui.status(commitinfo)
1059 1058 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1060 1059 if not newrev:
1061 1060 if not commitinfo.strip():
1062 1061 	                # Sometimes, our definition of "changed" differs from
1063 1062 	                # svn's. For instance, svn ignores missing files
1064 1063 # when committing. If there are only missing files, no
1065 1064 # commit is made, no output and no error code.
1066 1065 raise error.Abort(_('failed to commit svn changes'))
1067 1066 raise error.Abort(commitinfo.splitlines()[-1])
1068 1067 newrev = newrev.groups()[0]
1069 1068 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1070 1069 return newrev
1071 1070
1072 1071 @annotatesubrepoerror
1073 1072 def remove(self):
1074 1073 if self.dirty():
1075 1074 self.ui.warn(_('not removing repo %s because '
1076 1075 'it has changes.\n') % self._path)
1077 1076 return
1078 1077 self.ui.note(_('removing subrepo %s\n') % self._path)
1079 1078
1080 1079 self.wvfs.rmtree(forcibly=True)
1081 1080 try:
1082 1081 pwvfs = self._ctx.repo().wvfs
1083 1082 pwvfs.removedirs(pwvfs.dirname(self._path))
1084 1083 except OSError:
1085 1084 pass
1086 1085
1087 1086 @annotatesubrepoerror
1088 1087 def get(self, state, overwrite=False):
1089 1088 if overwrite:
1090 1089 self._svncommand(['revert', '--recursive'])
1091 1090 args = ['checkout']
1092 1091 if self._svnversion >= (1, 5):
1093 1092 args.append('--force')
1094 1093 # The revision must be specified at the end of the URL to properly
1095 1094 # update to a directory which has since been deleted and recreated.
1096 1095 args.append('%s@%s' % (state[0], state[1]))
1097 1096
1098 1097 # SEC: check that the ssh url is safe
1099 1098 util.checksafessh(state[0])
1100 1099
1101 1100 status, err = self._svncommand(args, failok=True)
1102 1101 _sanitize(self.ui, self.wvfs, '.svn')
1103 1102 if not re.search('Checked out revision [0-9]+.', status):
1104 1103 if ('is already a working copy for a different URL' in err
1105 1104 and (self._wcchanged()[:2] == (False, False))):
1106 1105 # obstructed but clean working copy, so just blow it away.
1107 1106 self.remove()
1108 1107 self.get(state, overwrite=False)
1109 1108 return
1110 1109 raise error.Abort((status or err).splitlines()[-1])
1111 1110 self.ui.status(status)
1112 1111
1113 1112 @annotatesubrepoerror
1114 1113 def merge(self, state):
1115 1114 old = self._state[1]
1116 1115 new = state[1]
1117 1116 wcrev = self._wcrev()
1118 1117 if new != wcrev:
1119 1118 dirty = old == wcrev or self._wcchanged()[0]
1120 1119 if _updateprompt(self.ui, self, dirty, wcrev, new):
1121 1120 self.get(state, False)
1122 1121
1123 1122 def push(self, opts):
1124 1123 # push is a no-op for SVN
1125 1124 return True
1126 1125
1127 1126 @annotatesubrepoerror
1128 1127 def files(self):
1129 1128 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1130 1129 doc = xml.dom.minidom.parseString(output)
1131 1130 paths = []
1132 1131 for e in doc.getElementsByTagName('entry'):
1133 1132 kind = pycompat.bytestr(e.getAttribute('kind'))
1134 1133 if kind != 'file':
1135 1134 continue
1136 1135 name = ''.join(c.data for c
1137 1136 in e.getElementsByTagName('name')[0].childNodes
1138 1137 if c.nodeType == c.TEXT_NODE)
1139 1138 paths.append(name.encode('utf-8'))
1140 1139 return paths
1141 1140
1142 1141 def filedata(self, name, decode):
1143 1142 return self._svncommand(['cat'], name)[0]
1144 1143
1145 1144
1146 1145 class gitsubrepo(abstractsubrepo):
1147 1146 def __init__(self, ctx, path, state, allowcreate):
1148 1147 super(gitsubrepo, self).__init__(ctx, path)
1149 1148 self._state = state
1150 1149 self._abspath = ctx.repo().wjoin(path)
1151 1150 self._subparent = ctx.repo()
1152 1151 self._ensuregit()
1153 1152
1154 1153 def _ensuregit(self):
1155 1154 try:
1156 1155 self._gitexecutable = 'git'
1157 1156 out, err = self._gitnodir(['--version'])
1158 1157 except OSError as e:
1159 1158 genericerror = _("error executing git for subrepo '%s': %s")
1160 1159 notfoundhint = _("check git is installed and in your PATH")
1161 1160 if e.errno != errno.ENOENT:
1162 1161 raise error.Abort(genericerror % (
1163 1162 self._path, encoding.strtolocal(e.strerror)))
1164 1163 elif pycompat.iswindows:
1165 1164 try:
1166 1165 self._gitexecutable = 'git.cmd'
1167 1166 out, err = self._gitnodir(['--version'])
1168 1167 except OSError as e2:
1169 1168 if e2.errno == errno.ENOENT:
1170 1169 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1171 1170 " for subrepo '%s'") % self._path,
1172 1171 hint=notfoundhint)
1173 1172 else:
1174 1173 raise error.Abort(genericerror % (self._path,
1175 1174 encoding.strtolocal(e2.strerror)))
1176 1175 else:
1177 1176 raise error.Abort(_("couldn't find git for subrepo '%s'")
1178 1177 % self._path, hint=notfoundhint)
1179 1178 versionstatus = self._checkversion(out)
1180 1179 if versionstatus == 'unknown':
1181 1180 self.ui.warn(_('cannot retrieve git version\n'))
1182 1181 elif versionstatus == 'abort':
1183 1182 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1184 1183 elif versionstatus == 'warning':
1185 1184 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1186 1185
1187 1186 @staticmethod
1188 1187 def _gitversion(out):
1189 1188 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1190 1189 if m:
1191 1190 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1192 1191
1193 1192 m = re.search(br'^git version (\d+)\.(\d+)', out)
1194 1193 if m:
1195 1194 return (int(m.group(1)), int(m.group(2)), 0)
1196 1195
1197 1196 return -1
1198 1197
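
_gitversion turns 'git version X.Y[.Z]...' output into a comparable tuple, padding a missing patch level with 0 and returning -1 when nothing matches; the doctests on _checkversion below exercise it indirectly. A few direct, doctest-style illustrations (output values follow from the regexes above):

    >>> gitsubrepo._gitversion(b'git version 2.39.1')
    (2, 39, 1)
    >>> gitsubrepo._gitversion(b'git version 1.9-rc0')
    (1, 9, 0)
    >>> gitsubrepo._gitversion(b'no git here')
    -1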
1199 1198 @staticmethod
1200 1199 def _checkversion(out):
1201 1200 '''ensure git version is new enough
1202 1201
1203 1202 >>> _checkversion = gitsubrepo._checkversion
1204 1203 >>> _checkversion(b'git version 1.6.0')
1205 1204 'ok'
1206 1205 >>> _checkversion(b'git version 1.8.5')
1207 1206 'ok'
1208 1207 >>> _checkversion(b'git version 1.4.0')
1209 1208 'abort'
1210 1209 >>> _checkversion(b'git version 1.5.0')
1211 1210 'warning'
1212 1211 >>> _checkversion(b'git version 1.9-rc0')
1213 1212 'ok'
1214 1213 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1215 1214 'ok'
1216 1215 >>> _checkversion(b'git version 1.9.0.GIT')
1217 1216 'ok'
1218 1217 >>> _checkversion(b'git version 12345')
1219 1218 'unknown'
1220 1219 >>> _checkversion(b'no')
1221 1220 'unknown'
1222 1221 '''
1223 1222 version = gitsubrepo._gitversion(out)
1224 1223 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1225 1224 # despite the docstring comment. For now, error on 1.4.0, warn on
1226 1225 # 1.5.0 but attempt to continue.
1227 1226 if version == -1:
1228 1227 return 'unknown'
1229 1228 if version < (1, 5, 0):
1230 1229 return 'abort'
1231 1230 elif version < (1, 6, 0):
1232 1231 return 'warning'
1233 1232 return 'ok'
1234 1233
1235 1234 def _gitcommand(self, commands, env=None, stream=False):
1236 1235 return self._gitdir(commands, env=env, stream=stream)[0]
1237 1236
1238 1237 def _gitdir(self, commands, env=None, stream=False):
1239 1238 return self._gitnodir(commands, env=env, stream=stream,
1240 1239 cwd=self._abspath)
1241 1240
1242 1241 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1243 1242 """Calls the git command
1244 1243
1245 1244 	        The method tries to call the git command. Versions prior to 1.6.0
1246 1245 	        are not supported and will very probably fail.
1247 1246 """
1248 1247 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1249 1248 if env is None:
1250 1249 env = encoding.environ.copy()
1251 1250 # disable localization for Git output (issue5176)
1252 1251 env['LC_ALL'] = 'C'
1253 1252 # fix for Git CVE-2015-7545
1254 1253 if 'GIT_ALLOW_PROTOCOL' not in env:
1255 1254 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1256 1255 # unless ui.quiet is set, print git's stderr,
1257 1256 # which is mostly progress and useful info
1258 1257 errpipe = None
1259 1258 if self.ui.quiet:
1260 1259 errpipe = open(os.devnull, 'w')
1261 1260 if self.ui._colormode and len(commands) and commands[0] == "diff":
1262 1261 # insert the argument in the front,
1263 1262 # the end of git diff arguments is used for paths
1264 1263 commands.insert(1, '--color')
1265 1264 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1266 1265 cwd=cwd, env=env, close_fds=procutil.closefds,
1267 1266 stdout=subprocess.PIPE, stderr=errpipe)
1268 1267 if stream:
1269 1268 return p.stdout, None
1270 1269
1271 1270 retdata = p.stdout.read().strip()
1272 1271 # wait for the child to exit to avoid race condition.
1273 1272 p.wait()
1274 1273
1275 1274 if p.returncode != 0 and p.returncode != 1:
1276 1275 # there are certain error codes that are ok
1277 1276 command = commands[0]
1278 1277 if command in ('cat-file', 'symbolic-ref'):
1279 1278 return retdata, p.returncode
1280 1279 # for all others, abort
1281 1280 raise error.Abort(_('git %s error %d in %s') %
1282 1281 (command, p.returncode, self._relpath))
1283 1282
1284 1283 return retdata, p.returncode
1285 1284
1286 1285 def _gitmissing(self):
1287 1286 return not self.wvfs.exists('.git')
1288 1287
1289 1288 def _gitstate(self):
1290 1289 return self._gitcommand(['rev-parse', 'HEAD'])
1291 1290
1292 1291 def _gitcurrentbranch(self):
1293 1292 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1294 1293 if err:
1295 1294 current = None
1296 1295 return current
1297 1296
1298 1297 def _gitremote(self, remote):
1299 1298 out = self._gitcommand(['remote', 'show', '-n', remote])
1300 1299 line = out.split('\n')[1]
1301 1300 i = line.index('URL: ') + len('URL: ')
1302 1301 return line[i:]
1303 1302
1304 1303 def _githavelocally(self, revision):
1305 1304 out, code = self._gitdir(['cat-file', '-e', revision])
1306 1305 return code == 0
1307 1306
1308 1307 def _gitisancestor(self, r1, r2):
1309 1308 base = self._gitcommand(['merge-base', r1, r2])
1310 1309 return base == r1
1311 1310
1312 1311 def _gitisbare(self):
1313 1312 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1314 1313
1315 1314 def _gitupdatestat(self):
1316 1315 """This must be run before git diff-index.
1317 1316 diff-index only looks at changes to file stat;
1318 1317 this command looks at file contents and updates the stat."""
1319 1318 self._gitcommand(['update-index', '-q', '--refresh'])
1320 1319
1321 1320 def _gitbranchmap(self):
1322 1321 '''returns 2 things:
1323 1322 a map from git branch to revision
1324 1323 a map from revision to branches'''
1325 1324 branch2rev = {}
1326 1325 rev2branch = {}
1327 1326
1328 1327 out = self._gitcommand(['for-each-ref', '--format',
1329 1328 '%(objectname) %(refname)'])
1330 1329 for line in out.split('\n'):
1331 1330 revision, ref = line.split(' ')
1332 1331 if (not ref.startswith('refs/heads/') and
1333 1332 not ref.startswith('refs/remotes/')):
1334 1333 continue
1335 1334 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1336 1335 continue # ignore remote/HEAD redirects
1337 1336 branch2rev[ref] = revision
1338 1337 rev2branch.setdefault(revision, []).append(ref)
1339 1338 return branch2rev, rev2branch
1340 1339
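
A stand-alone illustration of the two maps _gitbranchmap builds from "git for-each-ref --format '%(objectname) %(refname)'" output (sample hashes are made up):

    out = (
        'a1b2c3d refs/heads/master\n'
        'a1b2c3d refs/remotes/origin/master\n'
        'e4f5a6b refs/remotes/origin/topic\n'
        'deadbee refs/tags/v1.0\n'   # tag refs are skipped by the filter below
    )
    branch2rev, rev2branch = {}, {}
    for line in out.strip().split('\n'):
        revision, ref = line.split(' ')
        if not ref.startswith(('refs/heads/', 'refs/remotes/')):
            continue
        if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
            continue
        branch2rev[ref] = revision
        rev2branch.setdefault(revision, []).append(ref)
    print(rev2branch['a1b2c3d'])
    # ['refs/heads/master', 'refs/remotes/origin/master']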
1341 1340 def _gittracking(self, branches):
1342 1341 'return map of remote branch to local tracking branch'
1343 1342 # assumes no more than one local tracking branch for each remote
1344 1343 tracking = {}
1345 1344 for b in branches:
1346 1345 if b.startswith('refs/remotes/'):
1347 1346 continue
1348 1347 bname = b.split('/', 2)[2]
1349 1348 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1350 1349 if remote:
1351 1350 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1352 1351 tracking['refs/remotes/%s/%s' %
1353 1352 (remote, ref.split('/', 2)[2])] = b
1354 1353 return tracking
1355 1354
1356 1355 def _abssource(self, source):
1357 1356 if '://' not in source:
1358 1357 # recognize the scp syntax as an absolute source
1359 1358 colon = source.find(':')
1360 1359 if colon != -1 and '/' not in source[:colon]:
1361 1360 return source
1362 1361 self._subsource = source
1363 1362 return _abssource(self)
1364 1363
1365 1364 def _fetch(self, source, revision):
1366 1365 if self._gitmissing():
1367 1366 # SEC: check for safe ssh url
1368 1367 util.checksafessh(source)
1369 1368
1370 1369 source = self._abssource(source)
1371 1370 self.ui.status(_('cloning subrepo %s from %s\n') %
1372 1371 (self._relpath, source))
1373 1372 self._gitnodir(['clone', source, self._abspath])
1374 1373 if self._githavelocally(revision):
1375 1374 return
1376 1375 self.ui.status(_('pulling subrepo %s from %s\n') %
1377 1376 (self._relpath, self._gitremote('origin')))
1378 1377 # try only origin: the originally cloned repo
1379 1378 self._gitcommand(['fetch'])
1380 1379 if not self._githavelocally(revision):
1381 1380 raise error.Abort(_('revision %s does not exist in subrepository '
1382 1381 '"%s"\n') % (revision, self._relpath))
1383 1382
1384 1383 @annotatesubrepoerror
1385 1384 def dirty(self, ignoreupdate=False, missing=False):
1386 1385 if self._gitmissing():
1387 1386 return self._state[1] != ''
1388 1387 if self._gitisbare():
1389 1388 return True
1390 1389 if not ignoreupdate and self._state[1] != self._gitstate():
1391 1390 # different version checked out
1392 1391 return True
1393 1392 # check for staged changes or modified files; ignore untracked files
1394 1393 self._gitupdatestat()
1395 1394 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1396 1395 return code == 1
1397 1396
1398 1397 def basestate(self):
1399 1398 return self._gitstate()
1400 1399
1401 1400 @annotatesubrepoerror
1402 1401 def get(self, state, overwrite=False):
1403 1402 source, revision, kind = state
1404 1403 if not revision:
1405 1404 self.remove()
1406 1405 return
1407 1406 self._fetch(source, revision)
1408 1407 # if the repo was set to be bare, unbare it
1409 1408 if self._gitisbare():
1410 1409 self._gitcommand(['config', 'core.bare', 'false'])
1411 1410 if self._gitstate() == revision:
1412 1411 self._gitcommand(['reset', '--hard', 'HEAD'])
1413 1412 return
1414 1413 elif self._gitstate() == revision:
1415 1414 if overwrite:
1416 1415 # first reset the index to unmark new files for commit, because
1417 1416 # reset --hard will otherwise throw away files added for commit,
1418 1417 # not just unmark them.
1419 1418 self._gitcommand(['reset', 'HEAD'])
1420 1419 self._gitcommand(['reset', '--hard', 'HEAD'])
1421 1420 return
1422 1421 branch2rev, rev2branch = self._gitbranchmap()
1423 1422
1424 1423 def checkout(args):
1425 1424 cmd = ['checkout']
1426 1425 if overwrite:
1427 1426 # first reset the index to unmark new files for commit, because
1428 1427 # the -f option will otherwise throw away files added for
1429 1428 # commit, not just unmark them.
1430 1429 self._gitcommand(['reset', 'HEAD'])
1431 1430 cmd.append('-f')
1432 1431 self._gitcommand(cmd + args)
1433 1432 _sanitize(self.ui, self.wvfs, '.git')
1434 1433
1435 1434 def rawcheckout():
1436 1435 # no branch to checkout, check it out with no branch
1437 1436 self.ui.warn(_('checking out detached HEAD in '
1438 1437 'subrepository "%s"\n') % self._relpath)
1439 1438 self.ui.warn(_('check out a git branch if you intend '
1440 1439 'to make changes\n'))
1441 1440 checkout(['-q', revision])
1442 1441
1443 1442 if revision not in rev2branch:
1444 1443 rawcheckout()
1445 1444 return
1446 1445 branches = rev2branch[revision]
1447 1446 firstlocalbranch = None
1448 1447 for b in branches:
1449 1448 if b == 'refs/heads/master':
1450 1449 # master trumps all other branches
1451 1450 checkout(['refs/heads/master'])
1452 1451 return
1453 1452 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1454 1453 firstlocalbranch = b
1455 1454 if firstlocalbranch:
1456 1455 checkout([firstlocalbranch])
1457 1456 return
1458 1457
1459 1458 tracking = self._gittracking(branch2rev.keys())
1460 1459 # choose a remote branch already tracked if possible
1461 1460 remote = branches[0]
1462 1461 if remote not in tracking:
1463 1462 for b in branches:
1464 1463 if b in tracking:
1465 1464 remote = b
1466 1465 break
1467 1466
1468 1467 if remote not in tracking:
1469 1468 # create a new local tracking branch
1470 1469 local = remote.split('/', 3)[3]
1471 1470 checkout(['-b', local, remote])
1472 1471 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1473 1472 # When updating to a tracked remote branch,
1474 1473 # if the local tracking branch is downstream of it,
1475 1474 # a normal `git pull` would have performed a "fast-forward merge"
1476 1475 # which is equivalent to updating the local branch to the remote.
1477 1476 # Since we are only looking at branching at update, we need to
1478 1477 # detect this situation and perform this action lazily.
1479 1478 if tracking[remote] != self._gitcurrentbranch():
1480 1479 checkout([tracking[remote]])
1481 1480 self._gitcommand(['merge', '--ff', remote])
1482 1481 _sanitize(self.ui, self.wvfs, '.git')
1483 1482 else:
1484 1483 # a real merge would be required, just checkout the revision
1485 1484 rawcheckout()
1486 1485
1487 1486 @annotatesubrepoerror
1488 1487 def commit(self, text, user, date):
1489 1488 if self._gitmissing():
1490 1489 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1491 1490 cmd = ['commit', '-a', '-m', text]
1492 1491 env = encoding.environ.copy()
1493 1492 if user:
1494 1493 cmd += ['--author', user]
1495 1494 if date:
1496 1495 # git's date parser silently ignores when seconds < 1e9
1497 1496 # convert to ISO8601
1498 1497 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1499 1498 '%Y-%m-%dT%H:%M:%S %1%2')
1500 1499 self._gitcommand(cmd, env=env)
1501 1500 # make sure commit works otherwise HEAD might not exist under certain
1502 1501 # circumstances
1503 1502 return self._gitstate()
1504 1503
1505 1504 @annotatesubrepoerror
1506 1505 def merge(self, state):
1507 1506 source, revision, kind = state
1508 1507 self._fetch(source, revision)
1509 1508 base = self._gitcommand(['merge-base', revision, self._state[1]])
1510 1509 self._gitupdatestat()
1511 1510 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1512 1511
1513 1512 def mergefunc():
1514 1513 if base == revision:
1515 1514 self.get(state) # fast forward merge
1516 1515 elif base != self._state[1]:
1517 1516 self._gitcommand(['merge', '--no-commit', revision])
1518 1517 _sanitize(self.ui, self.wvfs, '.git')
1519 1518
1520 1519 if self.dirty():
1521 1520 if self._gitstate() != revision:
1522 1521 dirty = self._gitstate() == self._state[1] or code != 0
1523 1522 if _updateprompt(self.ui, self, dirty,
1524 1523 self._state[1][:7], revision[:7]):
1525 1524 mergefunc()
1526 1525 else:
1527 1526 mergefunc()
1528 1527
1529 1528 @annotatesubrepoerror
1530 1529 def push(self, opts):
1531 1530 force = opts.get('force')
1532 1531
1533 1532 if not self._state[1]:
1534 1533 return True
1535 1534 if self._gitmissing():
1536 1535 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1537 1536 # if a branch in origin contains the revision, nothing to do
1538 1537 branch2rev, rev2branch = self._gitbranchmap()
1539 1538 if self._state[1] in rev2branch:
1540 1539 for b in rev2branch[self._state[1]]:
1541 1540 if b.startswith('refs/remotes/origin/'):
1542 1541 return True
1543 1542 for b, revision in branch2rev.iteritems():
1544 1543 if b.startswith('refs/remotes/origin/'):
1545 1544 if self._gitisancestor(self._state[1], revision):
1546 1545 return True
1547 1546 # otherwise, try to push the currently checked out branch
1548 1547 cmd = ['push']
1549 1548 if force:
1550 1549 cmd.append('--force')
1551 1550
1552 1551 current = self._gitcurrentbranch()
1553 1552 if current:
1554 1553 # determine if the current branch is even useful
1555 1554 if not self._gitisancestor(self._state[1], current):
1556 1555 self.ui.warn(_('unrelated git branch checked out '
1557 1556 'in subrepository "%s"\n') % self._relpath)
1558 1557 return False
1559 1558 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1560 1559 (current.split('/', 2)[2], self._relpath))
1561 1560 ret = self._gitdir(cmd + ['origin', current])
1562 1561 return ret[1] == 0
1563 1562 else:
1564 1563 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1565 1564 'cannot push revision %s\n') %
1566 1565 (self._relpath, self._state[1]))
1567 1566 return False
1568 1567
1569 1568 @annotatesubrepoerror
1570 1569 def add(self, ui, match, prefix, explicitonly, **opts):
1571 1570 if self._gitmissing():
1572 1571 return []
1573 1572
1574 1573 (modified, added, removed,
1575 1574 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1576 1575 clean=True)
1577 1576
1578 1577 tracked = set()
1579 1578         # dirstate states 'a'/'m'/'n' warn that the file is already tracked; 'r' (removed) files are added again
1580 1579 for l in (modified, added, deleted, clean):
1581 1580 tracked.update(l)
1582 1581
1583 1582 # Unknown files not of interest will be rejected by the matcher
1584 1583 files = unknown
1585 1584 files.extend(match.files())
1586 1585
1587 1586 rejected = []
1588 1587
1589 1588 files = [f for f in sorted(set(files)) if match(f)]
1590 1589 for f in files:
1591 1590 exact = match.exact(f)
1592 1591 command = ["add"]
1593 1592 if exact:
1594 1593 command.append("-f") #should be added, even if ignored
1595 1594 if ui.verbose or not exact:
1596 1595 ui.status(_('adding %s\n') % match.rel(f))
1597 1596
1598 1597 if f in tracked: # hg prints 'adding' even if already tracked
1599 1598 if exact:
1600 1599 rejected.append(f)
1601 1600 continue
1602 1601 if not opts.get(r'dry_run'):
1603 1602 self._gitcommand(command + [f])
1604 1603
1605 1604 for f in rejected:
1606 1605 ui.warn(_("%s already tracked!\n") % match.abs(f))
1607 1606
1608 1607 return rejected
1609 1608
1610 1609 @annotatesubrepoerror
1611 1610 def remove(self):
1612 1611 if self._gitmissing():
1613 1612 return
1614 1613 if self.dirty():
1615 1614 self.ui.warn(_('not removing repo %s because '
1616 1615 'it has changes.\n') % self._relpath)
1617 1616 return
1618 1617 # we can't fully delete the repository as it may contain
1619 1618 # local-only history
1620 1619 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1621 1620 self._gitcommand(['config', 'core.bare', 'true'])
1622 1621 for f, kind in self.wvfs.readdir():
1623 1622 if f == '.git':
1624 1623 continue
1625 1624 if kind == stat.S_IFDIR:
1626 1625 self.wvfs.rmtree(f)
1627 1626 else:
1628 1627 self.wvfs.unlink(f)
1629 1628
1630 1629 def archive(self, archiver, prefix, match=None, decode=True):
1631 1630 total = 0
1632 1631 source, revision = self._state
1633 1632 if not revision:
1634 1633 return total
1635 1634 self._fetch(source, revision)
1636 1635
1637 1636         # Use git's native archive command and parse the resulting tar stream.
1638 1637 # This should be much faster than manually traversing the trees
1639 1638 # and objects with many subprocess calls.
1640 1639 tarstream = self._gitcommand(['archive', revision], stream=True)
1641 1640 tar = tarfile.open(fileobj=tarstream, mode='r|')
1642 1641 relpath = subrelpath(self)
1643 1642 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1644 1643 for i, info in enumerate(tar):
1645 1644 if info.isdir():
1646 1645 continue
1647 1646 if match and not match(info.name):
1648 1647 continue
1649 1648 if info.issym():
1650 1649 data = info.linkname
1651 1650 else:
1652 1651 data = tar.extractfile(info).read()
1653 1652 archiver.addfile(prefix + self._path + '/' + info.name,
1654 1653 info.mode, info.issym(), data)
1655 1654 total += 1
1656 1655 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1657 1656 unit=_('files'))
1658 1657 self.ui.progress(_('archiving (%s)') % relpath, None)
1659 1658 return total
1660 1659
1661 1660
1662 1661 @annotatesubrepoerror
1663 1662 def cat(self, match, fm, fntemplate, prefix, **opts):
1664 1663 rev = self._state[1]
1665 1664 if match.anypats():
1666 1665             return 1 # no support for include/exclude patterns yet
1667 1666
1668 1667 if not match.files():
1669 1668 return 1
1670 1669
1671 1670 # TODO: add support for non-plain formatter (see cmdutil.cat())
1672 1671 for f in match.files():
1673 1672 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1674 1673 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1675 1674 pathname=self.wvfs.reljoin(prefix, f))
1676 1675 fp.write(output)
1677 1676 fp.close()
1678 1677 return 0
1679 1678
1680 1679
1681 1680 @annotatesubrepoerror
1682 1681 def status(self, rev2, **opts):
1683 1682 rev1 = self._state[1]
1684 1683 if self._gitmissing() or not rev1:
1685 1684 # if the repo is missing, return no results
1686 1685 return scmutil.status([], [], [], [], [], [], [])
1687 1686 modified, added, removed = [], [], []
1688 1687 self._gitupdatestat()
1689 1688 if rev2:
1690 1689 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1691 1690 else:
1692 1691 command = ['diff-index', '--no-renames', rev1]
1693 1692 out = self._gitcommand(command)
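        # raw diff-index/diff-tree output ends each line with '<status>\t<path>';
        # with --no-renames the status is a single letter, so it is the
        # character immediately before the tab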
1694 1693 for line in out.split('\n'):
1695 1694 tab = line.find('\t')
1696 1695 if tab == -1:
1697 1696 continue
1698 1697 status, f = line[tab - 1], line[tab + 1:]
1699 1698 if status == 'M':
1700 1699 modified.append(f)
1701 1700 elif status == 'A':
1702 1701 added.append(f)
1703 1702 elif status == 'D':
1704 1703 removed.append(f)
1705 1704
1706 1705 deleted, unknown, ignored, clean = [], [], [], []
1707 1706
1708 1707 command = ['status', '--porcelain', '-z']
1709 1708 if opts.get(r'unknown'):
1710 1709 command += ['--untracked-files=all']
1711 1710 if opts.get(r'ignored'):
1712 1711 command += ['--ignored']
1713 1712 out = self._gitcommand(command)
1714 1713
1715 1714 changedfiles = set()
1716 1715 changedfiles.update(modified)
1717 1716 changedfiles.update(added)
1718 1717 changedfiles.update(removed)
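        # 'status --porcelain -z' separates entries with NUL bytes; each entry
        # is a two-character XY status, a space, and the path, and rename/copy
        # entries append a second NUL-separated path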
1719 1718 for line in out.split('\0'):
1720 1719 if not line:
1721 1720 continue
1722 1721 st = line[0:2]
1723 1722             # moves and copies show two files on one line
1724 1723 if line.find('\0') >= 0:
1725 1724 filename1, filename2 = line[3:].split('\0')
1726 1725 else:
1727 1726 filename1 = line[3:]
1728 1727 filename2 = None
1729 1728
1730 1729 changedfiles.add(filename1)
1731 1730 if filename2:
1732 1731 changedfiles.add(filename2)
1733 1732
1734 1733 if st == '??':
1735 1734 unknown.append(filename1)
1736 1735 elif st == '!!':
1737 1736 ignored.append(filename1)
1738 1737
1739 1738 if opts.get(r'clean'):
1740 1739 out = self._gitcommand(['ls-files'])
1741 1740 for f in out.split('\n'):
1742 1741                 if f not in changedfiles:
1743 1742 clean.append(f)
1744 1743
1745 1744 return scmutil.status(modified, added, removed, deleted,
1746 1745 unknown, ignored, clean)
1747 1746
1748 1747 @annotatesubrepoerror
1749 1748 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1750 1749 node1 = self._state[1]
1751 1750 cmd = ['diff', '--no-renames']
1752 1751 if opts[r'stat']:
1753 1752 cmd.append('--stat')
1754 1753 else:
1755 1754 # for Git, this also implies '-p'
1756 1755 cmd.append('-U%d' % diffopts.context)
1757 1756
1758 1757 gitprefix = self.wvfs.reljoin(prefix, self._path)
1759 1758
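        # rewrite git's a/ and b/ diff prefixes to include the subrepo path so
        # the hunks apply relative to the parent repository root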
1760 1759 if diffopts.noprefix:
1761 1760 cmd.extend(['--src-prefix=%s/' % gitprefix,
1762 1761 '--dst-prefix=%s/' % gitprefix])
1763 1762 else:
1764 1763 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1765 1764 '--dst-prefix=b/%s/' % gitprefix])
1766 1765
1767 1766 if diffopts.ignorews:
1768 1767 cmd.append('--ignore-all-space')
1769 1768 if diffopts.ignorewsamount:
1770 1769 cmd.append('--ignore-space-change')
1771 1770 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1772 1771 and diffopts.ignoreblanklines:
1773 1772 cmd.append('--ignore-blank-lines')
1774 1773
1775 1774 cmd.append(node1)
1776 1775 if node2:
1777 1776 cmd.append(node2)
1778 1777
1779 1778 output = ""
1780 1779 if match.always():
1781 1780 output += self._gitcommand(cmd) + '\n'
1782 1781 else:
1783 1782 st = self.status(node2)[:3]
1784 1783 files = [f for sublist in st for f in sublist]
1785 1784 for f in files:
1786 1785 if match(f):
1787 1786 output += self._gitcommand(cmd + ['--', f]) + '\n'
1788 1787
1789 1788 if output.strip():
1790 1789 ui.write(output)
1791 1790
1792 1791 @annotatesubrepoerror
1793 1792 def revert(self, substate, *pats, **opts):
1794 1793 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1795 1794 if not opts.get(r'no_backup'):
1796 1795 status = self.status(None)
1797 1796 names = status.modified
1798 1797 for name in names:
1799 1798 bakname = scmutil.origpath(self.ui, self._subparent, name)
1800 1799 self.ui.note(_('saving current version of %s as %s\n') %
1801 1800 (name, bakname))
1802 1801 self.wvfs.rename(name, bakname)
1803 1802
1804 1803 if not opts.get(r'dry_run'):
1805 1804 self.get(substate, overwrite=True)
1806 1805 return []
1807 1806
1808 1807 def shortid(self, revid):
1809 1808 return revid[:7]
1810 1809
1811 1810 types = {
1812 1811 'hg': hgsubrepo,
1813 1812 'svn': svnsubrepo,
1814 1813 'git': gitsubrepo,
1815 1814 }
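
As a rough, hypothetical sketch (not part of subrepo.py itself), the `types`
table above is the kind of dispatch map a caller could consult to pick the
subrepo class for a state tuple of (source, revision, kind); the helper name
below is made up for illustration only.

# hypothetical helper, for illustration: dispatch on the 'kind' field
def _subrepoclass(state):
    source, revision, kind = state   # kind is 'hg', 'svn' or 'git'
    if kind not in types:
        raise error.Abort(_("unknown subrepo type %s") % kind)
    return types[kind]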