pull: add --remote-hidden option and pass it through peer creation...
Author: Manuel Jacob
Changeset: r51309:3a2df812 (branch: default)

The requested changes are too big and the content was truncated.
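
The hunks below thread the new remotehidden keyword through peer() creation in two ways: narrowrepo switches its override to blanket *args/**kwds forwarding, while bundlerepo names the argument explicitly. A minimal, self-contained sketch of the two patterns (the class names here are illustrative, not Mercurial's):

class fakepeer:
    """Stand-in peer that only records what it was given."""

    def __init__(self, repo, path=None, remotehidden=False):
        self.repo = repo
        self.path = path
        self.remotehidden = remotehidden


class baserepo:
    def peer(self, path=None, remotehidden=False):
        return fakepeer(self, path=path, remotehidden=remotehidden)


class explicitrepo(baserepo):
    # bundlerepo-style override: spell out the new keyword and forward it.
    def peer(self, path=None, remotehidden=False):
        return super().peer(path=path, remotehidden=remotehidden)


class forwardingrepo(baserepo):
    # narrowrepo-style override: forward everything, so newly added keyword
    # arguments such as remotehidden pass through without signature churn.
    def peer(self, *args, **kwds):
        return super().peer(*args, **kwds)


assert explicitrepo().peer(remotehidden=True).remotehidden
assert forwardingrepo().peer(remotehidden=True).remotehidden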

@@ -1,28 +1,28 b''
1 1 # narrowrepo.py - repository which supports narrow revlogs, lazy loading
2 2 #
3 3 # Copyright 2017 Google, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 from mercurial import wireprototypes
10 10
11 11 from . import narrowdirstate
12 12
13 13
14 14 def wraprepo(repo):
15 15 """Enables narrow clone functionality on a single local repository."""
16 16
17 17 class narrowrepository(repo.__class__):
18 18 def _makedirstate(self):
19 19 dirstate = super(narrowrepository, self)._makedirstate()
20 20 return narrowdirstate.wrapdirstate(self, dirstate)
21 21
22 - def peer(self, path=None):
23 - peer = super(narrowrepository, self).peer(path=path)
22 + def peer(self, *args, **kwds):
23 + peer = super(narrowrepository, self).peer(*args, **kwds)
24 24 peer._caps.add(wireprototypes.NARROWCAP)
25 25 peer._caps.add(wireprototypes.ELLIPSESCAP)
26 26 return peer
27 27
28 28 repo.__class__ = narrowrepository
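
wraprepo() above extends a live repository by deriving a new class from the instance's own class and then swapping __class__ on the object; makebundlerepository() in the next hunk uses the same trick. A stripped-down sketch of that dynamic-subclassing pattern, with made-up names:

def wrapobject(obj, mixin):
    # Mirrors the repo.__class__ = derived trick used by wraprepo() and
    # makebundlerepository(): obj keeps its identity but gains mixin's
    # behaviour via a class created on the fly.
    class derived(mixin, obj.__class__):
        pass

    obj.__class__ = derived
    return obj


class plainrepo:
    def capabilities(self):
        return {'base'}


class extracaps:
    def capabilities(self):
        return super().capabilities() | {'narrow', 'ellipses'}


repo = wrapobject(plainrepo(), extracaps)
assert repo.capabilities() == {'base', 'narrow', 'ellipses'}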
@@ -1,742 +1,742 b''
1 1 # bundlerepo.py - repository class for viewing uncompressed bundles
2 2 #
3 3 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Repository class for viewing uncompressed bundles.
9 9
10 10 This provides a read-only repository interface to bundles as if they
11 11 were part of the actual repository.
12 12 """
13 13
14 14
15 15 import os
16 16 import shutil
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 hex,
21 21 nullrev,
22 22 )
23 23
24 24 from . import (
25 25 bundle2,
26 26 changegroup,
27 27 changelog,
28 28 cmdutil,
29 29 discovery,
30 30 encoding,
31 31 error,
32 32 exchange,
33 33 filelog,
34 34 localrepo,
35 35 manifest,
36 36 mdiff,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 revlog,
41 41 revlogutils,
42 42 util,
43 43 vfs as vfsmod,
44 44 )
45 45 from .utils import (
46 46 urlutil,
47 47 )
48 48
49 49 from .revlogutils import (
50 50 constants as revlog_constants,
51 51 )
52 52
53 53
54 54 class bundlerevlog(revlog.revlog):
55 55 def __init__(self, opener, target, radix, cgunpacker, linkmapper):
56 56 # How it works:
57 57 # To retrieve a revision, we need to know the offset of the revision in
58 58 # the bundle (an unbundle object). We store this offset in the index
59 59 # (start). The base of the delta is stored in the base field.
60 60 #
61 61 # To differentiate a rev in the bundle from a rev in the revlog, we
62 62 # check revision against repotiprev.
63 63 opener = vfsmod.readonlyvfs(opener)
64 64 revlog.revlog.__init__(self, opener, target=target, radix=radix)
65 65 self.bundle = cgunpacker
66 66 n = len(self)
67 67 self.repotiprev = n - 1
68 68 self.bundlerevs = set() # used by 'bundle()' revset expression
69 69 for deltadata in cgunpacker.deltaiter():
70 70 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
71 71
72 72 size = len(delta)
73 73 start = cgunpacker.tell() - size
74 74
75 75 if self.index.has_node(node):
76 76 # this can happen if two branches make the same change
77 77 self.bundlerevs.add(self.index.rev(node))
78 78 continue
79 79 if cs == node:
80 80 linkrev = nullrev
81 81 else:
82 82 linkrev = linkmapper(cs)
83 83
84 84 for p in (p1, p2):
85 85 if not self.index.has_node(p):
86 86 raise error.LookupError(
87 87 p, self.display_id, _(b"unknown parent")
88 88 )
89 89
90 90 if not self.index.has_node(deltabase):
91 91 raise error.LookupError(
92 92 deltabase, self.display_id, _(b'unknown delta base')
93 93 )
94 94
95 95 baserev = self.rev(deltabase)
96 96 # start, size, full unc. size, base (unused), link, p1, p2, node, sidedata_offset (unused), sidedata_size (unused)
97 97 e = revlogutils.entry(
98 98 flags=flags,
99 99 data_offset=start,
100 100 data_compressed_length=size,
101 101 data_delta_base=baserev,
102 102 link_rev=linkrev,
103 103 parent_rev_1=self.rev(p1),
104 104 parent_rev_2=self.rev(p2),
105 105 node_id=node,
106 106 )
107 107 self.index.append(e)
108 108 self.bundlerevs.add(n)
109 109 n += 1
110 110
111 111 def _chunk(self, rev, df=None):
112 112 # Warning: in case of bundle, the diff is against what we stored as
113 113 # delta base, not against rev - 1
114 114 # XXX: could use some caching
115 115 if rev <= self.repotiprev:
116 116 return revlog.revlog._chunk(self, rev)
117 117 self.bundle.seek(self.start(rev))
118 118 return self.bundle.read(self.length(rev))
119 119
120 120 def revdiff(self, rev1, rev2):
121 121 """return or calculate a delta between two revisions"""
122 122 if rev1 > self.repotiprev and rev2 > self.repotiprev:
123 123 # hot path for bundle
124 124 revb = self.index[rev2][3]
125 125 if revb == rev1:
126 126 return self._chunk(rev2)
127 127 elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
128 128 return revlog.revlog.revdiff(self, rev1, rev2)
129 129
130 130 return mdiff.textdiff(self.rawdata(rev1), self.rawdata(rev2))
131 131
132 132 def _rawtext(self, node, rev, _df=None):
133 133 if rev is None:
134 134 rev = self.rev(node)
135 135 validated = False
136 136 rawtext = None
137 137 chain = []
138 138 iterrev = rev
139 139 # reconstruct the revision if it is from a changegroup
140 140 while iterrev > self.repotiprev:
141 141 if self._revisioncache and self._revisioncache[1] == iterrev:
142 142 rawtext = self._revisioncache[2]
143 143 break
144 144 chain.append(iterrev)
145 145 iterrev = self.index[iterrev][3]
146 146 if iterrev == nullrev:
147 147 rawtext = b''
148 148 elif rawtext is None:
149 149 r = super(bundlerevlog, self)._rawtext(
150 150 self.node(iterrev), iterrev, _df=_df
151 151 )
152 152 __, rawtext, validated = r
153 153 if chain:
154 154 validated = False
155 155 while chain:
156 156 delta = self._chunk(chain.pop())
157 157 rawtext = mdiff.patches(rawtext, [delta])
158 158 return rev, rawtext, validated
159 159
160 160 def addrevision(self, *args, **kwargs):
161 161 raise NotImplementedError
162 162
163 163 def addgroup(self, *args, **kwargs):
164 164 raise NotImplementedError
165 165
166 166 def strip(self, *args, **kwargs):
167 167 raise NotImplementedError
168 168
169 169 def checksize(self):
170 170 raise NotImplementedError
171 171
172 172
173 173 class bundlechangelog(bundlerevlog, changelog.changelog):
174 174 def __init__(self, opener, cgunpacker):
175 175 changelog.changelog.__init__(self, opener)
176 176 linkmapper = lambda x: x
177 177 bundlerevlog.__init__(
178 178 self,
179 179 opener,
180 180 (revlog_constants.KIND_CHANGELOG, None),
181 181 self.radix,
182 182 cgunpacker,
183 183 linkmapper,
184 184 )
185 185
186 186
187 187 class bundlemanifest(bundlerevlog, manifest.manifestrevlog):
188 188 def __init__(
189 189 self,
190 190 nodeconstants,
191 191 opener,
192 192 cgunpacker,
193 193 linkmapper,
194 194 dirlogstarts=None,
195 195 dir=b'',
196 196 ):
197 197 manifest.manifestrevlog.__init__(self, nodeconstants, opener, tree=dir)
198 198 bundlerevlog.__init__(
199 199 self,
200 200 opener,
201 201 (revlog_constants.KIND_MANIFESTLOG, dir),
202 202 self._revlog.radix,
203 203 cgunpacker,
204 204 linkmapper,
205 205 )
206 206 if dirlogstarts is None:
207 207 dirlogstarts = {}
208 208 if self.bundle.version == b"03":
209 209 dirlogstarts = _getfilestarts(self.bundle)
210 210 self._dirlogstarts = dirlogstarts
211 211 self._linkmapper = linkmapper
212 212
213 213 def dirlog(self, d):
214 214 if d in self._dirlogstarts:
215 215 self.bundle.seek(self._dirlogstarts[d])
216 216 return bundlemanifest(
217 217 self.nodeconstants,
218 218 self.opener,
219 219 self.bundle,
220 220 self._linkmapper,
221 221 self._dirlogstarts,
222 222 dir=d,
223 223 )
224 224 return super(bundlemanifest, self).dirlog(d)
225 225
226 226
227 227 class bundlefilelog(filelog.filelog):
228 228 def __init__(self, opener, path, cgunpacker, linkmapper):
229 229 filelog.filelog.__init__(self, opener, path)
230 230 self._revlog = bundlerevlog(
231 231 opener,
232 232 # XXX should use the unencoded path
233 233 target=(revlog_constants.KIND_FILELOG, path),
234 234 radix=self._revlog.radix,
235 235 cgunpacker=cgunpacker,
236 236 linkmapper=linkmapper,
237 237 )
238 238
239 239
240 240 class bundlepeer(localrepo.localpeer):
241 241 def canpush(self):
242 242 return False
243 243
244 244
245 245 class bundlephasecache(phases.phasecache):
246 246 def __init__(self, *args, **kwargs):
247 247 super(bundlephasecache, self).__init__(*args, **kwargs)
248 248 if util.safehasattr(self, 'opener'):
249 249 self.opener = vfsmod.readonlyvfs(self.opener)
250 250
251 251 def write(self):
252 252 raise NotImplementedError
253 253
254 254 def _write(self, fp):
255 255 raise NotImplementedError
256 256
257 257 def _updateroots(self, phase, newroots, tr):
258 258 self.phaseroots[phase] = newroots
259 259 self.invalidate()
260 260 self.dirty = True
261 261
262 262
263 263 def _getfilestarts(cgunpacker):
264 264 filespos = {}
265 265 for chunkdata in iter(cgunpacker.filelogheader, {}):
266 266 fname = chunkdata[b'filename']
267 267 filespos[fname] = cgunpacker.tell()
268 268 for chunk in iter(lambda: cgunpacker.deltachunk(None), {}):
269 269 pass
270 270 return filespos
271 271
272 272
273 273 class bundlerepository:
274 274 """A repository instance that is a union of a local repo and a bundle.
275 275
276 276 Instances represent a read-only repository composed of a local repository
277 277 with the contents of a bundle file applied. The repository instance is
278 278 conceptually similar to the state of a repository after an
279 279 ``hg unbundle`` operation. However, the contents of the bundle are never
280 280 applied to the actual base repository.
281 281
282 282 Instances constructed directly are not usable as repository objects.
283 283 Use instance() or makebundlerepository() to create instances.
284 284 """
285 285
286 286 def __init__(self, bundlepath, url, tempparent):
287 287 self._tempparent = tempparent
288 288 self._url = url
289 289
290 290 self.ui.setconfig(b'phases', b'publish', False, b'bundlerepo')
291 291
292 292 # dict with the mapping 'filename' -> position in the changegroup.
293 293 self._cgfilespos = {}
294 294 self._bundlefile = None
295 295 self._cgunpacker = None
296 296 self.tempfile = None
297 297 f = util.posixfile(bundlepath, b"rb")
298 298 bundle = exchange.readbundle(self.ui, f, bundlepath)
299 299
300 300 if isinstance(bundle, bundle2.unbundle20):
301 301 self._bundlefile = bundle
302 302
303 303 cgpart = None
304 304 for part in bundle.iterparts(seekable=True):
305 305 if part.type == b'phase-heads':
306 306 self._handle_bundle2_phase_part(bundle, part)
307 307 elif part.type == b'changegroup':
308 308 if cgpart:
309 309 raise NotImplementedError(
310 310 b"can't process multiple changegroups"
311 311 )
312 312 cgpart = part
313 313 self._handle_bundle2_cg_part(bundle, part)
314 314
315 315 if not cgpart:
316 316 raise error.Abort(_(b"No changegroups found"))
317 317
318 318 # This is required to placate a later consumer, which expects
319 319 # the payload offset to be at the beginning of the changegroup.
320 320 # We need to do this after the iterparts() generator advances
321 321 # because iterparts() will seek to end of payload after the
322 322 # generator returns control to iterparts().
323 323 cgpart.seek(0, os.SEEK_SET)
324 324
325 325 elif isinstance(bundle, changegroup.cg1unpacker):
326 326 self._handle_bundle1(bundle, bundlepath)
327 327 else:
328 328 raise error.Abort(
329 329 _(b'bundle type %r cannot be read') % type(bundle)
330 330 )
331 331
332 332 def _handle_bundle1(self, bundle, bundlepath):
333 333 if bundle.compressed():
334 334 f = self._writetempbundle(bundle.read, b'.hg10un', header=b'HG10UN')
335 335 bundle = exchange.readbundle(self.ui, f, bundlepath, self.vfs)
336 336
337 337 self._bundlefile = bundle
338 338 self._cgunpacker = bundle
339 339
340 340 self.firstnewrev = self.changelog.repotiprev + 1
341 341 phases.retractboundary(
342 342 self,
343 343 None,
344 344 phases.draft,
345 345 [ctx.node() for ctx in self[self.firstnewrev :]],
346 346 )
347 347
348 348 def _handle_bundle2_cg_part(self, bundle, part):
349 349 assert part.type == b'changegroup'
350 350 cgstream = part
351 351 targetphase = part.params.get(b'targetphase')
352 352 try:
353 353 targetphase = int(targetphase)
354 354 except TypeError:
355 355 pass
356 356 if targetphase is None:
357 357 targetphase = phases.draft
358 358 if targetphase not in phases.allphases:
359 359 m = _(b'unsupported targetphase: %d')
360 360 m %= targetphase
361 361 raise error.Abort(m)
362 362 version = part.params.get(b'version', b'01')
363 363 legalcgvers = changegroup.supportedincomingversions(self)
364 364 if version not in legalcgvers:
365 365 msg = _(b'Unsupported changegroup version: %s')
366 366 raise error.Abort(msg % version)
367 367 if bundle.compressed():
368 368 cgstream = self._writetempbundle(part.read, b'.cg%sun' % version)
369 369
370 370 self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
371 371
372 372 self.firstnewrev = self.changelog.repotiprev + 1
373 373 phases.retractboundary(
374 374 self,
375 375 None,
376 376 targetphase,
377 377 [ctx.node() for ctx in self[self.firstnewrev :]],
378 378 )
379 379
380 380 def _handle_bundle2_phase_part(self, bundle, part):
381 381 assert part.type == b'phase-heads'
382 382
383 383 unfi = self.unfiltered()
384 384 headsbyphase = phases.binarydecode(part)
385 385 phases.updatephases(unfi, lambda: None, headsbyphase)
386 386
387 387 def _writetempbundle(self, readfn, suffix, header=b''):
388 388 """Write a temporary file to disk"""
389 389 fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
390 390 self.tempfile = temp
391 391
392 392 with os.fdopen(fdtemp, 'wb') as fptemp:
393 393 fptemp.write(header)
394 394 while True:
395 395 chunk = readfn(2 ** 18)
396 396 if not chunk:
397 397 break
398 398 fptemp.write(chunk)
399 399
400 400 return self.vfs.open(self.tempfile, mode=b"rb")
401 401
402 402 @localrepo.unfilteredpropertycache
403 403 def _phasecache(self):
404 404 return bundlephasecache(self, self._phasedefaults)
405 405
406 406 @localrepo.unfilteredpropertycache
407 407 def changelog(self):
408 408 # consume the header if it exists
409 409 self._cgunpacker.changelogheader()
410 410 c = bundlechangelog(self.svfs, self._cgunpacker)
411 411 self.manstart = self._cgunpacker.tell()
412 412 return c
413 413
414 414 def _refreshchangelog(self):
415 415 # changelog for bundle repo are not filecache, this method is not
416 416 # applicable.
417 417 pass
418 418
419 419 @localrepo.unfilteredpropertycache
420 420 def manifestlog(self):
421 421 self._cgunpacker.seek(self.manstart)
422 422 # consume the header if it exists
423 423 self._cgunpacker.manifestheader()
424 424 linkmapper = self.unfiltered().changelog.rev
425 425 rootstore = bundlemanifest(
426 426 self.nodeconstants, self.svfs, self._cgunpacker, linkmapper
427 427 )
428 428 self.filestart = self._cgunpacker.tell()
429 429
430 430 return manifest.manifestlog(
431 431 self.svfs, self, rootstore, self.narrowmatch()
432 432 )
433 433
434 434 def _consumemanifest(self):
435 435 """Consumes the manifest portion of the bundle, setting filestart so the
436 436 file portion can be read."""
437 437 self._cgunpacker.seek(self.manstart)
438 438 self._cgunpacker.manifestheader()
439 439 for delta in self._cgunpacker.deltaiter():
440 440 pass
441 441 self.filestart = self._cgunpacker.tell()
442 442
443 443 @localrepo.unfilteredpropertycache
444 444 def manstart(self):
445 445 self.changelog
446 446 return self.manstart
447 447
448 448 @localrepo.unfilteredpropertycache
449 449 def filestart(self):
450 450 self.manifestlog
451 451
452 452 # If filestart was not set by self.manifestlog, that means the
453 453 # manifestlog implementation did not consume the manifests from the
454 454 # changegroup (ex: it might be consuming trees from a separate bundle2
455 455 # part instead). So we need to manually consume it.
456 456 if 'filestart' not in self.__dict__:
457 457 self._consumemanifest()
458 458
459 459 return self.filestart
460 460
461 461 def url(self):
462 462 return self._url
463 463
464 464 def file(self, f):
465 465 if not self._cgfilespos:
466 466 self._cgunpacker.seek(self.filestart)
467 467 self._cgfilespos = _getfilestarts(self._cgunpacker)
468 468
469 469 if f in self._cgfilespos:
470 470 self._cgunpacker.seek(self._cgfilespos[f])
471 471 linkmapper = self.unfiltered().changelog.rev
472 472 return bundlefilelog(self.svfs, f, self._cgunpacker, linkmapper)
473 473 else:
474 474 return super(bundlerepository, self).file(f)
475 475
476 476 def close(self):
477 477 """Close assigned bundle file immediately."""
478 478 self._bundlefile.close()
479 479 if self.tempfile is not None:
480 480 self.vfs.unlink(self.tempfile)
481 481 if self._tempparent:
482 482 shutil.rmtree(self._tempparent, True)
483 483
484 484 def cancopy(self):
485 485 return False
486 486
487 - def peer(self, path=None):
488 - return bundlepeer(self, path=path)
487 + def peer(self, path=None, remotehidden=False):
488 + return bundlepeer(self, path=path, remotehidden=remotehidden)
489 489
490 490 def getcwd(self):
491 491 return encoding.getcwd() # always outside the repo
492 492
493 493 # Check if parents exist in localrepo before setting
494 494 def setparents(self, p1, p2=None):
495 495 if p2 is None:
496 496 p2 = self.nullid
497 497 p1rev = self.changelog.rev(p1)
498 498 p2rev = self.changelog.rev(p2)
499 499 msg = _(b"setting parent to node %s that only exists in the bundle\n")
500 500 if self.changelog.repotiprev < p1rev:
501 501 self.ui.warn(msg % hex(p1))
502 502 if self.changelog.repotiprev < p2rev:
503 503 self.ui.warn(msg % hex(p2))
504 504 return super(bundlerepository, self).setparents(p1, p2)
505 505
506 506
507 507 def instance(ui, path, create, intents=None, createopts=None):
508 508 if create:
509 509 raise error.Abort(_(b'cannot create new bundle repository'))
510 510 # internal config: bundle.mainreporoot
511 511 parentpath = ui.config(b"bundle", b"mainreporoot")
512 512 if not parentpath:
513 513 # try to find the correct path to the working directory repo
514 514 parentpath = cmdutil.findrepo(encoding.getcwd())
515 515 if parentpath is None:
516 516 parentpath = b''
517 517 if parentpath:
518 518 # Try to make the full path relative so we get a nice, short URL.
519 519 # In particular, we don't want temp dir names in test outputs.
520 520 cwd = encoding.getcwd()
521 521 if parentpath == cwd:
522 522 parentpath = b''
523 523 else:
524 524 cwd = pathutil.normasprefix(cwd)
525 525 if parentpath.startswith(cwd):
526 526 parentpath = parentpath[len(cwd) :]
527 527 u = urlutil.url(path)
528 528 path = u.localpath()
529 529 if u.scheme == b'bundle':
530 530 s = path.split(b"+", 1)
531 531 if len(s) == 1:
532 532 repopath, bundlename = parentpath, s[0]
533 533 else:
534 534 repopath, bundlename = s
535 535 else:
536 536 repopath, bundlename = parentpath, path
537 537
538 538 return makebundlerepository(ui, repopath, bundlename)
539 539
540 540
541 541 def makebundlerepository(ui, repopath, bundlepath):
542 542 """Make a bundle repository object based on repo and bundle paths."""
543 543 if repopath:
544 544 url = b'bundle:%s+%s' % (util.expandpath(repopath), bundlepath)
545 545 else:
546 546 url = b'bundle:%s' % bundlepath
547 547
548 548 # Because we can't make any guarantees about the type of the base
549 549 # repository, we can't have a static class representing the bundle
550 550 # repository. We also can't make any guarantees about how to even
551 551 # call the base repository's constructor!
552 552 #
553 553 # So, our strategy is to go through ``localrepo.instance()`` to construct
554 554 # a repo instance. Then, we dynamically create a new type derived from
555 555 # both it and our ``bundlerepository`` class which overrides some
556 556 # functionality. We then change the type of the constructed repository
557 557 # to this new type and initialize the bundle-specific bits of it.
558 558
559 559 try:
560 560 repo = localrepo.instance(ui, repopath, create=False)
561 561 tempparent = None
562 562 except error.RequirementError:
563 563 raise # no fallback if the backing repo is unsupported
564 564 except error.RepoError:
565 565 tempparent = pycompat.mkdtemp()
566 566 try:
567 567 repo = localrepo.instance(ui, tempparent, create=True)
568 568 except Exception:
569 569 shutil.rmtree(tempparent)
570 570 raise
571 571
572 572 class derivedbundlerepository(bundlerepository, repo.__class__):
573 573 pass
574 574
575 575 repo.__class__ = derivedbundlerepository
576 576 bundlerepository.__init__(repo, bundlepath, url, tempparent)
577 577
578 578 return repo
579 579
580 580
581 581 class bundletransactionmanager:
582 582 def transaction(self):
583 583 return None
584 584
585 585 def close(self):
586 586 raise NotImplementedError
587 587
588 588 def release(self):
589 589 raise NotImplementedError
590 590
591 591
592 592 def getremotechanges(
593 593 ui, repo, peer, onlyheads=None, bundlename=None, force=False
594 594 ):
595 595 """obtains a bundle of changes incoming from peer
596 596
597 597 "onlyheads" restricts the returned changes to those reachable from the
598 598 specified heads.
599 599 "bundlename", if given, stores the bundle to this file path permanently;
600 600 otherwise it's stored to a temp file and gets deleted again when you call
601 601 the returned "cleanupfn".
602 602 "force" indicates whether to proceed on unrelated repos.
603 603
604 604 Returns a tuple (local, csets, cleanupfn):
605 605
606 606 "local" is a local repo from which to obtain the actual incoming
607 607 changesets; it is a bundlerepo for the obtained bundle when the
608 608 original "peer" is remote.
609 609 "csets" lists the incoming changeset node ids.
610 610 "cleanupfn" must be called without arguments when you're done processing
611 611 the changes; it closes both the original "peer" and the one returned
612 612 here.
613 613 """
614 614 tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
615 615 common, incoming, rheads = tmp
616 616 if not incoming:
617 617 try:
618 618 if bundlename:
619 619 os.unlink(bundlename)
620 620 except OSError:
621 621 pass
622 622 return repo, [], peer.close
623 623
624 624 commonset = set(common)
625 625 rheads = [x for x in rheads if x not in commonset]
626 626
627 627 bundle = None
628 628 bundlerepo = None
629 629 localrepo = peer.local()
630 630 if bundlename or not localrepo:
631 631 # create a bundle (uncompressed if peer repo is not local)
632 632
633 633 # developer config: devel.legacy.exchange
634 634 legexc = ui.configlist(b'devel', b'legacy.exchange')
635 635 forcebundle1 = b'bundle2' not in legexc and b'bundle1' in legexc
636 636 canbundle2 = (
637 637 not forcebundle1
638 638 and peer.capable(b'getbundle')
639 639 and peer.capable(b'bundle2')
640 640 )
641 641 if canbundle2:
642 642 with peer.commandexecutor() as e:
643 643 b2 = e.callcommand(
644 644 b'getbundle',
645 645 {
646 646 b'source': b'incoming',
647 647 b'common': common,
648 648 b'heads': rheads,
649 649 b'bundlecaps': exchange.caps20to10(
650 650 repo, role=b'client'
651 651 ),
652 652 b'cg': True,
653 653 },
654 654 ).result()
655 655
656 656 fname = bundle = changegroup.writechunks(
657 657 ui, b2._forwardchunks(), bundlename
658 658 )
659 659 else:
660 660 if peer.capable(b'getbundle'):
661 661 with peer.commandexecutor() as e:
662 662 cg = e.callcommand(
663 663 b'getbundle',
664 664 {
665 665 b'source': b'incoming',
666 666 b'common': common,
667 667 b'heads': rheads,
668 668 },
669 669 ).result()
670 670 elif onlyheads is None and not peer.capable(b'changegroupsubset'):
671 671 # compat with older servers when pulling all remote heads
672 672
673 673 with peer.commandexecutor() as e:
674 674 cg = e.callcommand(
675 675 b'changegroup',
676 676 {
677 677 b'nodes': incoming,
678 678 b'source': b'incoming',
679 679 },
680 680 ).result()
681 681
682 682 rheads = None
683 683 else:
684 684 with peer.commandexecutor() as e:
685 685 cg = e.callcommand(
686 686 b'changegroupsubset',
687 687 {
688 688 b'bases': incoming,
689 689 b'heads': rheads,
690 690 b'source': b'incoming',
691 691 },
692 692 ).result()
693 693
694 694 if localrepo:
695 695 bundletype = b"HG10BZ"
696 696 else:
697 697 bundletype = b"HG10UN"
698 698 fname = bundle = bundle2.writebundle(ui, cg, bundlename, bundletype)
699 699 # keep written bundle?
700 700 if bundlename:
701 701 bundle = None
702 702 if not localrepo:
703 703 # use the created uncompressed bundlerepo
704 704 localrepo = bundlerepo = makebundlerepository(
705 705 repo.baseui, repo.root, fname
706 706 )
707 707
708 708 # this repo contains local and peer now, so filter out local again
709 709 common = repo.heads()
710 710 if localrepo:
711 711 # Part of common may be remotely filtered
712 712 # So use an unfiltered version
713 713 # The discovery process probably need cleanup to avoid that
714 714 localrepo = localrepo.unfiltered()
715 715
716 716 csets = localrepo.changelog.findmissing(common, rheads)
717 717
718 718 if bundlerepo:
719 719 reponodes = [ctx.node() for ctx in bundlerepo[bundlerepo.firstnewrev :]]
720 720
721 721 with peer.commandexecutor() as e:
722 722 remotephases = e.callcommand(
723 723 b'listkeys',
724 724 {
725 725 b'namespace': b'phases',
726 726 },
727 727 ).result()
728 728
729 729 pullop = exchange.pulloperation(
730 730 bundlerepo, peer, path=None, heads=reponodes
731 731 )
732 732 pullop.trmanager = bundletransactionmanager()
733 733 exchange._pullapplyphases(pullop, remotephases)
734 734
735 735 def cleanup():
736 736 if bundlerepo:
737 737 bundlerepo.close()
738 738 if bundle:
739 739 os.unlink(bundle)
740 740 peer.close()
741 741
742 742 return (localrepo, csets, cleanup)
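
getremotechanges() above hands back a (local, csets, cleanupfn) triple as its docstring describes. A hedged usage sketch follows; the way the peer is obtained (hg.peer) and the preview_incoming name are assumptions, while the rest sticks to the documented contract:

from mercurial import hg
from mercurial.bundlerepo import getremotechanges


def preview_incoming(ui, repo, source):
    # Obtaining the peer via hg.peer() is an assumption about the caller.
    peer = hg.peer(repo, {}, source)
    local, csets, cleanup = getremotechanges(ui, repo, peer)
    try:
        # csets lists the incoming changeset node ids; look them up in
        # `local`, which is a bundlerepo when the peer is remote.
        for node in csets:
            ui.write(b'%s\n' % local[node].hex())
    finally:
        cleanup()  # closes the peer (and the temporary bundle repo, if any)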
@@ -1,8020 +1,8038 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import os
10 10 import re
11 11 import sys
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 nullrev,
18 18 short,
19 19 wdirrev,
20 20 )
21 21 from .pycompat import open
22 22 from . import (
23 23 archival,
24 24 bookmarks,
25 25 bundle2,
26 26 bundlecaches,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 discovery,
33 33 encoding,
34 34 error,
35 35 exchange,
36 36 extensions,
37 37 filemerge,
38 38 formatter,
39 39 graphmod,
40 40 grep as grepmod,
41 41 hbisect,
42 42 help,
43 43 hg,
44 44 logcmdutil,
45 45 merge as mergemod,
46 46 mergestate as mergestatemod,
47 47 narrowspec,
48 48 obsolete,
49 49 obsutil,
50 50 patch,
51 51 phases,
52 52 pycompat,
53 53 rcutil,
54 54 registrar,
55 55 requirements,
56 56 revsetlang,
57 57 rewriteutil,
58 58 scmutil,
59 59 server,
60 60 shelve as shelvemod,
61 61 state as statemod,
62 62 streamclone,
63 63 tags as tagsmod,
64 64 ui as uimod,
65 65 util,
66 66 verify as verifymod,
67 67 vfs as vfsmod,
68 68 wireprotoserver,
69 69 )
70 70 from .utils import (
71 71 dateutil,
72 72 stringutil,
73 73 urlutil,
74 74 )
75 75
76 76 table = {}
77 77 table.update(debugcommandsmod.command._table)
78 78
79 79 command = registrar.command(table)
80 80 INTENT_READONLY = registrar.INTENT_READONLY
81 81
82 82 # common command options
83 83
84 84 globalopts = [
85 85 (
86 86 b'R',
87 87 b'repository',
88 88 b'',
89 89 _(b'repository root directory or name of overlay bundle file'),
90 90 _(b'REPO'),
91 91 ),
92 92 (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
93 93 (
94 94 b'y',
95 95 b'noninteractive',
96 96 None,
97 97 _(
98 98 b'do not prompt, automatically pick the first choice for all prompts'
99 99 ),
100 100 ),
101 101 (b'q', b'quiet', None, _(b'suppress output')),
102 102 (b'v', b'verbose', None, _(b'enable additional output')),
103 103 (
104 104 b'',
105 105 b'color',
106 106 b'',
107 107 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
108 108 # and should not be translated
109 109 _(b"when to colorize (boolean, always, auto, never, or debug)"),
110 110 _(b'TYPE'),
111 111 ),
112 112 (
113 113 b'',
114 114 b'config',
115 115 [],
116 116 _(b'set/override config option (use \'section.name=value\')'),
117 117 _(b'CONFIG'),
118 118 ),
119 119 (b'', b'debug', None, _(b'enable debugging output')),
120 120 (b'', b'debugger', None, _(b'start debugger')),
121 121 (
122 122 b'',
123 123 b'encoding',
124 124 encoding.encoding,
125 125 _(b'set the charset encoding'),
126 126 _(b'ENCODE'),
127 127 ),
128 128 (
129 129 b'',
130 130 b'encodingmode',
131 131 encoding.encodingmode,
132 132 _(b'set the charset encoding mode'),
133 133 _(b'MODE'),
134 134 ),
135 135 (b'', b'traceback', None, _(b'always print a traceback on exception')),
136 136 (b'', b'time', None, _(b'time how long the command takes')),
137 137 (b'', b'profile', None, _(b'print command execution profile')),
138 138 (b'', b'version', None, _(b'output version information and exit')),
139 139 (b'h', b'help', None, _(b'display help and exit')),
140 140 (b'', b'hidden', False, _(b'consider hidden changesets')),
141 141 (
142 142 b'',
143 143 b'pager',
144 144 b'auto',
145 145 _(b"when to paginate (boolean, always, auto, or never)"),
146 146 _(b'TYPE'),
147 147 ),
148 148 ]
149 149
150 150 dryrunopts = cmdutil.dryrunopts
151 151 remoteopts = cmdutil.remoteopts
152 152 walkopts = cmdutil.walkopts
153 153 commitopts = cmdutil.commitopts
154 154 commitopts2 = cmdutil.commitopts2
155 155 commitopts3 = cmdutil.commitopts3
156 156 formatteropts = cmdutil.formatteropts
157 157 templateopts = cmdutil.templateopts
158 158 logopts = cmdutil.logopts
159 159 diffopts = cmdutil.diffopts
160 160 diffwsopts = cmdutil.diffwsopts
161 161 diffopts2 = cmdutil.diffopts2
162 162 mergetoolopts = cmdutil.mergetoolopts
163 163 similarityopts = cmdutil.similarityopts
164 164 subrepoopts = cmdutil.subrepoopts
165 165 debugrevlogopts = cmdutil.debugrevlogopts
166 166
167 167 # Commands start here, listed alphabetically
168 168
169 169
170 170 @command(
171 171 b'abort',
172 172 dryrunopts,
173 173 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
174 174 helpbasic=True,
175 175 )
176 176 def abort(ui, repo, **opts):
177 177 """abort an unfinished operation (EXPERIMENTAL)
178 178
179 179 Aborts a multistep operation like graft, histedit, rebase, merge,
180 180 and unshelve if they are in an unfinished state.
181 181
182 182 use --dry-run/-n to dry run the command.
183 183 """
184 184 dryrun = opts.get('dry_run')
185 185 abortstate = cmdutil.getunfinishedstate(repo)
186 186 if not abortstate:
187 187 raise error.StateError(_(b'no operation in progress'))
188 188 if not abortstate.abortfunc:
189 189 raise error.InputError(
190 190 (
191 191 _(b"%s in progress but does not support 'hg abort'")
192 192 % (abortstate._opname)
193 193 ),
194 194 hint=abortstate.hint(),
195 195 )
196 196 if dryrun:
197 197 ui.status(
198 198 _(b'%s in progress, will be aborted\n') % (abortstate._opname)
199 199 )
200 200 return
201 201 return abortstate.abortfunc(ui, repo)
202 202
203 203
204 204 @command(
205 205 b'add',
206 206 walkopts + subrepoopts + dryrunopts,
207 207 _(b'[OPTION]... [FILE]...'),
208 208 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
209 209 helpbasic=True,
210 210 inferrepo=True,
211 211 )
212 212 def add(ui, repo, *pats, **opts):
213 213 """add the specified files on the next commit
214 214
215 215 Schedule files to be version controlled and added to the
216 216 repository.
217 217
218 218 The files will be added to the repository at the next commit. To
219 219 undo an add before that, see :hg:`forget`.
220 220
221 221 If no names are given, add all files to the repository (except
222 222 files matching ``.hgignore``).
223 223
224 224 .. container:: verbose
225 225
226 226 Examples:
227 227
228 228 - New (unknown) files are added
229 229 automatically by :hg:`add`::
230 230
231 231 $ ls
232 232 foo.c
233 233 $ hg status
234 234 ? foo.c
235 235 $ hg add
236 236 adding foo.c
237 237 $ hg status
238 238 A foo.c
239 239
240 240 - Specific files to be added can be specified::
241 241
242 242 $ ls
243 243 bar.c foo.c
244 244 $ hg status
245 245 ? bar.c
246 246 ? foo.c
247 247 $ hg add bar.c
248 248 $ hg status
249 249 A bar.c
250 250 ? foo.c
251 251
252 252 Returns 0 if all files are successfully added.
253 253 """
254 254
255 255 with repo.wlock(), repo.dirstate.changing_files(repo):
256 256 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
257 257 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
258 258 rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts)
259 259 return rejected and 1 or 0
260 260
261 261
262 262 @command(
263 263 b'addremove',
264 264 similarityopts + subrepoopts + walkopts + dryrunopts,
265 265 _(b'[OPTION]... [FILE]...'),
266 266 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
267 267 inferrepo=True,
268 268 )
269 269 def addremove(ui, repo, *pats, **opts):
270 270 """add all new files, delete all missing files
271 271
272 272 Add all new files and remove all missing files from the
273 273 repository.
274 274
275 275 Unless names are given, new files are ignored if they match any of
276 276 the patterns in ``.hgignore``. As with add, these changes take
277 277 effect at the next commit.
278 278
279 279 Use the -s/--similarity option to detect renamed files. This
280 280 option takes a percentage between 0 (disabled) and 100 (files must
281 281 be identical) as its parameter. With a parameter greater than 0,
282 282 this compares every removed file with every added file and records
283 283 those similar enough as renames. Detecting renamed files this way
284 284 can be expensive. After using this option, :hg:`status -C` can be
285 285 used to check which files were identified as moved or renamed. If
286 286 not specified, -s/--similarity defaults to 100 and only renames of
287 287 identical files are detected.
288 288
289 289 .. container:: verbose
290 290
291 291 Examples:
292 292
293 293 - A number of files (bar.c and foo.c) are new,
294 294 while foobar.c has been removed (without using :hg:`remove`)
295 295 from the repository::
296 296
297 297 $ ls
298 298 bar.c foo.c
299 299 $ hg status
300 300 ! foobar.c
301 301 ? bar.c
302 302 ? foo.c
303 303 $ hg addremove
304 304 adding bar.c
305 305 adding foo.c
306 306 removing foobar.c
307 307 $ hg status
308 308 A bar.c
309 309 A foo.c
310 310 R foobar.c
311 311
312 312 - A file foobar.c was moved to foo.c without using :hg:`rename`.
313 313 Afterwards, it was edited slightly::
314 314
315 315 $ ls
316 316 foo.c
317 317 $ hg status
318 318 ! foobar.c
319 319 ? foo.c
320 320 $ hg addremove --similarity 90
321 321 removing foobar.c
322 322 adding foo.c
323 323 recording removal of foobar.c as rename to foo.c (94% similar)
324 324 $ hg status -C
325 325 A foo.c
326 326 foobar.c
327 327 R foobar.c
328 328
329 329 Returns 0 if all files are successfully added.
330 330 """
331 331 opts = pycompat.byteskwargs(opts)
332 332 if not opts.get(b'similarity'):
333 333 opts[b'similarity'] = b'100'
334 334 with repo.wlock(), repo.dirstate.changing_files(repo):
335 335 matcher = scmutil.match(repo[None], pats, opts)
336 336 relative = scmutil.anypats(pats, opts)
337 337 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
338 338 return scmutil.addremove(repo, matcher, b"", uipathfn, opts)
339 339
340 340
341 341 @command(
342 342 b'annotate|blame',
343 343 [
344 344 (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')),
345 345 (
346 346 b'',
347 347 b'follow',
348 348 None,
349 349 _(b'follow copies/renames and list the filename (DEPRECATED)'),
350 350 ),
351 351 (b'', b'no-follow', None, _(b"don't follow copies and renames")),
352 352 (b'a', b'text', None, _(b'treat all files as text')),
353 353 (b'u', b'user', None, _(b'list the author (long with -v)')),
354 354 (b'f', b'file', None, _(b'list the filename')),
355 355 (b'd', b'date', None, _(b'list the date (short with -q)')),
356 356 (b'n', b'number', None, _(b'list the revision number (default)')),
357 357 (b'c', b'changeset', None, _(b'list the changeset')),
358 358 (
359 359 b'l',
360 360 b'line-number',
361 361 None,
362 362 _(b'show line number at the first appearance'),
363 363 ),
364 364 (
365 365 b'',
366 366 b'skip',
367 367 [],
368 368 _(b'revset to not display (EXPERIMENTAL)'),
369 369 _(b'REV'),
370 370 ),
371 371 ]
372 372 + diffwsopts
373 373 + walkopts
374 374 + formatteropts,
375 375 _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
376 376 helpcategory=command.CATEGORY_FILE_CONTENTS,
377 377 helpbasic=True,
378 378 inferrepo=True,
379 379 )
380 380 def annotate(ui, repo, *pats, **opts):
381 381 """show changeset information by line for each file
382 382
383 383 List changes in files, showing the revision id responsible for
384 384 each line.
385 385
386 386 This command is useful for discovering when a change was made and
387 387 by whom.
388 388
389 389 If you include --file, --user, or --date, the revision number is
390 390 suppressed unless you also include --number.
391 391
392 392 Without the -a/--text option, annotate will avoid processing files
393 393 it detects as binary. With -a, annotate will annotate the file
394 394 anyway, although the results will probably be neither useful
395 395 nor desirable.
396 396
397 397 .. container:: verbose
398 398
399 399 Template:
400 400
401 401 The following keywords are supported in addition to the common template
402 402 keywords and functions. See also :hg:`help templates`.
403 403
404 404 :lines: List of lines with annotation data.
405 405 :path: String. Repository-absolute path of the specified file.
406 406
407 407 And each entry of ``{lines}`` provides the following sub-keywords in
408 408 addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.
409 409
410 410 :line: String. Line content.
411 411 :lineno: Integer. Line number at that revision.
412 412 :path: String. Repository-absolute path of the file at that revision.
413 413
414 414 See :hg:`help templates.operators` for the list expansion syntax.
415 415
416 416 Returns 0 on success.
417 417 """
418 418 opts = pycompat.byteskwargs(opts)
419 419 if not pats:
420 420 raise error.InputError(
421 421 _(b'at least one filename or pattern is required')
422 422 )
423 423
424 424 if opts.get(b'follow'):
425 425 # --follow is deprecated and now just an alias for -f/--file
426 426 # to mimic the behavior of Mercurial before version 1.5
427 427 opts[b'file'] = True
428 428
429 429 if (
430 430 not opts.get(b'user')
431 431 and not opts.get(b'changeset')
432 432 and not opts.get(b'date')
433 433 and not opts.get(b'file')
434 434 ):
435 435 opts[b'number'] = True
436 436
437 437 linenumber = opts.get(b'line_number') is not None
438 438 if (
439 439 linenumber
440 440 and (not opts.get(b'changeset'))
441 441 and (not opts.get(b'number'))
442 442 ):
443 443 raise error.InputError(_(b'at least one of -n/-c is required for -l'))
444 444
445 445 rev = opts.get(b'rev')
446 446 if rev:
447 447 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
448 448 ctx = logcmdutil.revsingle(repo, rev)
449 449
450 450 ui.pager(b'annotate')
451 451 rootfm = ui.formatter(b'annotate', opts)
452 452 if ui.debugflag:
453 453 shorthex = pycompat.identity
454 454 else:
455 455
456 456 def shorthex(h):
457 457 return h[:12]
458 458
459 459 if ui.quiet:
460 460 datefunc = dateutil.shortdate
461 461 else:
462 462 datefunc = dateutil.datestr
463 463 if ctx.rev() is None:
464 464 if opts.get(b'changeset'):
465 465 # omit "+" suffix which is appended to node hex
466 466 def formatrev(rev):
467 467 if rev == wdirrev:
468 468 return b'%d' % ctx.p1().rev()
469 469 else:
470 470 return b'%d' % rev
471 471
472 472 else:
473 473
474 474 def formatrev(rev):
475 475 if rev == wdirrev:
476 476 return b'%d+' % ctx.p1().rev()
477 477 else:
478 478 return b'%d ' % rev
479 479
480 480 def formathex(h):
481 481 if h == repo.nodeconstants.wdirhex:
482 482 return b'%s+' % shorthex(hex(ctx.p1().node()))
483 483 else:
484 484 return b'%s ' % shorthex(h)
485 485
486 486 else:
487 487 formatrev = b'%d'.__mod__
488 488 formathex = shorthex
489 489
490 490 opmap = [
491 491 (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser),
492 492 (b'rev', b' ', lambda x: scmutil.intrev(x.fctx), formatrev),
493 493 (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
494 494 (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
495 495 (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr),
496 496 (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr),
497 497 ]
498 498 opnamemap = {
499 499 b'rev': b'number',
500 500 b'node': b'changeset',
501 501 b'path': b'file',
502 502 b'lineno': b'line_number',
503 503 }
504 504
505 505 if rootfm.isplain():
506 506
507 507 def makefunc(get, fmt):
508 508 return lambda x: fmt(get(x))
509 509
510 510 else:
511 511
512 512 def makefunc(get, fmt):
513 513 return get
514 514
515 515 datahint = rootfm.datahint()
516 516 funcmap = [
517 517 (makefunc(get, fmt), sep)
518 518 for fn, sep, get, fmt in opmap
519 519 if opts.get(opnamemap.get(fn, fn)) or fn in datahint
520 520 ]
521 521 funcmap[0] = (funcmap[0][0], b'') # no separator in front of first column
522 522 fields = b' '.join(
523 523 fn
524 524 for fn, sep, get, fmt in opmap
525 525 if opts.get(opnamemap.get(fn, fn)) or fn in datahint
526 526 )
527 527
528 528 def bad(x, y):
529 529 raise error.InputError(b"%s: %s" % (x, y))
530 530
531 531 m = scmutil.match(ctx, pats, opts, badfn=bad)
532 532
533 533 follow = not opts.get(b'no_follow')
534 534 diffopts = patch.difffeatureopts(
535 535 ui, opts, section=b'annotate', whitespace=True
536 536 )
537 537 skiprevs = opts.get(b'skip')
538 538 if skiprevs:
539 539 skiprevs = logcmdutil.revrange(repo, skiprevs)
540 540
541 541 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
542 542 for abs in ctx.walk(m):
543 543 fctx = ctx[abs]
544 544 rootfm.startitem()
545 545 rootfm.data(path=abs)
546 546 if not opts.get(b'text') and fctx.isbinary():
547 547 rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs))
548 548 continue
549 549
550 550 fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}')
551 551 lines = fctx.annotate(
552 552 follow=follow, skiprevs=skiprevs, diffopts=diffopts
553 553 )
554 554 if not lines:
555 555 fm.end()
556 556 continue
557 557 formats = []
558 558 pieces = []
559 559
560 560 for f, sep in funcmap:
561 561 l = [f(n) for n in lines]
562 562 if fm.isplain():
563 563 sizes = [encoding.colwidth(x) for x in l]
564 564 ml = max(sizes)
565 565 formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes])
566 566 else:
567 567 formats.append([b'%s'] * len(l))
568 568 pieces.append(l)
569 569
570 570 for f, p, n in zip(zip(*formats), zip(*pieces), lines):
571 571 fm.startitem()
572 572 fm.context(fctx=n.fctx)
573 573 fm.write(fields, b"".join(f), *p)
574 574 if n.skip:
575 575 fmt = b"* %s"
576 576 else:
577 577 fmt = b": %s"
578 578 fm.write(b'line', fmt, n.text)
579 579
580 580 if not lines[-1].text.endswith(b'\n'):
581 581 fm.plain(b'\n')
582 582 fm.end()
583 583
584 584 rootfm.end()
585 585
586 586
587 587 @command(
588 588 b'archive',
589 589 [
590 590 (b'', b'no-decode', None, _(b'do not pass files through decoders')),
591 591 (
592 592 b'p',
593 593 b'prefix',
594 594 b'',
595 595 _(b'directory prefix for files in archive'),
596 596 _(b'PREFIX'),
597 597 ),
598 598 (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')),
599 599 (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')),
600 600 ]
601 601 + subrepoopts
602 602 + walkopts,
603 603 _(b'[OPTION]... DEST'),
604 604 helpcategory=command.CATEGORY_IMPORT_EXPORT,
605 605 )
606 606 def archive(ui, repo, dest, **opts):
607 607 """create an unversioned archive of a repository revision
608 608
609 609 By default, the revision used is the parent of the working
610 610 directory; use -r/--rev to specify a different revision.
611 611
612 612 The archive type is automatically detected based on file
613 613 extension (to override, use -t/--type).
614 614
615 615 .. container:: verbose
616 616
617 617 Examples:
618 618
619 619 - create a zip file containing the 1.0 release::
620 620
621 621 hg archive -r 1.0 project-1.0.zip
622 622
623 623 - create a tarball excluding .hg files::
624 624
625 625 hg archive project.tar.gz -X ".hg*"
626 626
627 627 Valid types are:
628 628
629 629 :``files``: a directory full of files (default)
630 630 :``tar``: tar archive, uncompressed
631 631 :``tbz2``: tar archive, compressed using bzip2
632 632 :``tgz``: tar archive, compressed using gzip
633 633 :``txz``: tar archive, compressed using lzma (only in Python 3)
634 634 :``uzip``: zip archive, uncompressed
635 635 :``zip``: zip archive, compressed using deflate
636 636
637 637 The exact name of the destination archive or directory is given
638 638 using a format string; see :hg:`help export` for details.
639 639
640 640 Each member added to an archive file has a directory prefix
641 641 prepended. Use -p/--prefix to specify a format string for the
642 642 prefix. The default is the basename of the archive, with suffixes
643 643 removed.
644 644
645 645 Returns 0 on success.
646 646 """
647 647
648 648 opts = pycompat.byteskwargs(opts)
649 649 rev = opts.get(b'rev')
650 650 if rev:
651 651 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
652 652 ctx = logcmdutil.revsingle(repo, rev)
653 653 if not ctx:
654 654 raise error.InputError(
655 655 _(b'no working directory: please specify a revision')
656 656 )
657 657 node = ctx.node()
658 658 dest = cmdutil.makefilename(ctx, dest)
659 659 if os.path.realpath(dest) == repo.root:
660 660 raise error.InputError(_(b'repository root cannot be destination'))
661 661
662 662 kind = opts.get(b'type') or archival.guesskind(dest) or b'files'
663 663 prefix = opts.get(b'prefix')
664 664
665 665 if dest == b'-':
666 666 if kind == b'files':
667 667 raise error.InputError(_(b'cannot archive plain files to stdout'))
668 668 dest = cmdutil.makefileobj(ctx, dest)
669 669 if not prefix:
670 670 prefix = os.path.basename(repo.root) + b'-%h'
671 671
672 672 prefix = cmdutil.makefilename(ctx, prefix)
673 673 match = scmutil.match(ctx, [], opts)
674 674 archival.archive(
675 675 repo,
676 676 dest,
677 677 node,
678 678 kind,
679 679 not opts.get(b'no_decode'),
680 680 match,
681 681 prefix,
682 682 subrepos=opts.get(b'subrepos'),
683 683 )
684 684
685 685
686 686 @command(
687 687 b'backout',
688 688 [
689 689 (
690 690 b'',
691 691 b'merge',
692 692 None,
693 693 _(b'merge with old dirstate parent after backout'),
694 694 ),
695 695 (
696 696 b'',
697 697 b'commit',
698 698 None,
699 699 _(b'commit if no conflicts were encountered (DEPRECATED)'),
700 700 ),
701 701 (b'', b'no-commit', None, _(b'do not commit')),
702 702 (
703 703 b'',
704 704 b'parent',
705 705 b'',
706 706 _(b'parent to choose when backing out merge (DEPRECATED)'),
707 707 _(b'REV'),
708 708 ),
709 709 (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')),
710 710 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
711 711 ]
712 712 + mergetoolopts
713 713 + walkopts
714 714 + commitopts
715 715 + commitopts2,
716 716 _(b'[OPTION]... [-r] REV'),
717 717 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
718 718 )
719 719 def backout(ui, repo, node=None, rev=None, **opts):
720 720 """reverse effect of earlier changeset
721 721
722 722 Prepare a new changeset with the effect of REV undone in the
723 723 current working directory. If no conflicts were encountered,
724 724 it will be committed immediately.
725 725
726 726 If REV is the parent of the working directory, then this new changeset
727 727 is committed automatically (unless --no-commit is specified).
728 728
729 729 .. note::
730 730
731 731 :hg:`backout` cannot be used to fix either an unwanted or
732 732 incorrect merge.
733 733
734 734 .. container:: verbose
735 735
736 736 Examples:
737 737
738 738 - Reverse the effect of the parent of the working directory.
739 739 This backout will be committed immediately::
740 740
741 741 hg backout -r .
742 742
743 743 - Reverse the effect of previous bad revision 23::
744 744
745 745 hg backout -r 23
746 746
747 747 - Reverse the effect of previous bad revision 23 and
748 748 leave changes uncommitted::
749 749
750 750 hg backout -r 23 --no-commit
751 751 hg commit -m "Backout revision 23"
752 752
753 753 By default, the pending changeset will have one parent,
754 754 maintaining a linear history. With --merge, the pending
755 755 changeset will instead have two parents: the old parent of the
756 756 working directory and a new child of REV that simply undoes REV.
757 757
758 758 Before version 1.7, the behavior without --merge was equivalent
759 759 to specifying --merge followed by :hg:`update --clean .` to
760 760 cancel the merge and leave the child of REV as a head to be
761 761 merged separately.
762 762
763 763 See :hg:`help dates` for a list of formats valid for -d/--date.
764 764
765 765 See :hg:`help revert` for a way to restore files to the state
766 766 of another revision.
767 767
768 768 Returns 0 on success, 1 if nothing to backout or there are unresolved
769 769 files.
770 770 """
771 771 with repo.wlock(), repo.lock():
772 772 return _dobackout(ui, repo, node, rev, **opts)
773 773
774 774
775 775 def _dobackout(ui, repo, node=None, rev=None, **opts):
776 776 cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge'])
777 777 opts = pycompat.byteskwargs(opts)
778 778
779 779 if rev and node:
780 780 raise error.InputError(_(b"please specify just one revision"))
781 781
782 782 if not rev:
783 783 rev = node
784 784
785 785 if not rev:
786 786 raise error.InputError(_(b"please specify a revision to backout"))
787 787
788 788 date = opts.get(b'date')
789 789 if date:
790 790 opts[b'date'] = dateutil.parsedate(date)
791 791
792 792 cmdutil.checkunfinished(repo)
793 793 cmdutil.bailifchanged(repo)
794 794 ctx = logcmdutil.revsingle(repo, rev)
795 795 node = ctx.node()
796 796
797 797 op1, op2 = repo.dirstate.parents()
798 798 if not repo.changelog.isancestor(node, op1):
799 799 raise error.InputError(
800 800 _(b'cannot backout change that is not an ancestor')
801 801 )
802 802
803 803 p1, p2 = repo.changelog.parents(node)
804 804 if p1 == repo.nullid:
805 805 raise error.InputError(_(b'cannot backout a change with no parents'))
806 806 if p2 != repo.nullid:
807 807 if not opts.get(b'parent'):
808 808 raise error.InputError(_(b'cannot backout a merge changeset'))
809 809 p = repo.lookup(opts[b'parent'])
810 810 if p not in (p1, p2):
811 811 raise error.InputError(
812 812 _(b'%s is not a parent of %s') % (short(p), short(node))
813 813 )
814 814 parent = p
815 815 else:
816 816 if opts.get(b'parent'):
817 817 raise error.InputError(
818 818 _(b'cannot use --parent on non-merge changeset')
819 819 )
820 820 parent = p1
821 821
822 822 # the backout should appear on the same branch
823 823 branch = repo.dirstate.branch()
824 824 bheads = repo.branchheads(branch)
825 825 rctx = scmutil.revsingle(repo, hex(parent))
826 826 if not opts.get(b'merge') and op1 != node:
827 827 with repo.transaction(b"backout"):
828 828 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
829 829 with ui.configoverride(overrides, b'backout'):
830 830 stats = mergemod.back_out(ctx, parent=repo[parent])
831 831 repo.setparents(op1, op2)
832 832 hg._showstats(repo, stats)
833 833 if stats.unresolvedcount:
834 834 repo.ui.status(
835 835 _(b"use 'hg resolve' to retry unresolved file merges\n")
836 836 )
837 837 return 1
838 838 else:
839 839 hg.clean(repo, node, show_stats=False)
840 840 repo.dirstate.setbranch(branch, repo.currenttransaction())
841 841 cmdutil.revert(ui, repo, rctx)
842 842
843 843 if opts.get(b'no_commit'):
844 844 msg = _(b"changeset %s backed out, don't forget to commit.\n")
845 845 ui.status(msg % short(node))
846 846 return 0
847 847
848 848 def commitfunc(ui, repo, message, match, opts):
849 849 editform = b'backout'
850 850 e = cmdutil.getcommiteditor(
851 851 editform=editform, **pycompat.strkwargs(opts)
852 852 )
853 853 if not message:
854 854 # we don't translate commit messages
855 855 message = b"Backed out changeset %s" % short(node)
856 856 e = cmdutil.getcommiteditor(edit=True, editform=editform)
857 857 return repo.commit(
858 858 message, opts.get(b'user'), opts.get(b'date'), match, editor=e
859 859 )
860 860
861 861 # save to detect changes
862 862 tip = repo.changelog.tip()
863 863
864 864 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
865 865 if not newnode:
866 866 ui.status(_(b"nothing changed\n"))
867 867 return 1
868 868 cmdutil.commitstatus(repo, newnode, branch, bheads, tip)
869 869
870 870 def nice(node):
871 871 return b'%d:%s' % (repo.changelog.rev(node), short(node))
872 872
873 873 ui.status(
874 874 _(b'changeset %s backs out changeset %s\n')
875 875 % (nice(newnode), nice(node))
876 876 )
877 877 if opts.get(b'merge') and op1 != node:
878 878 hg.clean(repo, op1, show_stats=False)
879 879 ui.status(_(b'merging with changeset %s\n') % nice(newnode))
880 880 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
881 881 with ui.configoverride(overrides, b'backout'):
882 882 return hg.merge(repo[b'tip'])
883 883 return 0
884 884
885 885
886 886 @command(
887 887 b'bisect',
888 888 [
889 889 (b'r', b'reset', False, _(b'reset bisect state')),
890 890 (b'g', b'good', False, _(b'mark changeset good')),
891 891 (b'b', b'bad', False, _(b'mark changeset bad')),
892 892 (b's', b'skip', False, _(b'skip testing changeset')),
893 893 (b'e', b'extend', False, _(b'extend the bisect range')),
894 894 (
895 895 b'c',
896 896 b'command',
897 897 b'',
898 898 _(b'use command to check changeset state'),
899 899 _(b'CMD'),
900 900 ),
901 901 (b'U', b'noupdate', False, _(b'do not update to target')),
902 902 ],
903 903 _(b"[-gbsr] [-U] [-c CMD] [REV]"),
904 904 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
905 905 )
906 906 def bisect(
907 907 ui,
908 908 repo,
909 909 positional_1=None,
910 910 positional_2=None,
911 911 command=None,
912 912 reset=None,
913 913 good=None,
914 914 bad=None,
915 915 skip=None,
916 916 extend=None,
917 917 noupdate=None,
918 918 ):
919 919 """subdivision search of changesets
920 920
921 921 This command helps to find changesets which introduce problems. To
922 922 use, mark the earliest changeset you know exhibits the problem as
923 923 bad, then mark the latest changeset which is free from the problem
924 924 as good. Bisect will update your working directory to a revision
925 925 for testing (unless the -U/--noupdate option is specified). Once
926 926 you have performed tests, mark the working directory as good or
927 927 bad, and bisect will either update to another candidate changeset
928 928 or announce that it has found the bad revision.
929 929
930 930 As a shortcut, you can also use the revision argument to mark a
931 931 revision as good or bad without checking it out first.
932 932
933 933 If you supply a command, it will be used for automatic bisection.
934 934 The environment variable HG_NODE will contain the ID of the
935 935 changeset being tested. The exit status of the command will be
936 936 used to mark revisions as good or bad: status 0 means good, 125
937 937 means to skip the revision, 127 (command not found) will abort the
938 938 bisection, and any other non-zero exit status means the revision
939 939 is bad.
940 940
941 941 .. container:: verbose
942 942
943 943 Some examples:
944 944
945 945 - start a bisection with known bad revision 34, and good revision 12::
946 946
947 947 hg bisect --bad 34
948 948 hg bisect --good 12
949 949
950 950 - advance the current bisection by marking current revision as good or
951 951 bad::
952 952
953 953 hg bisect --good
954 954 hg bisect --bad
955 955
956 956 - mark the current revision, or a known revision, to be skipped (e.g. if
957 957 that revision is not usable because of another issue)::
958 958
959 959 hg bisect --skip
960 960 hg bisect --skip 23
961 961
962 962 - skip all revisions that do not touch directories ``foo`` or ``bar``::
963 963
964 964 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
965 965
966 966 - forget the current bisection::
967 967
968 968 hg bisect --reset
969 969
970 970 - use 'make && make tests' to automatically find the first broken
971 971 revision::
972 972
973 973 hg bisect --reset
974 974 hg bisect --bad 34
975 975 hg bisect --good 12
976 976 hg bisect --command "make && make tests"
977 977
978 978 - see all changesets whose states are already known in the current
979 979 bisection::
980 980
981 981 hg log -r "bisect(pruned)"
982 982
983 983 - see the changeset currently being bisected (especially useful
984 984 if running with -U/--noupdate)::
985 985
986 986 hg log -r "bisect(current)"
987 987
988 988 - see all changesets that took part in the current bisection::
989 989
990 990 hg log -r "bisect(range)"
991 991
992 992 - you can even get a nice graph::
993 993
994 994 hg log --graph -r "bisect(range)"
995 995
996 996 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
997 997
998 998 Returns 0 on success.
999 999 """
1000 1000 rev = []
1001 1001 # backward compatibility
1002 1002 if positional_1 in (b"good", b"bad", b"reset", b"init"):
1003 1003 ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
1004 1004 cmd = positional_1
1005 1005 rev.append(positional_2)
1006 1006 if cmd == b"good":
1007 1007 good = True
1008 1008 elif cmd == b"bad":
1009 1009 bad = True
1010 1010 else:
1011 1011 reset = True
1012 1012 elif positional_2:
1013 1013 raise error.InputError(_(b'incompatible arguments'))
1014 1014 elif positional_1 is not None:
1015 1015 rev.append(positional_1)
1016 1016
1017 1017 incompatibles = {
1018 1018 b'--bad': bad,
1019 1019 b'--command': bool(command),
1020 1020 b'--extend': extend,
1021 1021 b'--good': good,
1022 1022 b'--reset': reset,
1023 1023 b'--skip': skip,
1024 1024 }
1025 1025
1026 1026 enabled = [x for x in incompatibles if incompatibles[x]]
1027 1027
1028 1028 if len(enabled) > 1:
1029 1029 raise error.InputError(
1030 1030 _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
1031 1031 )
1032 1032
1033 1033 if reset:
1034 1034 hbisect.resetstate(repo)
1035 1035 return
1036 1036
1037 1037 state = hbisect.load_state(repo)
1038 1038
1039 1039 if rev:
1040 1040 revs = logcmdutil.revrange(repo, rev)
1041 1041 goodnodes = state[b'good']
1042 1042 badnodes = state[b'bad']
1043 1043 if goodnodes and badnodes:
1044 1044 candidates = repo.revs(b'(%ln)::(%ln)', goodnodes, badnodes)
1045 1045 candidates += repo.revs(b'(%ln)::(%ln)', badnodes, goodnodes)
1046 1046 revs = candidates & revs
1047 1047 nodes = [repo.changelog.node(i) for i in revs]
1048 1048 else:
1049 1049 nodes = [repo.lookup(b'.')]
1050 1050
1051 1051 # update state
1052 1052 if good or bad or skip:
1053 1053 if good:
1054 1054 state[b'good'] += nodes
1055 1055 elif bad:
1056 1056 state[b'bad'] += nodes
1057 1057 elif skip:
1058 1058 state[b'skip'] += nodes
1059 1059 hbisect.save_state(repo, state)
1060 1060 if not (state[b'good'] and state[b'bad']):
1061 1061 return
1062 1062
1063 1063 def mayupdate(repo, node, show_stats=True):
1064 1064 """common used update sequence"""
1065 1065 if noupdate:
1066 1066 return
1067 1067 cmdutil.checkunfinished(repo)
1068 1068 cmdutil.bailifchanged(repo)
1069 1069 return hg.clean(repo, node, show_stats=show_stats)
1070 1070
1071 1071 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
1072 1072
1073 1073 if command:
1074 1074 changesets = 1
1075 1075 if noupdate:
1076 1076 try:
1077 1077 node = state[b'current'][0]
1078 1078 except LookupError:
1079 1079 raise error.StateError(
1080 1080 _(
1081 1081 b'current bisect revision is unknown - '
1082 1082 b'start a new bisect to fix'
1083 1083 )
1084 1084 )
1085 1085 else:
1086 1086 node, p2 = repo.dirstate.parents()
1087 1087 if p2 != repo.nullid:
1088 1088 raise error.StateError(_(b'current bisect revision is a merge'))
1089 1089 if rev:
1090 1090 if not nodes:
1091 1091 raise error.InputError(_(b'empty revision set'))
1092 1092 node = repo[nodes[-1]].node()
1093 1093 with hbisect.restore_state(repo, state, node):
1094 1094 while changesets:
1095 1095 # update state
1096 1096 state[b'current'] = [node]
1097 1097 hbisect.save_state(repo, state)
1098 1098 status = ui.system(
1099 1099 command,
1100 1100 environ={b'HG_NODE': hex(node)},
1101 1101 blockedtag=b'bisect_check',
1102 1102 )
1103 1103 if status == 125:
1104 1104 transition = b"skip"
1105 1105 elif status == 0:
1106 1106 transition = b"good"
1107 1107 # status < 0 means process was killed
1108 1108 elif status == 127:
1109 1109 raise error.Abort(_(b"failed to execute %s") % command)
1110 1110 elif status < 0:
1111 1111 raise error.Abort(_(b"%s killed") % command)
1112 1112 else:
1113 1113 transition = b"bad"
1114 1114 state[transition].append(node)
1115 1115 ctx = repo[node]
1116 1116 summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect')
1117 1117 ui.status(_(b'changeset %s: %s\n') % (summary, transition))
1118 1118 hbisect.checkstate(state)
1119 1119 # bisect
1120 1120 nodes, changesets, bgood = hbisect.bisect(repo, state)
1121 1121 # update to next check
1122 1122 node = nodes[0]
1123 1123 mayupdate(repo, node, show_stats=False)
1124 1124 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
1125 1125 return
1126 1126
1127 1127 hbisect.checkstate(state)
1128 1128
1129 1129 # actually bisect
1130 1130 nodes, changesets, good = hbisect.bisect(repo, state)
1131 1131 if extend:
1132 1132 if not changesets:
1133 1133 extendctx = hbisect.extendrange(repo, state, nodes, good)
1134 1134 if extendctx is not None:
1135 1135 ui.write(
1136 1136 _(b"Extending search to changeset %s\n")
1137 1137 % cmdutil.format_changeset_summary(ui, extendctx, b'bisect')
1138 1138 )
1139 1139 state[b'current'] = [extendctx.node()]
1140 1140 hbisect.save_state(repo, state)
1141 1141 return mayupdate(repo, extendctx.node())
1142 1142 raise error.StateError(_(b"nothing to extend"))
1143 1143
1144 1144 if changesets == 0:
1145 1145 hbisect.printresult(ui, repo, state, displayer, nodes, good)
1146 1146 else:
1147 1147 assert len(nodes) == 1 # only a single node can be tested next
1148 1148 node = nodes[0]
1149 1149 # compute the approximate number of remaining tests
1150 1150 tests, size = 0, 2
1151 1151 while size <= changesets:
1152 1152 tests, size = tests + 1, size * 2
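# 'tests' is now roughly log2(changesets): each remaining test halves the range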
1153 1153 rev = repo.changelog.rev(node)
1154 1154 summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect')
1155 1155 ui.write(
1156 1156 _(
1157 1157 b"Testing changeset %s "
1158 1158 b"(%d changesets remaining, ~%d tests)\n"
1159 1159 )
1160 1160 % (summary, changesets, tests)
1161 1161 )
1162 1162 state[b'current'] = [node]
1163 1163 hbisect.save_state(repo, state)
1164 1164 return mayupdate(repo, node)
1165 1165
1166 1166
1167 1167 @command(
1168 1168 b'bookmarks|bookmark',
1169 1169 [
1170 1170 (b'f', b'force', False, _(b'force')),
1171 1171 (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')),
1172 1172 (b'd', b'delete', False, _(b'delete a given bookmark')),
1173 1173 (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')),
1174 1174 (b'i', b'inactive', False, _(b'mark a bookmark inactive')),
1175 1175 (b'l', b'list', False, _(b'list existing bookmarks')),
1176 1176 ]
1177 1177 + formatteropts,
1178 1178 _(b'hg bookmarks [OPTIONS]... [NAME]...'),
1179 1179 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1180 1180 )
1181 1181 def bookmark(ui, repo, *names, **opts):
1182 1182 """create a new bookmark or list existing bookmarks
1183 1183
1184 1184 Bookmarks are labels on changesets to help track lines of development.
1185 1185 Bookmarks are unversioned and can be moved, renamed and deleted.
1186 1186 Deleting or moving a bookmark has no effect on the associated changesets.
1187 1187
1188 1188 Creating or updating to a bookmark causes it to be marked as 'active'.
1189 1189 The active bookmark is indicated with a '*'.
1190 1190 When a commit is made, the active bookmark will advance to the new commit.
1191 1191 A plain :hg:`update` will also advance an active bookmark, if possible.
1192 1192 Updating away from a bookmark will cause it to be deactivated.
1193 1193
1194 1194 Bookmarks can be pushed and pulled between repositories (see
1195 1195 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
1196 1196 diverged, a new 'divergent bookmark' of the form 'name@path' will
1197 1197 be created. Using :hg:`merge` will resolve the divergence.
1198 1198
1199 1199 Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying
1200 1200 the active bookmark's name.
1201 1201
1202 1202 A bookmark named '@' has the special property that :hg:`clone` will
1203 1203 check it out by default if it exists.
1204 1204
1205 1205 .. container:: verbose
1206 1206
1207 1207 Template:
1208 1208
1209 1209 The following keywords are supported in addition to the common template
1210 1210 keywords and functions such as ``{bookmark}``. See also
1211 1211 :hg:`help templates`.
1212 1212
1213 1213 :active: Boolean. True if the bookmark is active.
1214 1214
1215 1215 Examples:
1216 1216
1217 1217 - create an active bookmark for a new line of development::
1218 1218
1219 1219 hg book new-feature
1220 1220
1221 1221 - create an inactive bookmark as a place marker::
1222 1222
1223 1223 hg book -i reviewed
1224 1224
1225 1225 - create an inactive bookmark on another changeset::
1226 1226
1227 1227 hg book -r .^ tested
1228 1228
1229 1229 - rename bookmark turkey to dinner::
1230 1230
1231 1231 hg book -m turkey dinner
1232 1232
1233 1233 - move the '@' bookmark from another branch::
1234 1234
1235 1235 hg book -f @
1236 1236
1237 1237 - print only the active bookmark name::
1238 1238
1239 1239 hg book -ql .
1240 1240 """
1241 1241 opts = pycompat.byteskwargs(opts)
1242 1242 force = opts.get(b'force')
1243 1243 rev = opts.get(b'rev')
1244 1244 inactive = opts.get(b'inactive') # meaning add/rename to inactive bookmark
1245 1245
1246 1246 action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list')
1247 1247 if action:
1248 1248 cmdutil.check_incompatible_arguments(opts, action, [b'rev'])
1249 1249 elif names or rev:
1250 1250 action = b'add'
1251 1251 elif inactive:
1252 1252 action = b'inactive' # meaning deactivate
1253 1253 else:
1254 1254 action = b'list'
1255 1255
1256 1256 cmdutil.check_incompatible_arguments(
1257 1257 opts, b'inactive', [b'delete', b'list']
1258 1258 )
1259 1259 if not names and action in {b'add', b'delete'}:
1260 1260 raise error.InputError(_(b"bookmark name required"))
1261 1261
1262 1262 if action in {b'add', b'delete', b'rename', b'inactive'}:
1263 1263 with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
1264 1264 if action == b'delete':
1265 1265 names = pycompat.maplist(repo._bookmarks.expandname, names)
1266 1266 bookmarks.delete(repo, tr, names)
1267 1267 elif action == b'rename':
1268 1268 if not names:
1269 1269 raise error.InputError(_(b"new bookmark name required"))
1270 1270 elif len(names) > 1:
1271 1271 raise error.InputError(
1272 1272 _(b"only one new bookmark name allowed")
1273 1273 )
1274 1274 oldname = repo._bookmarks.expandname(opts[b'rename'])
1275 1275 bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
1276 1276 elif action == b'add':
1277 1277 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
1278 1278 elif action == b'inactive':
1279 1279 if len(repo._bookmarks) == 0:
1280 1280 ui.status(_(b"no bookmarks set\n"))
1281 1281 elif not repo._activebookmark:
1282 1282 ui.status(_(b"no active bookmark\n"))
1283 1283 else:
1284 1284 bookmarks.deactivate(repo)
1285 1285 elif action == b'list':
1286 1286 names = pycompat.maplist(repo._bookmarks.expandname, names)
1287 1287 with ui.formatter(b'bookmarks', opts) as fm:
1288 1288 bookmarks.printbookmarks(ui, repo, fm, names)
1289 1289 else:
1290 1290 raise error.ProgrammingError(b'invalid action: %s' % action)
1291 1291
1292 1292
1293 1293 @command(
1294 1294 b'branch',
1295 1295 [
1296 1296 (
1297 1297 b'f',
1298 1298 b'force',
1299 1299 None,
1300 1300 _(b'set branch name even if it shadows an existing branch'),
1301 1301 ),
1302 1302 (b'C', b'clean', None, _(b'reset branch name to parent branch name')),
1303 1303 (
1304 1304 b'r',
1305 1305 b'rev',
1306 1306 [],
1307 1307 _(b'change branches of the given revs (EXPERIMENTAL)'),
1308 1308 ),
1309 1309 ],
1310 1310 _(b'[-fC] [NAME]'),
1311 1311 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1312 1312 )
1313 1313 def branch(ui, repo, label=None, **opts):
1314 1314 """set or show the current branch name
1315 1315
1316 1316 .. note::
1317 1317
1318 1318 Branch names are permanent and global. Use :hg:`bookmark` to create a
1319 1319 light-weight bookmark instead. See :hg:`help glossary` for more
1320 1320 information about named branches and bookmarks.
1321 1321
1322 1322 With no argument, show the current branch name. With one argument,
1323 1323 set the working directory branch name (the branch will not exist
1324 1324 in the repository until the next commit). Standard practice
1325 1325 recommends that primary development take place on the 'default'
1326 1326 branch.
1327 1327
1328 1328 Unless -f/--force is specified, branch will not let you set a
1329 1329 branch name that already exists.
1330 1330
1331 1331 Use -C/--clean to reset the working directory branch to that of
1332 1332 the parent of the working directory, negating a previous branch
1333 1333 change.
1334 1334
1335 1335 Use the command :hg:`update` to switch to an existing branch. Use
1336 1336 :hg:`commit --close-branch` to mark this branch head as closed.
1337 1337 When all heads of a branch are closed, the branch will be
1338 1338 considered closed.
1339 1339
1340 1340 Returns 0 on success.
1341 1341 """
1342 1342 opts = pycompat.byteskwargs(opts)
1343 1343 revs = opts.get(b'rev')
1344 1344 if label:
1345 1345 label = label.strip()
1346 1346
1347 1347 if not opts.get(b'clean') and not label:
1348 1348 if revs:
1349 1349 raise error.InputError(
1350 1350 _(b"no branch name specified for the revisions")
1351 1351 )
1352 1352 ui.write(b"%s\n" % repo.dirstate.branch())
1353 1353 return
1354 1354
1355 1355 with repo.wlock():
1356 1356 if opts.get(b'clean'):
1357 1357 label = repo[b'.'].branch()
1358 1358 repo.dirstate.setbranch(label, repo.currenttransaction())
1359 1359 ui.status(_(b'reset working directory to branch %s\n') % label)
1360 1360 elif label:
1361 1361
1362 1362 scmutil.checknewlabel(repo, label, b'branch')
1363 1363 if revs:
1364 1364 return cmdutil.changebranch(ui, repo, revs, label, opts)
1365 1365
1366 1366 if not opts.get(b'force') and label in repo.branchmap():
1367 1367 if label not in [p.branch() for p in repo[None].parents()]:
1368 1368 raise error.InputError(
1369 1369 _(b'a branch of the same name already exists'),
1370 1370 # i18n: "it" refers to an existing branch
1371 1371 hint=_(b"use 'hg update' to switch to it"),
1372 1372 )
1373 1373
1374 1374 repo.dirstate.setbranch(label, repo.currenttransaction())
1375 1375 ui.status(_(b'marked working directory as branch %s\n') % label)
1376 1376
1377 1377 # find any open named branches aside from default
1378 1378 for n, h, t, c in repo.branchmap().iterbranches():
1379 1379 if n != b"default" and not c:
1380 1380 return 0
1381 1381 ui.status(
1382 1382 _(
1383 1383 b'(branches are permanent and global, '
1384 1384 b'did you want a bookmark?)\n'
1385 1385 )
1386 1386 )
1387 1387
1388 1388
1389 1389 @command(
1390 1390 b'branches',
1391 1391 [
1392 1392 (
1393 1393 b'a',
1394 1394 b'active',
1395 1395 False,
1396 1396 _(b'show only branches that have unmerged heads (DEPRECATED)'),
1397 1397 ),
1398 1398 (b'c', b'closed', False, _(b'show normal and closed branches')),
1399 1399 (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
1400 1400 ]
1401 1401 + formatteropts,
1402 1402 _(b'[-c]'),
1403 1403 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1404 1404 intents={INTENT_READONLY},
1405 1405 )
1406 1406 def branches(ui, repo, active=False, closed=False, **opts):
1407 1407 """list repository named branches
1408 1408
1409 1409 List the repository's named branches, indicating which ones are
1410 1410 inactive. If -c/--closed is specified, also list branches which have
1411 1411 been marked closed (see :hg:`commit --close-branch`).
1412 1412
1413 1413 Use the command :hg:`update` to switch to an existing branch.
1414 1414
1415 1415 .. container:: verbose
1416 1416
1417 1417 Template:
1418 1418
1419 1419 The following keywords are supported in addition to the common template
1420 1420 keywords and functions such as ``{branch}``. See also
1421 1421 :hg:`help templates`.
1422 1422
1423 1423 :active: Boolean. True if the branch is active.
1424 1424 :closed: Boolean. True if the branch is closed.
1425 1425 :current: Boolean. True if it is the current branch.
1426 1426
1427 1427 Returns 0.
1428 1428 """
1429 1429
1430 1430 opts = pycompat.byteskwargs(opts)
1431 1431 revs = opts.get(b'rev')
1432 1432 selectedbranches = None
1433 1433 if revs:
1434 1434 revs = logcmdutil.revrange(repo, revs)
1435 1435 getbi = repo.revbranchcache().branchinfo
1436 1436 selectedbranches = {getbi(r)[0] for r in revs}
1437 1437
1438 1438 ui.pager(b'branches')
1439 1439 fm = ui.formatter(b'branches', opts)
1440 1440 hexfunc = fm.hexfunc
1441 1441
1442 1442 allheads = set(repo.heads())
1443 1443 branches = []
1444 1444 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1445 1445 if selectedbranches is not None and tag not in selectedbranches:
1446 1446 continue
1447 1447 isactive = False
1448 1448 if not isclosed:
1449 1449 openheads = set(repo.branchmap().iteropen(heads))
1450 1450 isactive = bool(openheads & allheads)
1451 1451 branches.append((tag, repo[tip], isactive, not isclosed))
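# sort with active branches first, then by descending tip revision, then by name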
1452 1452 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True)
1453 1453
1454 1454 for tag, ctx, isactive, isopen in branches:
1455 1455 if active and not isactive:
1456 1456 continue
1457 1457 if isactive:
1458 1458 label = b'branches.active'
1459 1459 notice = b''
1460 1460 elif not isopen:
1461 1461 if not closed:
1462 1462 continue
1463 1463 label = b'branches.closed'
1464 1464 notice = _(b' (closed)')
1465 1465 else:
1466 1466 label = b'branches.inactive'
1467 1467 notice = _(b' (inactive)')
1468 1468 current = tag == repo.dirstate.branch()
1469 1469 if current:
1470 1470 label = b'branches.current'
1471 1471
1472 1472 fm.startitem()
1473 1473 fm.write(b'branch', b'%s', tag, label=label)
1474 1474 rev = ctx.rev()
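# pad so that the branch name plus revision number fill at least 31 columns,
# keeping the rev:node column aligned across rows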
1475 1475 padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0)
1476 1476 fmt = b' ' * padsize + b' %d:%s'
1477 1477 fm.condwrite(
1478 1478 not ui.quiet,
1479 1479 b'rev node',
1480 1480 fmt,
1481 1481 rev,
1482 1482 hexfunc(ctx.node()),
1483 1483 label=b'log.changeset changeset.%s' % ctx.phasestr(),
1484 1484 )
1485 1485 fm.context(ctx=ctx)
1486 1486 fm.data(active=isactive, closed=not isopen, current=current)
1487 1487 if not ui.quiet:
1488 1488 fm.plain(notice)
1489 1489 fm.plain(b'\n')
1490 1490 fm.end()
1491 1491
1492 1492
1493 1493 @command(
1494 1494 b'bundle',
1495 1495 [
1496 1496 (
1497 1497 b'',
1498 1498 b'exact',
1499 1499 None,
1500 1500 _(b'compute the base from the revision specified'),
1501 1501 ),
1502 1502 (
1503 1503 b'f',
1504 1504 b'force',
1505 1505 None,
1506 1506 _(b'run even when the destination is unrelated'),
1507 1507 ),
1508 1508 (
1509 1509 b'r',
1510 1510 b'rev',
1511 1511 [],
1512 1512 _(b'a changeset intended to be added to the destination'),
1513 1513 _(b'REV'),
1514 1514 ),
1515 1515 (
1516 1516 b'b',
1517 1517 b'branch',
1518 1518 [],
1519 1519 _(b'a specific branch you would like to bundle'),
1520 1520 _(b'BRANCH'),
1521 1521 ),
1522 1522 (
1523 1523 b'',
1524 1524 b'base',
1525 1525 [],
1526 1526 _(b'a base changeset assumed to be available at the destination'),
1527 1527 _(b'REV'),
1528 1528 ),
1529 1529 (b'a', b'all', None, _(b'bundle all changesets in the repository')),
1530 1530 (
1531 1531 b't',
1532 1532 b'type',
1533 1533 b'bzip2',
1534 1534 _(b'bundle compression type to use'),
1535 1535 _(b'TYPE'),
1536 1536 ),
1537 1537 ]
1538 1538 + remoteopts,
1539 1539 _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'),
1540 1540 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1541 1541 )
1542 1542 def bundle(ui, repo, fname, *dests, **opts):
1543 1543 """create a bundle file
1544 1544
1545 1545 Generate a bundle file containing data to be transferred to another
1546 1546 repository.
1547 1547
1548 1548 To create a bundle containing all changesets, use -a/--all
1549 1549 (or --base null). Otherwise, hg assumes the destination will have
1550 1550 all the nodes you specify with the --base parameters. If no base is
1551 1551 given, hg will assume the destination has all the nodes of the
1552 1552 repositories you provide through the DEST arguments, or of
1553 1553 default-push/default if no destination is specified.
1554 1554
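For example (the tag name is illustrative), ``hg bundle --base 1.0 changes.hg``
writes only the changesets that are not ancestors of ``1.0``, assuming the
receiving repository already contains everything up to that point.
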
1555 1555 You can change bundle format with the -t/--type option. See
1556 1556 :hg:`help bundlespec` for documentation on this format. By default,
1557 1557 the most appropriate format is used and compression defaults to
1558 1558 bzip2.
1559 1559
1560 1560 The bundle file can then be transferred using conventional means
1561 1561 and applied to another repository with the unbundle or pull
1562 1562 command. This is useful when direct push and pull are not
1563 1563 available or when exporting an entire repository is undesirable.
1564 1564
1565 1565 Applying bundles preserves all changeset contents including
1566 1566 permissions, copy/rename information, and revision history.
1567 1567
1568 1568 Returns 0 on success, 1 if no changes found.
1569 1569 """
1570 1570 opts = pycompat.byteskwargs(opts)
1571 1571
1572 1572 revs = None
1573 1573 if b'rev' in opts:
1574 1574 revstrings = opts[b'rev']
1575 1575 revs = logcmdutil.revrange(repo, revstrings)
1576 1576 if revstrings and not revs:
1577 1577 raise error.InputError(_(b'no commits to bundle'))
1578 1578
1579 1579 bundletype = opts.get(b'type', b'bzip2').lower()
1580 1580 try:
1581 1581 bundlespec = bundlecaches.parsebundlespec(
1582 1582 repo, bundletype, strict=False
1583 1583 )
1584 1584 except error.UnsupportedBundleSpecification as e:
1585 1585 raise error.InputError(
1586 1586 pycompat.bytestr(e),
1587 1587 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1588 1588 )
1589 1589 cgversion = bundlespec.params[b"cg.version"]
1590 1590
1591 1591 # Packed bundles are a pseudo bundle format for now.
1592 1592 if cgversion == b's1':
1593 1593 raise error.InputError(
1594 1594 _(b'packed bundles cannot be produced by "hg bundle"'),
1595 1595 hint=_(b"use 'hg debugcreatestreamclonebundle'"),
1596 1596 )
1597 1597
1598 1598 if opts.get(b'all'):
1599 1599 if dests:
1600 1600 raise error.InputError(
1601 1601 _(b"--all is incompatible with specifying destinations")
1602 1602 )
1603 1603 if opts.get(b'base'):
1604 1604 ui.warn(_(b"ignoring --base because --all was specified\n"))
1605 1605 if opts.get(b'exact'):
1606 1606 ui.warn(_(b"ignoring --exact because --all was specified\n"))
1607 1607 base = [nullrev]
1608 1608 elif opts.get(b'exact'):
1609 1609 if dests:
1610 1610 raise error.InputError(
1611 1611 _(b"--exact is incompatible with specifying destinations")
1612 1612 )
1613 1613 if opts.get(b'base'):
1614 1614 ui.warn(_(b"ignoring --base because --exact was specified\n"))
1615 1615 base = repo.revs(b'parents(%ld) - %ld', revs, revs)
1616 1616 if not base:
1617 1617 base = [nullrev]
1618 1618 else:
1619 1619 base = logcmdutil.revrange(repo, opts.get(b'base'))
1620 1620 if cgversion not in changegroup.supportedoutgoingversions(repo):
1621 1621 raise error.Abort(
1622 1622 _(b"repository does not support bundle version %s") % cgversion
1623 1623 )
1624 1624
1625 1625 if base:
1626 1626 if dests:
1627 1627 raise error.InputError(
1628 1628 _(b"--base is incompatible with specifying destinations")
1629 1629 )
1630 1630 cl = repo.changelog
1631 1631 common = [cl.node(rev) for rev in base]
1632 1632 heads = [cl.node(r) for r in revs] if revs else None
1633 1633 outgoing = discovery.outgoing(repo, common, heads)
1634 1634 missing = outgoing.missing
1635 1635 excluded = outgoing.excluded
1636 1636 else:
1637 1637 missing = set()
1638 1638 excluded = set()
1639 1639 for path in urlutil.get_push_paths(repo, ui, dests):
1640 1640 other = hg.peer(repo, opts, path)
1641 1641 if revs is not None:
1642 1642 hex_revs = [repo[r].hex() for r in revs]
1643 1643 else:
1644 1644 hex_revs = None
1645 1645 branches = (path.branch, [])
1646 1646 head_revs, checkout = hg.addbranchrevs(
1647 1647 repo, repo, branches, hex_revs
1648 1648 )
1649 1649 heads = (
1650 1650 head_revs
1651 1651 and pycompat.maplist(repo.lookup, head_revs)
1652 1652 or head_revs
1653 1653 )
1654 1654 outgoing = discovery.findcommonoutgoing(
1655 1655 repo,
1656 1656 other,
1657 1657 onlyheads=heads,
1658 1658 force=opts.get(b'force'),
1659 1659 portable=True,
1660 1660 )
1661 1661 missing.update(outgoing.missing)
1662 1662 excluded.update(outgoing.excluded)
1663 1663
1664 1664 if not missing:
1665 1665 scmutil.nochangesfound(ui, repo, not base and excluded)
1666 1666 return 1
1667 1667
1668 1668 # Internal changesets are implementation details that should not
1669 1669 # leave the repository. Bundling with `hg bundle` creates such a risk.
1670 1670 bundled_internal = repo.revs(b"%ln and _internal()", missing)
1671 1671 if bundled_internal:
1672 1672 msg = _(b"cannot bundle internal changesets")
1673 1673 hint = _(b"%d internal changesets selected") % len(bundled_internal)
1674 1674 raise error.Abort(msg, hint=hint)
1675 1675
1676 1676 if heads:
1677 1677 outgoing = discovery.outgoing(
1678 1678 repo, missingroots=missing, ancestorsof=heads
1679 1679 )
1680 1680 else:
1681 1681 outgoing = discovery.outgoing(repo, missingroots=missing)
1682 1682 outgoing.excluded = sorted(excluded)
1683 1683
1684 1684 if cgversion == b'01': # bundle1
1685 1685 bversion = b'HG10' + bundlespec.wirecompression
1686 1686 bcompression = None
1687 1687 elif cgversion in (b'02', b'03'):
1688 1688 bversion = b'HG20'
1689 1689 bcompression = bundlespec.wirecompression
1690 1690 else:
1691 1691 raise error.ProgrammingError(
1692 1692 b'bundle: unexpected changegroup version %s' % cgversion
1693 1693 )
1694 1694
1695 1695 # TODO compression options should be derived from bundlespec parsing.
1696 1696 # This is a temporary hack to allow adjusting bundle compression
1697 1697 # level without a) formalizing the bundlespec changes to declare it
1698 1698 # b) introducing a command flag.
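# For illustration (the values are hypothetical), the settings read below
# would look like this in an hgrc:
#
#   [experimental]
#   bundlecomplevel.zstd = 9
#   bundlecompthreads = 4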
1699 1699 compopts = {}
1700 1700 complevel = ui.configint(
1701 1701 b'experimental', b'bundlecomplevel.' + bundlespec.compression
1702 1702 )
1703 1703 if complevel is None:
1704 1704 complevel = ui.configint(b'experimental', b'bundlecomplevel')
1705 1705 if complevel is not None:
1706 1706 compopts[b'level'] = complevel
1707 1707
1708 1708 compthreads = ui.configint(
1709 1709 b'experimental', b'bundlecompthreads.' + bundlespec.compression
1710 1710 )
1711 1711 if compthreads is None:
1712 1712 compthreads = ui.configint(b'experimental', b'bundlecompthreads')
1713 1713 if compthreads is not None:
1714 1714 compopts[b'threads'] = compthreads
1715 1715
1716 1716 # Bundling of obsmarker and phases is optional as not all clients
1717 1717 # support the necessary features.
1718 1718 cfg = ui.configbool
1719 1719 obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
1720 1720 bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
1721 1721 obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
1722 1722 bundlespec.set_param(
1723 1723 b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
1724 1724 )
1725 1725 if not bundlespec.params.get(b'phases', False):
1726 1726 phases_cfg = cfg(b'experimental', b'bundle-phases')
1727 1727 bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
1728 1728
1729 1729 bundle2.writenewbundle(
1730 1730 ui,
1731 1731 repo,
1732 1732 b'bundle',
1733 1733 fname,
1734 1734 bversion,
1735 1735 outgoing,
1736 1736 bundlespec.params,
1737 1737 compression=bcompression,
1738 1738 compopts=compopts,
1739 1739 )
1740 1740
1741 1741
1742 1742 @command(
1743 1743 b'cat',
1744 1744 [
1745 1745 (
1746 1746 b'o',
1747 1747 b'output',
1748 1748 b'',
1749 1749 _(b'print output to file with formatted name'),
1750 1750 _(b'FORMAT'),
1751 1751 ),
1752 1752 (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
1753 1753 (b'', b'decode', None, _(b'apply any matching decode filter')),
1754 1754 ]
1755 1755 + walkopts
1756 1756 + formatteropts,
1757 1757 _(b'[OPTION]... FILE...'),
1758 1758 helpcategory=command.CATEGORY_FILE_CONTENTS,
1759 1759 inferrepo=True,
1760 1760 intents={INTENT_READONLY},
1761 1761 )
1762 1762 def cat(ui, repo, file1, *pats, **opts):
1763 1763 """output the current or given revision of files
1764 1764
1765 1765 Print the specified files as they were at the given revision. If
1766 1766 no revision is given, the parent of the working directory is used.
1767 1767
1768 1768 Output may be to a file, in which case the name of the file is
1769 1769 given using a template string. See :hg:`help templates`. In addition
1770 1770 to the common template keywords, the following formatting rules are
1771 1771 supported:
1772 1772
1773 1773 :``%%``: literal "%" character
1774 1774 :``%s``: basename of file being printed
1775 1775 :``%d``: dirname of file being printed, or '.' if in repository root
1776 1776 :``%p``: root-relative path name of file being printed
1777 1777 :``%H``: changeset hash (40 hexadecimal digits)
1778 1778 :``%R``: changeset revision number
1779 1779 :``%h``: short-form changeset hash (12 hexadecimal digits)
1780 1780 :``%r``: zero-padded changeset revision number
1781 1781 :``%b``: basename of the exporting repository
1782 1782 :``\\``: literal "\\" character
1783 1783
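For example (the path is illustrative), ``hg cat -r . -o 'backup/%p' foo/bar.c``
writes that revision of ``foo/bar.c`` to ``backup/foo/bar.c``.
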
1784 1784 .. container:: verbose
1785 1785
1786 1786 Template:
1787 1787
1788 1788 The following keywords are supported in addition to the common template
1789 1789 keywords and functions. See also :hg:`help templates`.
1790 1790
1791 1791 :data: String. File content.
1792 1792 :path: String. Repository-absolute path of the file.
1793 1793
1794 1794 Returns 0 on success.
1795 1795 """
1796 1796 opts = pycompat.byteskwargs(opts)
1797 1797 rev = opts.get(b'rev')
1798 1798 if rev:
1799 1799 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
1800 1800 ctx = logcmdutil.revsingle(repo, rev)
1801 1801 m = scmutil.match(ctx, (file1,) + pats, opts)
1802 1802 fntemplate = opts.pop(b'output', b'')
1803 1803 if cmdutil.isstdiofilename(fntemplate):
1804 1804 fntemplate = b''
1805 1805
1806 1806 if fntemplate:
1807 1807 fm = formatter.nullformatter(ui, b'cat', opts)
1808 1808 else:
1809 1809 ui.pager(b'cat')
1810 1810 fm = ui.formatter(b'cat', opts)
1811 1811 with fm:
1812 1812 return cmdutil.cat(
1813 1813 ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts)
1814 1814 )
1815 1815
1816 1816
1817 1817 @command(
1818 1818 b'clone',
1819 1819 [
1820 1820 (
1821 1821 b'U',
1822 1822 b'noupdate',
1823 1823 None,
1824 1824 _(
1825 1825 b'the clone will include an empty working '
1826 1826 b'directory (only a repository)'
1827 1827 ),
1828 1828 ),
1829 1829 (
1830 1830 b'u',
1831 1831 b'updaterev',
1832 1832 b'',
1833 1833 _(b'revision, tag, or branch to check out'),
1834 1834 _(b'REV'),
1835 1835 ),
1836 1836 (
1837 1837 b'r',
1838 1838 b'rev',
1839 1839 [],
1840 1840 _(
1841 1841 b'do not clone everything, but include this changeset'
1842 1842 b' and its ancestors'
1843 1843 ),
1844 1844 _(b'REV'),
1845 1845 ),
1846 1846 (
1847 1847 b'b',
1848 1848 b'branch',
1849 1849 [],
1850 1850 _(
1851 1851 b'do not clone everything, but include this branch\'s'
1852 1852 b' changesets and their ancestors'
1853 1853 ),
1854 1854 _(b'BRANCH'),
1855 1855 ),
1856 1856 (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
1857 1857 (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
1858 1858 (b'', b'stream', None, _(b'clone with minimal data processing')),
1859 1859 ]
1860 1860 + remoteopts,
1861 1861 _(b'[OPTION]... SOURCE [DEST]'),
1862 1862 helpcategory=command.CATEGORY_REPO_CREATION,
1863 1863 helpbasic=True,
1864 1864 norepo=True,
1865 1865 )
1866 1866 def clone(ui, source, dest=None, **opts):
1867 1867 """make a copy of an existing repository
1868 1868
1869 1869 Create a copy of an existing repository in a new directory.
1870 1870
1871 1871 If no destination directory name is specified, it defaults to the
1872 1872 basename of the source.
1873 1873
1874 1874 The location of the source is added to the new repository's
1875 1875 ``.hg/hgrc`` file, as the default to be used for future pulls.
1876 1876
1877 1877 Only local paths and ``ssh://`` URLs are supported as
1878 1878 destinations. For ``ssh://`` destinations, no working directory or
1879 1879 ``.hg/hgrc`` will be created on the remote side.
1880 1880
1881 1881 If the source repository has a bookmark called '@' set, that
1882 1882 revision will be checked out in the new repository by default.
1883 1883
1884 1884 To check out a particular version, use -u/--update, or
1885 1885 -U/--noupdate to create a clone with no working directory.
1886 1886
1887 1887 To pull only a subset of changesets, specify one or more revisions
1888 1888 identifiers with -r/--rev or branches with -b/--branch. The
1889 1889 resulting clone will contain only the specified changesets and
1890 1890 their ancestors. These options (or 'clone src#rev dest') imply
1891 1891 --pull, even for local source repositories.
1892 1892
1893 1893 In normal clone mode, the remote normalizes repository data into a common
1894 1894 exchange format and the receiving end translates this data into its local
1895 1895 storage format. --stream activates a different clone mode that essentially
1896 1896 copies repository files from the remote with minimal data processing. This
1897 1897 significantly reduces the CPU cost of a clone both remotely and locally.
1898 1898 However, it often increases the transferred data size by 30-40%. This can
1899 1899 result in substantially faster clones where I/O throughput is plentiful,
1900 1900 especially for larger repositories. A side-effect of --stream clones is
1901 1901 that storage settings and requirements on the remote are applied locally:
1902 1902 a modern client may inherit legacy or inefficient storage used by the
1903 1903 remote, or a legacy Mercurial client may not be able to clone from a
1904 1904 modern Mercurial remote.
1905 1905
1906 1906 .. note::
1907 1907
1908 1908 Specifying a tag will include the tagged changeset but not the
1909 1909 changeset containing the tag.
1910 1910
1911 1911 .. container:: verbose
1912 1912
1913 1913 For efficiency, hardlinks are used for cloning whenever the
1914 1914 source and destination are on the same filesystem (note this
1915 1915 applies only to the repository data, not to the working
1916 1916 directory). Some filesystems, such as AFS, implement hardlinking
1917 1917 incorrectly, but do not report errors. In these cases, use the
1918 1918 --pull option to avoid hardlinking.
1919 1919
1920 1920 Mercurial will update the working directory to the first applicable
1921 1921 revision from this list:
1922 1922
1923 1923 a) null if -U or the source repository has no changesets
1924 1924 b) if -u . and the source repository is local, the first parent of
1925 1925 the source repository's working directory
1926 1926 c) the changeset specified with -u (if a branch name, this means the
1927 1927 latest head of that branch)
1928 1928 d) the changeset specified with -r
1929 1929 e) the tipmost head specified with -b
1930 1930 f) the tipmost head specified with the url#branch source syntax
1931 1931 g) the revision marked with the '@' bookmark, if present
1932 1932 h) the tipmost head of the default branch
1933 1933 i) tip
1934 1934
1935 1935 When cloning from servers that support it, Mercurial may fetch
1936 1936 pre-generated data from a server-advertised URL or inline from the
1937 1937 same stream. When this is done, hooks operating on incoming changesets
1938 1938 and changegroups may fire more than once, once for each pre-generated
1939 1939 bundle as well as for any additional remaining data. In addition,
1940 1940 if an error occurs, the repository may be rolled back to a partial
1941 1941 clone. This behavior may change in future releases.
1942 1942 See :hg:`help -e clonebundles` for more.
1943 1943
1944 1944 Examples:
1945 1945
1946 1946 - clone a remote repository to a new directory named hg/::
1947 1947
1948 1948 hg clone https://www.mercurial-scm.org/repo/hg/
1949 1949
1950 1950 - create a lightweight local clone::
1951 1951
1952 1952 hg clone project/ project-feature/
1953 1953
1954 1954 - clone from an absolute path on an ssh server (note double-slash)::
1955 1955
1956 1956 hg clone ssh://user@server//home/projects/alpha/
1957 1957
1958 1958 - do a streaming clone while checking out a specified version::
1959 1959
1960 1960 hg clone --stream http://server/repo -u 1.5
1961 1961
1962 1962 - create a repository without changesets after a particular revision::
1963 1963
1964 1964 hg clone -r 04e544 experimental/ good/
1965 1965
1966 1966 - clone (and track) a particular named branch::
1967 1967
1968 1968 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1969 1969
1970 1970 See :hg:`help urls` for details on specifying URLs.
1971 1971
1972 1972 Returns 0 on success.
1973 1973 """
1974 1974 opts = pycompat.byteskwargs(opts)
1975 1975 cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev')
1976 1976
1977 1977 # --include/--exclude can come from narrow or sparse.
1978 1978 includepats, excludepats = None, None
1979 1979
1980 1980 # hg.clone() differentiates between None and an empty set. So make sure
1981 1981 # patterns are sets if narrow is requested without patterns.
1982 1982 if opts.get(b'narrow'):
1983 1983 includepats = set()
1984 1984 excludepats = set()
1985 1985
1986 1986 if opts.get(b'include'):
1987 1987 includepats = narrowspec.parsepatterns(opts.get(b'include'))
1988 1988 if opts.get(b'exclude'):
1989 1989 excludepats = narrowspec.parsepatterns(opts.get(b'exclude'))
1990 1990
1991 1991 r = hg.clone(
1992 1992 ui,
1993 1993 opts,
1994 1994 source,
1995 1995 dest,
1996 1996 pull=opts.get(b'pull'),
1997 1997 stream=opts.get(b'stream') or opts.get(b'uncompressed'),
1998 1998 revs=opts.get(b'rev'),
1999 1999 update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
2000 2000 branch=opts.get(b'branch'),
2001 2001 shareopts=opts.get(b'shareopts'),
2002 2002 storeincludepats=includepats,
2003 2003 storeexcludepats=excludepats,
2004 2004 depth=opts.get(b'depth') or None,
2005 2005 )
2006 2006
2007 2007 return r is None
2008 2008
2009 2009
2010 2010 @command(
2011 2011 b'commit|ci',
2012 2012 [
2013 2013 (
2014 2014 b'A',
2015 2015 b'addremove',
2016 2016 None,
2017 2017 _(b'mark new/missing files as added/removed before committing'),
2018 2018 ),
2019 2019 (b'', b'close-branch', None, _(b'mark a branch head as closed')),
2020 2020 (b'', b'amend', None, _(b'amend the parent of the working directory')),
2021 2021 (b's', b'secret', None, _(b'use the secret phase for committing')),
2022 2022 (b'', b'draft', None, _(b'use the draft phase for committing')),
2023 2023 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
2024 2024 (
2025 2025 b'',
2026 2026 b'force-close-branch',
2027 2027 None,
2028 2028 _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
2029 2029 ),
2030 2030 (b'i', b'interactive', None, _(b'use interactive mode')),
2031 2031 ]
2032 2032 + walkopts
2033 2033 + commitopts
2034 2034 + commitopts2
2035 2035 + subrepoopts,
2036 2036 _(b'[OPTION]... [FILE]...'),
2037 2037 helpcategory=command.CATEGORY_COMMITTING,
2038 2038 helpbasic=True,
2039 2039 inferrepo=True,
2040 2040 )
2041 2041 def commit(ui, repo, *pats, **opts):
2042 2042 """commit the specified files or all outstanding changes
2043 2043
2044 2044 Commit changes to the given files into the repository. Unlike a
2045 2045 centralized SCM, this operation is a local operation. See
2046 2046 :hg:`push` for a way to actively distribute your changes.
2047 2047
2048 2048 If a list of files is omitted, all changes reported by :hg:`status`
2049 2049 will be committed.
2050 2050
2051 2051 If you are committing the result of a merge, do not provide any
2052 2052 filenames or -I/-X filters.
2053 2053
2054 2054 If no commit message is specified, Mercurial starts your
2055 2055 configured editor where you can enter a message. In case your
2056 2056 commit fails, you will find a backup of your message in
2057 2057 ``.hg/last-message.txt``.
2058 2058
2059 2059 The --close-branch flag can be used to mark the current branch
2060 2060 head closed. When all heads of a branch are closed, the branch
2061 2061 will be considered closed and no longer listed.
2062 2062
2063 2063 The --amend flag can be used to amend the parent of the
2064 2064 working directory with a new commit that contains the changes
2065 2065 in the parent in addition to those currently reported by :hg:`status`,
2066 2066 if there are any. The old commit is stored in a backup bundle in
2067 2067 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
2068 2068 on how to restore it).
2069 2069
2070 2070 Message, user and date are taken from the amended commit unless
2071 2071 specified. When a message isn't specified on the command line,
2072 2072 the editor will open with the message of the amended commit.
2073 2073
2074 2074 It is not possible to amend public changesets (see :hg:`help phases`)
2075 2075 or changesets that have children.
2076 2076
2077 2077 See :hg:`help dates` for a list of formats valid for -d/--date.
2078 2078
2079 2079 Returns 0 on success, 1 if nothing changed.
2080 2080
2081 2081 .. container:: verbose
2082 2082
2083 2083 Examples:
2084 2084
2085 2085 - commit all files ending in .py::
2086 2086
2087 2087 hg commit --include "set:**.py"
2088 2088
2089 2089 - commit all non-binary files::
2090 2090
2091 2091 hg commit --exclude "set:binary()"
2092 2092
2093 2093 - amend the current commit and set the date to now::
2094 2094
2095 2095 hg commit --amend --date now
2096 2096 """
2097 2097 cmdutil.check_at_most_one_arg(opts, 'draft', 'secret')
2098 2098 cmdutil.check_incompatible_arguments(opts, 'subrepos', ['amend'])
2099 2099 with repo.wlock(), repo.lock():
2100 2100 return _docommit(ui, repo, *pats, **opts)
2101 2101
2102 2102
2103 2103 def _docommit(ui, repo, *pats, **opts):
2104 2104 if opts.get('interactive'):
2105 2105 opts.pop('interactive')
2106 2106 ret = cmdutil.dorecord(
2107 2107 ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
2108 2108 )
2109 2109 # ret can be 0 (no changes to record) or the value returned by
2110 2110 # commit(), 1 if nothing changed or None on success.
2111 2111 return 1 if ret == 0 else ret
2112 2112
2113 2113 if opts.get('subrepos'):
2114 2114 # Let --subrepos on the command line override config setting.
2115 2115 ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
2116 2116
2117 2117 cmdutil.checkunfinished(repo, commit=True)
2118 2118
2119 2119 branch = repo[None].branch()
2120 2120 bheads = repo.branchheads(branch)
2121 2121 tip = repo.changelog.tip()
2122 2122
2123 2123 extra = {}
2124 2124 if opts.get('close_branch') or opts.get('force_close_branch'):
2125 2125 extra[b'close'] = b'1'
2126 2126
2127 2127 if repo[b'.'].closesbranch():
2128 2128 # Not ideal, but let us do an extra status early to prevent early
2129 2129 # bail out.
2130 2130 matcher = scmutil.match(
2131 2131 repo[None], pats, pycompat.byteskwargs(opts)
2132 2132 )
2133 2133 s = repo.status(match=matcher)
2134 2134 if s.modified or s.added or s.removed:
2135 2135 bheads = repo.branchheads(branch, closed=True)
2136 2136 else:
2137 2137 msg = _(b'current revision is already a branch closing head')
2138 2138 raise error.InputError(msg)
2139 2139
2140 2140 if not bheads:
2141 2141 raise error.InputError(
2142 2142 _(b'branch "%s" has no heads to close') % branch
2143 2143 )
2144 2144 elif (
2145 2145 branch == repo[b'.'].branch()
2146 2146 and repo[b'.'].node() not in bheads
2147 2147 and not opts.get('force_close_branch')
2148 2148 ):
2149 2149 hint = _(
2150 2150 b'use --force-close-branch to close branch from a non-head'
2151 2151 b' changeset'
2152 2152 )
2153 2153 raise error.InputError(_(b'can only close branch heads'), hint=hint)
2154 2154 elif opts.get('amend'):
2155 2155 if (
2156 2156 repo[b'.'].p1().branch() != branch
2157 2157 and repo[b'.'].p2().branch() != branch
2158 2158 ):
2159 2159 raise error.InputError(_(b'can only close branch heads'))
2160 2160
2161 2161 if opts.get('amend'):
2162 2162 if ui.configbool(b'ui', b'commitsubrepos'):
2163 2163 raise error.InputError(
2164 2164 _(b'cannot amend with ui.commitsubrepos enabled')
2165 2165 )
2166 2166
2167 2167 old = repo[b'.']
2168 2168 rewriteutil.precheck(repo, [old.rev()], b'amend')
2169 2169
2170 2170 # Currently histedit gets confused if an amend happens while histedit
2171 2171 # is in progress. Since we have a checkunfinished command, we are
2172 2172 # temporarily honoring it.
2173 2173 #
2174 2174 # Note: eventually this guard will be removed. Please do not expect
2175 2175 # this behavior to remain.
2176 2176 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2177 2177 cmdutil.checkunfinished(repo)
2178 2178
2179 2179 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
2180 2180 opts = pycompat.byteskwargs(opts)
2181 2181 if node == old.node():
2182 2182 ui.status(_(b"nothing changed\n"))
2183 2183 return 1
2184 2184 else:
2185 2185
2186 2186 def commitfunc(ui, repo, message, match, opts):
2187 2187 overrides = {}
2188 2188 if opts.get(b'secret'):
2189 2189 overrides[(b'phases', b'new-commit')] = b'secret'
2190 2190 elif opts.get(b'draft'):
2191 2191 overrides[(b'phases', b'new-commit')] = b'draft'
2192 2192
2193 2193 baseui = repo.baseui
2194 2194 with baseui.configoverride(overrides, b'commit'):
2195 2195 with ui.configoverride(overrides, b'commit'):
2196 2196 editform = cmdutil.mergeeditform(
2197 2197 repo[None], b'commit.normal'
2198 2198 )
2199 2199 editor = cmdutil.getcommiteditor(
2200 2200 editform=editform, **pycompat.strkwargs(opts)
2201 2201 )
2202 2202 return repo.commit(
2203 2203 message,
2204 2204 opts.get(b'user'),
2205 2205 opts.get(b'date'),
2206 2206 match,
2207 2207 editor=editor,
2208 2208 extra=extra,
2209 2209 )
2210 2210
2211 2211 opts = pycompat.byteskwargs(opts)
2212 2212 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
2213 2213
2214 2214 if not node:
2215 2215 stat = cmdutil.postcommitstatus(repo, pats, opts)
2216 2216 if stat.deleted:
2217 2217 ui.status(
2218 2218 _(
2219 2219 b"nothing changed (%d missing files, see "
2220 2220 b"'hg status')\n"
2221 2221 )
2222 2222 % len(stat.deleted)
2223 2223 )
2224 2224 else:
2225 2225 ui.status(_(b"nothing changed\n"))
2226 2226 return 1
2227 2227
2228 2228 cmdutil.commitstatus(repo, node, branch, bheads, tip, opts)
2229 2229
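# when the 'commands.commit.post-status' config option is set, show a
# status summary after committing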
2230 2230 if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
2231 2231 status(
2232 2232 ui,
2233 2233 repo,
2234 2234 modified=True,
2235 2235 added=True,
2236 2236 removed=True,
2237 2237 deleted=True,
2238 2238 unknown=True,
2239 2239 subrepos=opts.get(b'subrepos'),
2240 2240 )
2241 2241
2242 2242
2243 2243 @command(
2244 2244 b'config|showconfig|debugconfig',
2245 2245 [
2246 2246 (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
2247 2247 # This is experimental because we need
2248 2248 # * reasonable behavior around aliases,
2249 2249 # * decide if we display [debug] [experimental] and [devel] sections by
2250 2250 # default
2251 2251 # * some way to display "generic" config entries (the ones matching a
2252 2252 # regexp),
2253 2253 # * proper display of the different value types
2254 2254 # * a better way to handle <DYNAMIC> values (and variable types),
2255 2255 # * maybe some type information?
2256 2256 (
2257 2257 b'',
2258 2258 b'exp-all-known',
2259 2259 None,
2260 2260 _(b'show all known config option (EXPERIMENTAL)'),
2261 2261 ),
2262 2262 (b'e', b'edit', None, _(b'edit user config')),
2263 2263 (b'l', b'local', None, _(b'edit repository config')),
2264 2264 (b'', b'source', None, _(b'show source of configuration value')),
2265 2265 (
2266 2266 b'',
2267 2267 b'shared',
2268 2268 None,
2269 2269 _(b'edit shared source repository config (EXPERIMENTAL)'),
2270 2270 ),
2271 2271 (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')),
2272 2272 (b'g', b'global', None, _(b'edit global config')),
2273 2273 ]
2274 2274 + formatteropts,
2275 2275 _(b'[-u] [NAME]...'),
2276 2276 helpcategory=command.CATEGORY_HELP,
2277 2277 optionalrepo=True,
2278 2278 intents={INTENT_READONLY},
2279 2279 )
2280 2280 def config(ui, repo, *values, **opts):
2281 2281 """show combined config settings from all hgrc files
2282 2282
2283 2283 With no arguments, print names and values of all config items.
2284 2284
2285 2285 With one argument of the form section.name, print just the value
2286 2286 of that config item.
2287 2287
2288 2288 With multiple arguments, print names and values of all config
2289 2289 items with matching section names or section.names.
2290 2290
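For example, ``hg config ui.username`` prints just the configured username,
while ``hg config ui`` lists every item in the ``ui`` section.
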
2291 2291 With --edit, start an editor on the user-level config file. With
2292 2292 --global, edit the system-wide config file. With --local, edit the
2293 2293 repository-level config file.
2294 2294
2295 2295 With --source, the source (filename and line number) is printed
2296 2296 for each config item.
2297 2297
2298 2298 See :hg:`help config` for more information about config files.
2299 2299
2300 2300 .. container:: verbose
2301 2301
2302 2302 The --non-shared flag is used to edit the `.hg/hgrc-not-shared` config file.
2303 2303 This file is not shared across shares when in share-safe mode.
2304 2304
2305 2305 Template:
2306 2306
2307 2307 The following keywords are supported. See also :hg:`help templates`.
2308 2308
2309 2309 :name: String. Config name.
2310 2310 :source: String. Filename and line number where the item is defined.
2311 2311 :value: String. Config value.
2312 2312
2313 2313 The --shared flag can be used to edit the config file of the shared source
2314 2314 repository. It only works when the repository has been shared using the
2315 2315 experimental share-safe feature.
2316 2316
2317 2317 Returns 0 on success, 1 if NAME does not exist.
2318 2318
2319 2319 """
2320 2320
2321 2321 opts = pycompat.byteskwargs(opts)
2322 2322 editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
2323 2323 if any(opts.get(o) for o in editopts):
2324 2324 cmdutil.check_at_most_one_arg(opts, *editopts[1:])
2325 2325 if opts.get(b'local'):
2326 2326 if not repo:
2327 2327 raise error.InputError(
2328 2328 _(b"can't use --local outside a repository")
2329 2329 )
2330 2330 paths = [repo.vfs.join(b'hgrc')]
2331 2331 elif opts.get(b'global'):
2332 2332 paths = rcutil.systemrcpath()
2333 2333 elif opts.get(b'shared'):
2334 2334 if not repo.shared():
2335 2335 raise error.InputError(
2336 2336 _(b"repository is not shared; can't use --shared")
2337 2337 )
2338 2338 if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
2339 2339 raise error.InputError(
2340 2340 _(
2341 2341 b"share safe feature not enabled; "
2342 2342 b"unable to edit shared source repository config"
2343 2343 )
2344 2344 )
2345 2345 paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
2346 2346 elif opts.get(b'non_shared'):
2347 2347 paths = [repo.vfs.join(b'hgrc-not-shared')]
2348 2348 else:
2349 2349 paths = rcutil.userrcpath()
2350 2350
2351 2351 for f in paths:
2352 2352 if os.path.exists(f):
2353 2353 break
2354 2354 else:
2355 2355 if opts.get(b'global'):
2356 2356 samplehgrc = uimod.samplehgrcs[b'global']
2357 2357 elif opts.get(b'local'):
2358 2358 samplehgrc = uimod.samplehgrcs[b'local']
2359 2359 else:
2360 2360 samplehgrc = uimod.samplehgrcs[b'user']
2361 2361
2362 2362 f = paths[0]
2363 2363 fp = open(f, b"wb")
2364 2364 fp.write(util.tonativeeol(samplehgrc))
2365 2365 fp.close()
2366 2366
2367 2367 editor = ui.geteditor()
2368 2368 ui.system(
2369 2369 b"%s \"%s\"" % (editor, f),
2370 2370 onerr=error.InputError,
2371 2371 errprefix=_(b"edit failed"),
2372 2372 blockedtag=b'config_edit',
2373 2373 )
2374 2374 return
2375 2375 ui.pager(b'config')
2376 2376 fm = ui.formatter(b'config', opts)
2377 2377 for t, f in rcutil.rccomponents():
2378 2378 if t == b'path':
2379 2379 ui.debug(b'read config from: %s\n' % f)
2380 2380 elif t == b'resource':
2381 2381 ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
2382 2382 elif t == b'items':
2383 2383 # Don't print anything for 'items'.
2384 2384 pass
2385 2385 else:
2386 2386 raise error.ProgrammingError(b'unknown rctype: %s' % t)
2387 2387 untrusted = bool(opts.get(b'untrusted'))
2388 2388
2389 2389 selsections = selentries = []
2390 2390 if values:
2391 2391 selsections = [v for v in values if b'.' not in v]
2392 2392 selentries = [v for v in values if b'.' in v]
2393 2393 uniquesel = len(selentries) == 1 and not selsections
2394 2394 selsections = set(selsections)
2395 2395 selentries = set(selentries)
2396 2396
2397 2397 matched = False
2398 2398 all_known = opts[b'exp_all_known']
2399 2399 show_source = ui.debugflag or opts.get(b'source')
2400 2400 entries = ui.walkconfig(untrusted=untrusted, all_known=all_known)
2401 2401 for section, name, value in entries:
2402 2402 source = ui.configsource(section, name, untrusted)
2403 2403 value = pycompat.bytestr(value)
2404 2404 defaultvalue = ui.configdefault(section, name)
2405 2405 if fm.isplain():
2406 2406 source = source or b'none'
2407 2407 value = value.replace(b'\n', b'\\n')
2408 2408 entryname = section + b'.' + name
2409 2409 if values and not (section in selsections or entryname in selentries):
2410 2410 continue
2411 2411 fm.startitem()
2412 2412 fm.condwrite(show_source, b'source', b'%s: ', source)
2413 2413 if uniquesel:
2414 2414 fm.data(name=entryname)
2415 2415 fm.write(b'value', b'%s\n', value)
2416 2416 else:
2417 2417 fm.write(b'name value', b'%s=%s\n', entryname, value)
2418 2418 if formatter.isprintable(defaultvalue):
2419 2419 fm.data(defaultvalue=defaultvalue)
2420 2420 elif isinstance(defaultvalue, list) and all(
2421 2421 formatter.isprintable(e) for e in defaultvalue
2422 2422 ):
2423 2423 fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
2424 2424 # TODO: no idea how to process unsupported defaultvalue types
2425 2425 matched = True
2426 2426 fm.end()
2427 2427 if matched:
2428 2428 return 0
2429 2429 return 1
2430 2430
2431 2431
2432 2432 @command(
2433 2433 b'continue',
2434 2434 dryrunopts,
2435 2435 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2436 2436 helpbasic=True,
2437 2437 )
2438 2438 def continuecmd(ui, repo, **opts):
2439 2439 """resumes an interrupted operation (EXPERIMENTAL)
2440 2440
2441 2441 Finishes a multistep operation like graft, histedit, rebase, merge,
2442 2442 and unshelve if they are in an interrupted state.
2443 2443
2444 2444 Use --dry-run/-n to perform a dry run of the command.
2445 2445 """
2446 2446 dryrun = opts.get('dry_run')
2447 2447 contstate = cmdutil.getunfinishedstate(repo)
2448 2448 if not contstate:
2449 2449 raise error.StateError(_(b'no operation in progress'))
2450 2450 if not contstate.continuefunc:
2451 2451 raise error.StateError(
2452 2452 (
2453 2453 _(b"%s in progress but does not support 'hg continue'")
2454 2454 % (contstate._opname)
2455 2455 ),
2456 2456 hint=contstate.continuemsg(),
2457 2457 )
2458 2458 if dryrun:
2459 2459 ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
2460 2460 return
2461 2461 return contstate.continuefunc(ui, repo)
2462 2462
2463 2463
2464 2464 @command(
2465 2465 b'copy|cp',
2466 2466 [
2467 2467 (b'', b'forget', None, _(b'unmark a destination file as copied')),
2468 2468 (b'A', b'after', None, _(b'record a copy that has already occurred')),
2469 2469 (
2470 2470 b'',
2471 2471 b'at-rev',
2472 2472 b'',
2473 2473 _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
2474 2474 _(b'REV'),
2475 2475 ),
2476 2476 (
2477 2477 b'f',
2478 2478 b'force',
2479 2479 None,
2480 2480 _(b'forcibly copy over an existing managed file'),
2481 2481 ),
2482 2482 ]
2483 2483 + walkopts
2484 2484 + dryrunopts,
2485 2485 _(b'[OPTION]... (SOURCE... DEST | --forget DEST...)'),
2486 2486 helpcategory=command.CATEGORY_FILE_CONTENTS,
2487 2487 )
2488 2488 def copy(ui, repo, *pats, **opts):
2489 2489 """mark files as copied for the next commit
2490 2490
2491 2491 Mark dest as having copies of source files. If dest is a
2492 2492 directory, copies are put in that directory. If dest is a file,
2493 2493 the source must be a single file.
2494 2494
2495 2495 By default, this command copies the contents of files as they
2496 2496 exist in the working directory. If invoked with -A/--after, the
2497 2497 operation is recorded, but no copying is performed.
2498 2498
2499 2499 To undo marking a destination file as copied, use --forget. With that
2500 2500 option, all given (positional) arguments are unmarked as copies. The
2501 2501 destination file(s) will be left in place (still tracked). Note that
2502 2502 :hg:`copy --forget` behaves the same way as :hg:`rename --forget`.
2503 2503
2504 2504 This command takes effect with the next commit by default.
2505 2505
2506 2506 Returns 0 on success, 1 if errors are encountered.
2507 2507 """
2508 2508 opts = pycompat.byteskwargs(opts)
2509 2509
2510 2510 context = lambda repo: repo.dirstate.changing_files(repo)
2511 2511 rev = opts.get(b'at_rev')
2512 2512 ctx = None
2513 2513 if rev:
2514 2514 ctx = logcmdutil.revsingle(repo, rev)
2515 2515 if ctx.rev() is not None:
2516 2516
2517 2517 def context(repo):
2518 2518 return util.nullcontextmanager()
2519 2519
2520 2520 opts[b'at_rev'] = ctx.rev()
2521 2521 with repo.wlock(), context(repo):
2522 2522 return cmdutil.copy(ui, repo, pats, opts)
2523 2523
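# Hedged usage sketches for the behaviour described above (file names are
# illustrative only):
#
#     hg copy foo.c bar.c          # record bar.c as a copy of foo.c
#     hg copy --after foo.c bar.c  # record a copy that already happened
#     hg copy --forget bar.c       # unmark bar.c; the file stays tracked
#
# With --at-rev the (un)marking is applied to the given committed revision
# instead of the working directory, which is why the code above swaps the
# dirstate context manager for a null context in that case.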
2524 2524
2525 2525 @command(
2526 2526 b'debugcommands',
2527 2527 [],
2528 2528 _(b'[COMMAND]'),
2529 2529 helpcategory=command.CATEGORY_HELP,
2530 2530 norepo=True,
2531 2531 )
2532 2532 def debugcommands(ui, cmd=b'', *args):
2533 2533 """list all available commands and options"""
2534 2534 for cmd, vals in sorted(table.items()):
2535 2535 cmd = cmd.split(b'|')[0]
2536 2536 opts = b', '.join([i[1] for i in vals[1]])
2537 2537 ui.write(b'%s: %s\n' % (cmd, opts))
2538 2538
2539 2539
2540 2540 @command(
2541 2541 b'debugcomplete',
2542 2542 [(b'o', b'options', None, _(b'show the command options'))],
2543 2543 _(b'[-o] CMD'),
2544 2544 helpcategory=command.CATEGORY_HELP,
2545 2545 norepo=True,
2546 2546 )
2547 2547 def debugcomplete(ui, cmd=b'', **opts):
2548 2548 """returns the completion list associated with the given command"""
2549 2549
2550 2550 if opts.get('options'):
2551 2551 options = []
2552 2552 otables = [globalopts]
2553 2553 if cmd:
2554 2554 aliases, entry = cmdutil.findcmd(cmd, table, False)
2555 2555 otables.append(entry[1])
2556 2556 for t in otables:
2557 2557 for o in t:
2558 2558 if b"(DEPRECATED)" in o[3]:
2559 2559 continue
2560 2560 if o[0]:
2561 2561 options.append(b'-%s' % o[0])
2562 2562 options.append(b'--%s' % o[1])
2563 2563 ui.write(b"%s\n" % b"\n".join(options))
2564 2564 return
2565 2565
2566 2566 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2567 2567 if ui.verbose:
2568 2568 cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
2569 2569 ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2570 2570
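# Illustrative invocations only (command names are examples):
#
#     hg debugcomplete com      # command names starting with "com"
#     hg debugcomplete -o diff  # option names accepted by "diff"
#
# Shell completion scripts typically drive this command; the output maps
# directly onto the findcmd()/findpossible() lookups above.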
2571 2571
2572 2572 @command(
2573 2573 b'diff',
2574 2574 [
2575 2575 (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
2576 2576 (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
2577 2577 (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
2578 2578 (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
2579 2579 ]
2580 2580 + diffopts
2581 2581 + diffopts2
2582 2582 + walkopts
2583 2583 + subrepoopts,
2584 2584 _(b'[OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...'),
2585 2585 helpcategory=command.CATEGORY_FILE_CONTENTS,
2586 2586 helpbasic=True,
2587 2587 inferrepo=True,
2588 2588 intents={INTENT_READONLY},
2589 2589 )
2590 2590 def diff(ui, repo, *pats, **opts):
2591 2591 """diff repository (or selected files)
2592 2592
2593 2593 Show differences between revisions for the specified files.
2594 2594
2595 2595 Differences between files are shown using the unified diff format.
2596 2596
2597 2597 .. note::
2598 2598
2599 2599 :hg:`diff` may generate unexpected results for merges, as it will
2600 2600 default to comparing against the working directory's first
2601 2601 parent changeset if no revisions are specified. To diff against the
2602 2602 conflict regions, you can use `--config diff.merge=yes`.
2603 2603
2604 2604 By default, the working directory files are compared to the working directory's first parent. To
2605 2605 see the differences from another revision, use --from. To see the difference
2606 2606 to another revision, use --to. For example, :hg:`diff --from .^` will show
2607 2607 the differences from the working copy's grandparent to the working copy,
2608 2608 :hg:`diff --to .` will show the diff from the working copy to its parent
2609 2609 (i.e. the reverse of the default), and :hg:`diff --from 1.0 --to 1.2` will
2610 2610 show the diff between those two revisions.
2611 2611
2612 2612 Alternatively you can specify -c/--change with a revision to see the changes
2613 2613 in that changeset relative to its first parent (i.e. :hg:`diff -c 42` is
2614 2614 equivalent to :hg:`diff --from 42^ --to 42`)
2615 2615
2616 2616 Without the -a/--text option, diff will avoid generating diffs of
2617 2617 files it detects as binary. With -a, diff will generate a diff
2618 2618 anyway, probably with undesirable results.
2619 2619
2620 2620 Use the -g/--git option to generate diffs in the git extended diff
2621 2621 format. For more information, read :hg:`help diffs`.
2622 2622
2623 2623 .. container:: verbose
2624 2624
2625 2625 Examples:
2626 2626
2627 2627 - compare a file in the current working directory to its parent::
2628 2628
2629 2629 hg diff foo.c
2630 2630
2631 2631 - compare two historical versions of a directory, with rename info::
2632 2632
2633 2633 hg diff --git --from 1.0 --to 1.2 lib/
2634 2634
2635 2635 - get change stats relative to the last change on some date::
2636 2636
2637 2637 hg diff --stat --from "date('may 2')"
2638 2638
2639 2639 - diff all newly-added files that contain a keyword::
2640 2640
2641 2641 hg diff "set:added() and grep(GNU)"
2642 2642
2643 2643 - compare a revision and its parents::
2644 2644
2645 2645 hg diff -c 9353 # compare against first parent
2646 2646 hg diff --from 9353^ --to 9353 # same using revset syntax
2647 2647 hg diff --from 9353^2 --to 9353 # compare against the second parent
2648 2648
2649 2649 Returns 0 on success.
2650 2650 """
2651 2651
2652 2652 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
2653 2653 opts = pycompat.byteskwargs(opts)
2654 2654 revs = opts.get(b'rev')
2655 2655 change = opts.get(b'change')
2656 2656 from_rev = opts.get(b'from')
2657 2657 to_rev = opts.get(b'to')
2658 2658 stat = opts.get(b'stat')
2659 2659 reverse = opts.get(b'reverse')
2660 2660
2661 2661 cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
2662 2662 cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])
2663 2663 if change:
2664 2664 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
2665 2665 ctx2 = logcmdutil.revsingle(repo, change, None)
2666 2666 ctx1 = logcmdutil.diff_parent(ctx2)
2667 2667 elif from_rev or to_rev:
2668 2668 repo = scmutil.unhidehashlikerevs(
2669 2669 repo, [from_rev] + [to_rev], b'nowarn'
2670 2670 )
2671 2671 ctx1 = logcmdutil.revsingle(repo, from_rev, None)
2672 2672 ctx2 = logcmdutil.revsingle(repo, to_rev, None)
2673 2673 else:
2674 2674 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
2675 2675 ctx1, ctx2 = logcmdutil.revpair(repo, revs)
2676 2676
2677 2677 if reverse:
2678 2678 ctxleft = ctx2
2679 2679 ctxright = ctx1
2680 2680 else:
2681 2681 ctxleft = ctx1
2682 2682 ctxright = ctx2
2683 2683
2684 2684 diffopts = patch.diffallopts(ui, opts)
2685 2685 m = scmutil.match(ctx2, pats, opts)
2686 2686 m = repo.narrowmatch(m)
2687 2687 ui.pager(b'diff')
2688 2688 logcmdutil.diffordiffstat(
2689 2689 ui,
2690 2690 repo,
2691 2691 diffopts,
2692 2692 ctxleft,
2693 2693 ctxright,
2694 2694 m,
2695 2695 stat=stat,
2696 2696 listsubrepos=opts.get(b'subrepos'),
2697 2697 root=opts.get(b'root'),
2698 2698 )
2699 2699
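# A minimal sketch, assuming the module-level imports already used above
# (logcmdutil, patch, scmutil), of how the same machinery the command uses
# could be reused to print a plain diff between two revisions. The helper
# name and its default revision arguments are illustrative only; nothing
# in this module calls it.


def _sketch_print_diff(ui, repo, rev1=b'.^', rev2=b'.'):
    """Illustrative helper: write the diff from rev1 to rev2 to the ui."""
    ctx1 = logcmdutil.revsingle(repo, rev1, None)
    ctx2 = logcmdutil.revsingle(repo, rev2, None)
    # empty pats/opts: diff everything, with default diff options
    m = scmutil.match(ctx2, [], {})
    diffopts = patch.diffallopts(ui, {})
    logcmdutil.diffordiffstat(ui, repo, diffopts, ctx1, ctx2, m)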
2700 2700
2701 2701 @command(
2702 2702 b'export',
2703 2703 [
2704 2704 (
2705 2705 b'B',
2706 2706 b'bookmark',
2707 2707 b'',
2708 2708 _(b'export changes only reachable by given bookmark'),
2709 2709 _(b'BOOKMARK'),
2710 2710 ),
2711 2711 (
2712 2712 b'o',
2713 2713 b'output',
2714 2714 b'',
2715 2715 _(b'print output to file with formatted name'),
2716 2716 _(b'FORMAT'),
2717 2717 ),
2718 2718 (b'', b'switch-parent', None, _(b'diff against the second parent')),
2719 2719 (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
2720 2720 ]
2721 2721 + diffopts
2722 2722 + formatteropts,
2723 2723 _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
2724 2724 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2725 2725 helpbasic=True,
2726 2726 intents={INTENT_READONLY},
2727 2727 )
2728 2728 def export(ui, repo, *changesets, **opts):
2729 2729 """dump the header and diffs for one or more changesets
2730 2730
2731 2731 Print the changeset header and diffs for one or more revisions.
2732 2732 If no revision is given, the parent of the working directory is used.
2733 2733
2734 2734 The information shown in the changeset header is: author, date,
2735 2735 branch name (if non-default), changeset hash, parent(s) and commit
2736 2736 comment.
2737 2737
2738 2738 .. note::
2739 2739
2740 2740 :hg:`export` may generate unexpected diff output for merge
2741 2741 changesets, as it will compare the merge changeset against its
2742 2742 first parent only.
2743 2743
2744 2744 Output may be to a file, in which case the name of the file is
2745 2745 given using a template string. See :hg:`help templates`. In addition
2746 2746 to the common template keywords, the following formatting rules are
2747 2747 supported:
2748 2748
2749 2749 :``%%``: literal "%" character
2750 2750 :``%H``: changeset hash (40 hexadecimal digits)
2751 2751 :``%N``: number of patches being generated
2752 2752 :``%R``: changeset revision number
2753 2753 :``%b``: basename of the exporting repository
2754 2754 :``%h``: short-form changeset hash (12 hexadecimal digits)
2755 2755 :``%m``: first line of the commit message (only alphanumeric characters)
2756 2756 :``%n``: zero-padded sequence number, starting at 1
2757 2757 :``%r``: zero-padded changeset revision number
2758 2758 :``\\``: literal "\\" character
2759 2759
2760 2760 Without the -a/--text option, export will avoid generating diffs
2761 2761 of files it detects as binary. With -a, export will generate a
2762 2762 diff anyway, probably with undesirable results.
2763 2763
2764 2764 With -B/--bookmark changesets reachable by the given bookmark are
2765 2765 selected.
2766 2766
2767 2767 Use the -g/--git option to generate diffs in the git extended diff
2768 2768 format. See :hg:`help diffs` for more information.
2769 2769
2770 2770 With the --switch-parent option, the diff will be against the
2771 2771 second parent. This can be useful when reviewing a merge.
2772 2772
2773 2773 .. container:: verbose
2774 2774
2775 2775 Template:
2776 2776
2777 2777 The following keywords are supported in addition to the common template
2778 2778 keywords and functions. See also :hg:`help templates`.
2779 2779
2780 2780 :diff: String. Diff content.
2781 2781 :parents: List of strings. Parent nodes of the changeset.
2782 2782
2783 2783 Examples:
2784 2784
2785 2785 - use export and import to transplant a bugfix to the current
2786 2786 branch::
2787 2787
2788 2788 hg export -r 9353 | hg import -
2789 2789
2790 2790 - export all the changesets between two revisions to a file with
2791 2791 rename information::
2792 2792
2793 2793 hg export --git -r 123:150 > changes.txt
2794 2794
2795 2795 - split outgoing changes into a series of patches with
2796 2796 descriptive names::
2797 2797
2798 2798 hg export -r "outgoing()" -o "%n-%m.patch"
2799 2799
2800 2800 Returns 0 on success.
2801 2801 """
2802 2802 opts = pycompat.byteskwargs(opts)
2803 2803 bookmark = opts.get(b'bookmark')
2804 2804 changesets += tuple(opts.get(b'rev', []))
2805 2805
2806 2806 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
2807 2807
2808 2808 if bookmark:
2809 2809 if bookmark not in repo._bookmarks:
2810 2810 raise error.InputError(_(b"bookmark '%s' not found") % bookmark)
2811 2811
2812 2812 revs = scmutil.bookmarkrevs(repo, bookmark)
2813 2813 else:
2814 2814 if not changesets:
2815 2815 changesets = [b'.']
2816 2816
2817 2817 repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
2818 2818 revs = logcmdutil.revrange(repo, changesets)
2819 2819
2820 2820 if not revs:
2821 2821 raise error.InputError(_(b"export requires at least one changeset"))
2822 2822 if len(revs) > 1:
2823 2823 ui.note(_(b'exporting patches:\n'))
2824 2824 else:
2825 2825 ui.note(_(b'exporting patch:\n'))
2826 2826
2827 2827 fntemplate = opts.get(b'output')
2828 2828 if cmdutil.isstdiofilename(fntemplate):
2829 2829 fntemplate = b''
2830 2830
2831 2831 if fntemplate:
2832 2832 fm = formatter.nullformatter(ui, b'export', opts)
2833 2833 else:
2834 2834 ui.pager(b'export')
2835 2835 fm = ui.formatter(b'export', opts)
2836 2836 with fm:
2837 2837 cmdutil.export(
2838 2838 repo,
2839 2839 revs,
2840 2840 fm,
2841 2841 fntemplate=fntemplate,
2842 2842 switch_parent=opts.get(b'switch_parent'),
2843 2843 opts=patch.diffallopts(ui, opts),
2844 2844 )
2845 2845
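# Hedged illustration of the output-name rules documented above (the
# command line repeats the docstring example; the expansion description is
# a paraphrase, not additional behaviour):
#
#     hg export -r "outgoing()" -o "%n-%m.patch"
#
# writes one file per exported changeset, with %n expanded to the
# zero-padded sequence number (01, 02, ...) and %m to the first line of
# the commit message reduced to the characters allowed by the rule above,
# while "-" and ".patch" are kept literally.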
2846 2846
2847 2847 @command(
2848 2848 b'files',
2849 2849 [
2850 2850 (
2851 2851 b'r',
2852 2852 b'rev',
2853 2853 b'',
2854 2854 _(b'search the repository as it is in REV'),
2855 2855 _(b'REV'),
2856 2856 ),
2857 2857 (
2858 2858 b'0',
2859 2859 b'print0',
2860 2860 None,
2861 2861 _(b'end filenames with NUL, for use with xargs'),
2862 2862 ),
2863 2863 ]
2864 2864 + walkopts
2865 2865 + formatteropts
2866 2866 + subrepoopts,
2867 2867 _(b'[OPTION]... [FILE]...'),
2868 2868 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2869 2869 intents={INTENT_READONLY},
2870 2870 )
2871 2871 def files(ui, repo, *pats, **opts):
2872 2872 """list tracked files
2873 2873
2874 2874 Print files under Mercurial control in the working directory or
2875 2875 the specified revision that match the given patterns (excluding removed files).
2876 2876 Files can be specified as filenames or filesets.
2877 2877
2878 2878 If no files are given to match, this command prints the names
2879 2879 of all files under Mercurial control.
2880 2880
2881 2881 .. container:: verbose
2882 2882
2883 2883 Template:
2884 2884
2885 2885 The following keywords are supported in addition to the common template
2886 2886 keywords and functions. See also :hg:`help templates`.
2887 2887
2888 2888 :flags: String. Character denoting file's symlink and executable bits.
2889 2889 :path: String. Repository-absolute path of the file.
2890 2890 :size: Integer. Size of the file in bytes.
2891 2891
2892 2892 Examples:
2893 2893
2894 2894 - list all files under the current directory::
2895 2895
2896 2896 hg files .
2897 2897
2898 2898 - shows sizes and flags for current revision::
2899 2899
2900 2900 hg files -vr .
2901 2901
2902 2902 - list all files named README::
2903 2903
2904 2904 hg files -I "**/README"
2905 2905
2906 2906 - list all binary files::
2907 2907
2908 2908 hg files "set:binary()"
2909 2909
2910 2910 - find files containing a regular expression::
2911 2911
2912 2912 hg files "set:grep('bob')"
2913 2913
2914 2914 - search tracked file contents with xargs and grep::
2915 2915
2916 2916 hg files -0 | xargs -0 grep foo
2917 2917
2918 2918 See :hg:`help patterns` and :hg:`help filesets` for more information
2919 2919 on specifying file patterns.
2920 2920
2921 2921 Returns 0 if a match is found, 1 otherwise.
2922 2922
2923 2923 """
2924 2924
2925 2925 opts = pycompat.byteskwargs(opts)
2926 2926 rev = opts.get(b'rev')
2927 2927 if rev:
2928 2928 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
2929 2929 ctx = logcmdutil.revsingle(repo, rev, None)
2930 2930
2931 2931 end = b'\n'
2932 2932 if opts.get(b'print0'):
2933 2933 end = b'\0'
2934 2934 fmt = b'%s' + end
2935 2935
2936 2936 m = scmutil.match(ctx, pats, opts)
2937 2937 ui.pager(b'files')
2938 2938 uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
2939 2939 with ui.formatter(b'files', opts) as fm:
2940 2940 return cmdutil.files(
2941 2941 ui, ctx, m, uipathfn, fm, fmt, opts.get(b'subrepos')
2942 2942 )
2943 2943
2944 2944
2945 2945 @command(
2946 2946 b'forget',
2947 2947 [
2948 2948 (b'i', b'interactive', None, _(b'use interactive mode')),
2949 2949 ]
2950 2950 + walkopts
2951 2951 + dryrunopts,
2952 2952 _(b'[OPTION]... FILE...'),
2953 2953 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2954 2954 helpbasic=True,
2955 2955 inferrepo=True,
2956 2956 )
2957 2957 def forget(ui, repo, *pats, **opts):
2958 2958 """forget the specified files on the next commit
2959 2959
2960 2960 Mark the specified files so they will no longer be tracked
2961 2961 after the next commit.
2962 2962
2963 2963 This only removes files from the current branch, not from the
2964 2964 entire project history, and it does not delete them from the
2965 2965 working directory.
2966 2966
2967 2967 To delete the file from the working directory, see :hg:`remove`.
2968 2968
2969 2969 To undo a forget before the next commit, see :hg:`add`.
2970 2970
2971 2971 .. container:: verbose
2972 2972
2973 2973 Examples:
2974 2974
2975 2975 - forget newly-added binary files::
2976 2976
2977 2977 hg forget "set:added() and binary()"
2978 2978
2979 2979 - forget files that would be excluded by .hgignore::
2980 2980
2981 2981 hg forget "set:hgignore()"
2982 2982
2983 2983 Returns 0 on success.
2984 2984 """
2985 2985
2986 2986 opts = pycompat.byteskwargs(opts)
2987 2987 if not pats:
2988 2988 raise error.InputError(_(b'no files specified'))
2989 2989
2990 2990 with repo.wlock(), repo.dirstate.changing_files(repo):
2991 2991 m = scmutil.match(repo[None], pats, opts)
2992 2992 dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
2993 2993 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2994 2994 rejected = cmdutil.forget(
2995 2995 ui,
2996 2996 repo,
2997 2997 m,
2998 2998 prefix=b"",
2999 2999 uipathfn=uipathfn,
3000 3000 explicitonly=False,
3001 3001 dryrun=dryrun,
3002 3002 interactive=interactive,
3003 3003 )[0]
3004 3004 return rejected and 1 or 0
3005 3005
3006 3006
3007 3007 @command(
3008 3008 b'graft',
3009 3009 [
3010 3010 (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
3011 3011 (
3012 3012 b'',
3013 3013 b'base',
3014 3014 b'',
3015 3015 _(b'base revision when doing the graft merge (ADVANCED)'),
3016 3016 _(b'REV'),
3017 3017 ),
3018 3018 (b'c', b'continue', False, _(b'resume interrupted graft')),
3019 3019 (b'', b'stop', False, _(b'stop interrupted graft')),
3020 3020 (b'', b'abort', False, _(b'abort interrupted graft')),
3021 3021 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
3022 3022 (b'', b'log', None, _(b'append graft info to log message')),
3023 3023 (
3024 3024 b'',
3025 3025 b'no-commit',
3026 3026 None,
3027 3027 _(b"don't commit, just apply the changes in working directory"),
3028 3028 ),
3029 3029 (b'f', b'force', False, _(b'force graft')),
3030 3030 (
3031 3031 b'D',
3032 3032 b'currentdate',
3033 3033 False,
3034 3034 _(b'record the current date as commit date'),
3035 3035 ),
3036 3036 (
3037 3037 b'U',
3038 3038 b'currentuser',
3039 3039 False,
3040 3040 _(b'record the current user as committer'),
3041 3041 ),
3042 3042 ]
3043 3043 + commitopts2
3044 3044 + mergetoolopts
3045 3045 + dryrunopts,
3046 3046 _(b'[OPTION]... [-r REV]... REV...'),
3047 3047 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
3048 3048 )
3049 3049 def graft(ui, repo, *revs, **opts):
3050 3050 """copy changes from other branches onto the current branch
3051 3051
3052 3052 This command uses Mercurial's merge logic to copy individual
3053 3053 changes from other branches without merging branches in the
3054 3054 history graph. This is sometimes known as 'backporting' or
3055 3055 'cherry-picking'. By default, graft will copy user, date, and
3056 3056 description from the source changesets.
3057 3057
3058 3058 Changesets that are ancestors of the current revision, that have
3059 3059 already been grafted, or that are merges will be skipped.
3060 3060
3061 3061 If --log is specified, log messages will have a comment appended
3062 3062 of the form::
3063 3063
3064 3064 (grafted from CHANGESETHASH)
3065 3065
3066 3066 If --force is specified, revisions will be grafted even if they
3067 3067 are already ancestors of, or have been grafted to, the destination.
3068 3068 This is useful when the revisions have since been backed out.
3069 3069
3070 3070 If a graft merge results in conflicts, the graft process is
3071 3071 interrupted so that the current merge can be manually resolved.
3072 3072 Once all conflicts are addressed, the graft process can be
3073 3073 continued with the -c/--continue option.
3074 3074
3075 3075 The -c/--continue option reapplies all the earlier options.
3076 3076
3077 3077 .. container:: verbose
3078 3078
3079 3079 The --base option exposes more of how graft internally uses merge with a
3080 3080 custom base revision. --base can be used to specify an ancestor other than
3081 3081 the first and only parent.
3082 3082
3083 3083 The command::
3084 3084
3085 3085 hg graft -r 345 --base 234
3086 3086
3087 3087 is thus pretty much the same as::
3088 3088
3089 3089 hg diff --from 234 --to 345 | hg import
3090 3090
3091 3091 but using merge to resolve conflicts and track moved files.
3092 3092
3093 3093 The result of a merge can thus be backported as a single commit by
3094 3094 specifying one of the merge parents as base, and thus effectively
3095 3095 grafting the changes from the other side.
3096 3096
3097 3097 It is also possible to collapse multiple changesets and clean up history
3098 3098 by specifying another ancestor as base, much like rebase --collapse
3099 3099 --keep.
3100 3100
3101 3101 The commit message can be tweaked after the fact using :hg:`commit --amend`.
3102 3102
3103 3103 To use non-ancestors as the base for backing out changes, see the backout
3104 3104 command and the hidden --parent option.
3105 3105
3106 3106 .. container:: verbose
3107 3107
3108 3108 Examples:
3109 3109
3110 3110 - copy a single change to the stable branch and edit its description::
3111 3111
3112 3112 hg update stable
3113 3113 hg graft --edit 9393
3114 3114
3115 3115 - graft a range of changesets with one exception, updating dates::
3116 3116
3117 3117 hg graft -D "2085::2093 and not 2091"
3118 3118
3119 3119 - continue a graft after resolving conflicts::
3120 3120
3121 3121 hg graft -c
3122 3122
3123 3123 - show the source of a grafted changeset::
3124 3124
3125 3125 hg log --debug -r .
3126 3126
3127 3127 - show revisions sorted by date::
3128 3128
3129 3129 hg log -r "sort(all(), date)"
3130 3130
3131 3131 - backport the result of a merge as a single commit::
3132 3132
3133 3133 hg graft -r 123 --base 123^
3134 3134
3135 3135 - land a feature branch as one changeset::
3136 3136
3137 3137 hg up -cr default
3138 3138 hg graft -r featureX --base "ancestor('featureX', 'default')"
3139 3139
3140 3140 See :hg:`help revisions` for more about specifying revisions.
3141 3141
3142 3142 Returns 0 on successful completion, 1 if there are unresolved files.
3143 3143 """
3144 3144 with repo.wlock():
3145 3145 return _dograft(ui, repo, *revs, **opts)
3146 3146
3147 3147
3148 3148 def _dograft(ui, repo, *revs, **opts):
3149 3149 if revs and opts.get('rev'):
3150 3150 ui.warn(
3151 3151 _(
3152 3152 b'warning: inconsistent use of --rev might give unexpected '
3153 3153 b'revision ordering!\n'
3154 3154 )
3155 3155 )
3156 3156
3157 3157 revs = list(revs)
3158 3158 revs.extend(opts.get('rev'))
3159 3159 # a dict of data to be stored in state file
3160 3160 statedata = {}
3161 3161 # list of new nodes created by ongoing graft
3162 3162 statedata[b'newnodes'] = []
3163 3163
3164 3164 cmdutil.resolve_commit_options(ui, opts)
3165 3165
3166 3166 editor = cmdutil.getcommiteditor(editform=b'graft', **opts)
3167 3167
3168 3168 cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
3169 3169
3170 3170 cont = False
3171 3171 if opts.get('no_commit'):
3172 3172 cmdutil.check_incompatible_arguments(
3173 3173 opts,
3174 3174 'no_commit',
3175 3175 ['edit', 'currentuser', 'currentdate', 'log'],
3176 3176 )
3177 3177
3178 3178 graftstate = statemod.cmdstate(repo, b'graftstate')
3179 3179
3180 3180 if opts.get('stop'):
3181 3181 cmdutil.check_incompatible_arguments(
3182 3182 opts,
3183 3183 'stop',
3184 3184 [
3185 3185 'edit',
3186 3186 'log',
3187 3187 'user',
3188 3188 'date',
3189 3189 'currentdate',
3190 3190 'currentuser',
3191 3191 'rev',
3192 3192 ],
3193 3193 )
3194 3194 return _stopgraft(ui, repo, graftstate)
3195 3195 elif opts.get('abort'):
3196 3196 cmdutil.check_incompatible_arguments(
3197 3197 opts,
3198 3198 'abort',
3199 3199 [
3200 3200 'edit',
3201 3201 'log',
3202 3202 'user',
3203 3203 'date',
3204 3204 'currentdate',
3205 3205 'currentuser',
3206 3206 'rev',
3207 3207 ],
3208 3208 )
3209 3209 return cmdutil.abortgraft(ui, repo, graftstate)
3210 3210 elif opts.get('continue'):
3211 3211 cont = True
3212 3212 if revs:
3213 3213 raise error.InputError(_(b"can't specify --continue and revisions"))
3214 3214 # read in unfinished revisions
3215 3215 if graftstate.exists():
3216 3216 statedata = cmdutil.readgraftstate(repo, graftstate)
3217 3217 if statedata.get(b'date'):
3218 3218 opts['date'] = statedata[b'date']
3219 3219 if statedata.get(b'user'):
3220 3220 opts['user'] = statedata[b'user']
3221 3221 if statedata.get(b'log'):
3222 3222 opts['log'] = True
3223 3223 if statedata.get(b'no_commit'):
3224 3224 opts['no_commit'] = statedata.get(b'no_commit')
3225 3225 if statedata.get(b'base'):
3226 3226 opts['base'] = statedata.get(b'base')
3227 3227 nodes = statedata[b'nodes']
3228 3228 revs = [repo[node].rev() for node in nodes]
3229 3229 else:
3230 3230 cmdutil.wrongtooltocontinue(repo, _(b'graft'))
3231 3231 else:
3232 3232 if not revs:
3233 3233 raise error.InputError(_(b'no revisions specified'))
3234 3234 cmdutil.checkunfinished(repo)
3235 3235 cmdutil.bailifchanged(repo)
3236 3236 revs = logcmdutil.revrange(repo, revs)
3237 3237
3238 3238 skipped = set()
3239 3239 basectx = None
3240 3240 if opts.get('base'):
3241 3241 basectx = logcmdutil.revsingle(repo, opts['base'], None)
3242 3242 if basectx is None:
3243 3243 # check for merges
3244 3244 for rev in repo.revs(b'%ld and merge()', revs):
3245 3245 ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
3246 3246 skipped.add(rev)
3247 3247 revs = [r for r in revs if r not in skipped]
3248 3248 if not revs:
3249 3249 return -1
3250 3250 if basectx is not None and len(revs) != 1:
3251 3251 raise error.InputError(_(b'only one revision allowed with --base '))
3252 3252
3253 3253 # Don't check in the --continue case, in effect retaining --force across
3254 3254 # --continues. That's because without --force, any revisions we decided to
3255 3255 # skip would have been filtered out here, so they wouldn't have made their
3256 3256 # way to the graftstate. With --force, any revisions we would have otherwise
3257 3257 # skipped would not have been filtered out, and if they hadn't been applied
3258 3258 # already, they'd have been in the graftstate.
3259 3259 if not (cont or opts.get('force')) and basectx is None:
3260 3260 # check for ancestors of dest branch
3261 3261 ancestors = repo.revs(b'%ld & (::.)', revs)
3262 3262 for rev in ancestors:
3263 3263 ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))
3264 3264
3265 3265 revs = [r for r in revs if r not in ancestors]
3266 3266
3267 3267 if not revs:
3268 3268 return -1
3269 3269
3270 3270 # analyze revs for earlier grafts
3271 3271 ids = {}
3272 3272 for ctx in repo.set(b"%ld", revs):
3273 3273 ids[ctx.hex()] = ctx.rev()
3274 3274 n = ctx.extra().get(b'source')
3275 3275 if n:
3276 3276 ids[n] = ctx.rev()
3277 3277
3278 3278 # check ancestors for earlier grafts
3279 3279 ui.debug(b'scanning for duplicate grafts\n')
3280 3280
3281 3281 # The only changesets we can be sure doesn't contain grafts of any
3282 3282 # revs, are the ones that are common ancestors of *all* revs:
3283 3283 for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
3284 3284 ctx = repo[rev]
3285 3285 n = ctx.extra().get(b'source')
3286 3286 if n in ids:
3287 3287 try:
3288 3288 r = repo[n].rev()
3289 3289 except error.RepoLookupError:
3290 3290 r = None
3291 3291 if r in revs:
3292 3292 ui.warn(
3293 3293 _(
3294 3294 b'skipping revision %d:%s '
3295 3295 b'(already grafted to %d:%s)\n'
3296 3296 )
3297 3297 % (r, repo[r], rev, ctx)
3298 3298 )
3299 3299 revs.remove(r)
3300 3300 elif ids[n] in revs:
3301 3301 if r is None:
3302 3302 ui.warn(
3303 3303 _(
3304 3304 b'skipping already grafted revision %d:%s '
3305 3305 b'(%d:%s also has unknown origin %s)\n'
3306 3306 )
3307 3307 % (ids[n], repo[ids[n]], rev, ctx, n[:12])
3308 3308 )
3309 3309 else:
3310 3310 ui.warn(
3311 3311 _(
3312 3312 b'skipping already grafted revision %d:%s '
3313 3313 b'(%d:%s also has origin %d:%s)\n'
3314 3314 )
3315 3315 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
3316 3316 )
3317 3317 revs.remove(ids[n])
3318 3318 elif ctx.hex() in ids:
3319 3319 r = ids[ctx.hex()]
3320 3320 if r in revs:
3321 3321 ui.warn(
3322 3322 _(
3323 3323 b'skipping already grafted revision %d:%s '
3324 3324 b'(was grafted from %d:%s)\n'
3325 3325 )
3326 3326 % (r, repo[r], rev, ctx)
3327 3327 )
3328 3328 revs.remove(r)
3329 3329 if not revs:
3330 3330 return -1
3331 3331
3332 3332 if opts.get('no_commit'):
3333 3333 statedata[b'no_commit'] = True
3334 3334 if opts.get('base'):
3335 3335 statedata[b'base'] = opts['base']
3336 3336 for pos, ctx in enumerate(repo.set(b"%ld", revs)):
3337 3337 desc = b'%d:%s "%s"' % (
3338 3338 ctx.rev(),
3339 3339 ctx,
3340 3340 ctx.description().split(b'\n', 1)[0],
3341 3341 )
3342 3342 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3343 3343 if names:
3344 3344 desc += b' (%s)' % b' '.join(names)
3345 3345 ui.status(_(b'grafting %s\n') % desc)
3346 3346 if opts.get('dry_run'):
3347 3347 continue
3348 3348
3349 3349 source = ctx.extra().get(b'source')
3350 3350 extra = {}
3351 3351 if source:
3352 3352 extra[b'source'] = source
3353 3353 extra[b'intermediate-source'] = ctx.hex()
3354 3354 else:
3355 3355 extra[b'source'] = ctx.hex()
3356 3356 user = ctx.user()
3357 3357 if opts.get('user'):
3358 3358 user = opts['user']
3359 3359 statedata[b'user'] = user
3360 3360 date = ctx.date()
3361 3361 if opts.get('date'):
3362 3362 date = opts['date']
3363 3363 statedata[b'date'] = date
3364 3364 message = ctx.description()
3365 3365 if opts.get('log'):
3366 3366 message += b'\n(grafted from %s)' % ctx.hex()
3367 3367 statedata[b'log'] = True
3368 3368
3369 3369 # we don't merge the first commit when continuing
3370 3370 if not cont:
3371 3371 # perform the graft merge with p1(rev) as 'ancestor'
3372 3372 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
3373 3373 base = ctx.p1() if basectx is None else basectx
3374 3374 with ui.configoverride(overrides, b'graft'):
3375 3375 stats = mergemod.graft(
3376 3376 repo, ctx, base, [b'local', b'graft', b'parent of graft']
3377 3377 )
3378 3378 # report any conflicts
3379 3379 if stats.unresolvedcount > 0:
3380 3380 # write out state for --continue
3381 3381 nodes = [repo[rev].hex() for rev in revs[pos:]]
3382 3382 statedata[b'nodes'] = nodes
3383 3383 stateversion = 1
3384 3384 graftstate.save(stateversion, statedata)
3385 3385 ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
3386 3386 ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
3387 3387 return 1
3388 3388 else:
3389 3389 cont = False
3390 3390
3391 3391 # commit if --no-commit is false
3392 3392 if not opts.get('no_commit'):
3393 3393 node = repo.commit(
3394 3394 text=message, user=user, date=date, extra=extra, editor=editor
3395 3395 )
3396 3396 if node is None:
3397 3397 ui.warn(
3398 3398 _(b'note: graft of %d:%s created no changes to commit\n')
3399 3399 % (ctx.rev(), ctx)
3400 3400 )
3401 3401 # checking that newnodes exist because old state files won't have it
3402 3402 elif statedata.get(b'newnodes') is not None:
3403 3403 nn = statedata[b'newnodes']
3404 3404 assert isinstance(nn, list) # list of bytes
3405 3405 nn.append(node)
3406 3406
3407 3407 # remove state when we complete successfully
3408 3408 if not opts.get('dry_run'):
3409 3409 graftstate.delete()
3410 3410
3411 3411 return 0
3412 3412
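# A minimal sketch, assuming the statemod/cmdutil imports already used in
# _dograft() above, of the graftstate round-trip: the command saves the
# remaining nodes when a conflict interrupts it, and --continue reads them
# back. The helper name is illustrative only; nothing calls it.


def _sketch_peek_graftstate(repo):
    """Illustrative helper: return the nodes an interrupted graft still has."""
    graftstate = statemod.cmdstate(repo, b'graftstate')
    if not graftstate.exists():
        return []
    statedata = cmdutil.readgraftstate(repo, graftstate)
    return statedata.get(b'nodes', [])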
3413 3413
3414 3414 def _stopgraft(ui, repo, graftstate):
3415 3415 """stop the interrupted graft"""
3416 3416 if not graftstate.exists():
3417 3417 raise error.StateError(_(b"no interrupted graft found"))
3418 3418 pctx = repo[b'.']
3419 3419 mergemod.clean_update(pctx)
3420 3420 graftstate.delete()
3421 3421 ui.status(_(b"stopped the interrupted graft\n"))
3422 3422 ui.status(_(b"working directory is now at %s\n") % pctx.hex()[:12])
3423 3423 return 0
3424 3424
3425 3425
3426 3426 statemod.addunfinished(
3427 3427 b'graft',
3428 3428 fname=b'graftstate',
3429 3429 clearable=True,
3430 3430 stopflag=True,
3431 3431 continueflag=True,
3432 3432 abortfunc=cmdutil.hgabortgraft,
3433 3433 cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
3434 3434 )
3435 3435
3436 3436
3437 3437 @command(
3438 3438 b'grep',
3439 3439 [
3440 3440 (b'0', b'print0', None, _(b'end fields with NUL')),
3441 3441 (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
3442 3442 (
3443 3443 b'',
3444 3444 b'diff',
3445 3445 None,
3446 3446 _(
3447 3447 b'search revision differences for when the pattern was added '
3448 3448 b'or removed'
3449 3449 ),
3450 3450 ),
3451 3451 (b'a', b'text', None, _(b'treat all files as text')),
3452 3452 (
3453 3453 b'f',
3454 3454 b'follow',
3455 3455 None,
3456 3456 _(
3457 3457 b'follow changeset history,'
3458 3458 b' or file history across copies and renames'
3459 3459 ),
3460 3460 ),
3461 3461 (b'i', b'ignore-case', None, _(b'ignore case when matching')),
3462 3462 (
3463 3463 b'l',
3464 3464 b'files-with-matches',
3465 3465 None,
3466 3466 _(b'print only filenames and revisions that match'),
3467 3467 ),
3468 3468 (b'n', b'line-number', None, _(b'print matching line numbers')),
3469 3469 (
3470 3470 b'r',
3471 3471 b'rev',
3472 3472 [],
3473 3473 _(b'search files changed within revision range'),
3474 3474 _(b'REV'),
3475 3475 ),
3476 3476 (
3477 3477 b'',
3478 3478 b'all-files',
3479 3479 None,
3480 3480 _(
3481 3481 b'include all files in the changeset while grepping (DEPRECATED)'
3482 3482 ),
3483 3483 ),
3484 3484 (b'u', b'user', None, _(b'list the author (long with -v)')),
3485 3485 (b'd', b'date', None, _(b'list the date (short with -q)')),
3486 3486 ]
3487 3487 + formatteropts
3488 3488 + walkopts,
3489 3489 _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
3490 3490 helpcategory=command.CATEGORY_FILE_CONTENTS,
3491 3491 inferrepo=True,
3492 3492 intents={INTENT_READONLY},
3493 3493 )
3494 3494 def grep(ui, repo, pattern, *pats, **opts):
3495 3495 """search for a pattern in specified files
3496 3496
3497 3497 Search the working directory or revision history for a regular
3498 3498 expression in the specified files for the entire repository.
3499 3499
3500 3500 By default, grep searches the repository files in the working
3501 3501 directory and prints the files where it finds a match. To specify
3502 3502 historical revisions instead of the working directory, use the
3503 3503 --rev flag.
3504 3504
3505 3505 To instead search historical revision differences that contain a
3506 3506 change in match status ("-" for a match that becomes a non-match,
3507 3507 or "+" for a non-match that becomes a match), use the --diff flag.
3508 3508
3509 3509 PATTERN can be any Python (roughly Perl-compatible) regular
3510 3510 expression.
3511 3511
3512 3512 If no FILEs are specified and the --rev flag isn't supplied, all
3513 3513 files in the working directory are searched. When using the --rev
3514 3514 flag and specifying FILEs, use the --follow argument to also
3515 3515 follow the specified FILEs across renames and copies.
3516 3516
3517 3517 .. container:: verbose
3518 3518
3519 3519 Template:
3520 3520
3521 3521 The following keywords are supported in addition to the common template
3522 3522 keywords and functions. See also :hg:`help templates`.
3523 3523
3524 3524 :change: String. Character denoting insertion ``+`` or removal ``-``.
3525 3525 Available if ``--diff`` is specified.
3526 3526 :lineno: Integer. Line number of the match.
3527 3527 :path: String. Repository-absolute path of the file.
3528 3528 :texts: List of text chunks.
3529 3529
3530 3530 And each entry of ``{texts}`` provides the following sub-keywords.
3531 3531
3532 3532 :matched: Boolean. True if the chunk matches the specified pattern.
3533 3533 :text: String. Chunk content.
3534 3534
3535 3535 See :hg:`help templates.operators` for the list expansion syntax.
3536 3536
3537 3537 Returns 0 if a match is found, 1 otherwise.
3538 3538
3539 3539 """
3540 3540 cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
3541 3541
3542 3542 diff = opts.get('all') or opts.get('diff')
3543 3543 follow = opts.get('follow')
3544 3544 if opts.get('all_files') is None and not diff:
3545 3545 opts['all_files'] = True
3546 3546 plaingrep = (
3547 3547 opts.get('all_files') and not opts.get('rev') and not opts.get('follow')
3548 3548 )
3549 3549 all_files = opts.get('all_files')
3550 3550 if plaingrep:
3551 3551 opts['rev'] = [b'wdir()']
3552 3552
3553 3553 reflags = re.M
3554 3554 if opts.get('ignore_case'):
3555 3555 reflags |= re.I
3556 3556 try:
3557 3557 regexp = util.re.compile(pattern, reflags)
3558 3558 except re.error as inst:
3559 3559 ui.warn(
3560 3560 _(b"grep: invalid match pattern: %s\n")
3561 3561 % stringutil.forcebytestr(inst)
3562 3562 )
3563 3563 return 1
3564 3564 sep, eol = b':', b'\n'
3565 3565 if opts.get('print0'):
3566 3566 sep = eol = b'\0'
3567 3567
3568 3568 searcher = grepmod.grepsearcher(
3569 3569 ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
3570 3570 )
3571 3571
3572 3572 getfile = searcher._getfile
3573 3573
3574 3574 uipathfn = scmutil.getuipathfn(repo)
3575 3575
3576 3576 def display(fm, fn, ctx, pstates, states):
3577 3577 rev = scmutil.intrev(ctx)
3578 3578 if fm.isplain():
3579 3579 formatuser = ui.shortuser
3580 3580 else:
3581 3581 formatuser = pycompat.bytestr
3582 3582 if ui.quiet:
3583 3583 datefmt = b'%Y-%m-%d'
3584 3584 else:
3585 3585 datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
3586 3586 found = False
3587 3587
3588 3588 @util.cachefunc
3589 3589 def binary():
3590 3590 flog = getfile(fn)
3591 3591 try:
3592 3592 return stringutil.binary(flog.read(ctx.filenode(fn)))
3593 3593 except error.WdirUnsupported:
3594 3594 return ctx[fn].isbinary()
3595 3595
3596 3596 fieldnamemap = {b'linenumber': b'lineno'}
3597 3597 if diff:
3598 3598 iter = grepmod.difflinestates(pstates, states)
3599 3599 else:
3600 3600 iter = [(b'', l) for l in states]
3601 3601 for change, l in iter:
3602 3602 fm.startitem()
3603 3603 fm.context(ctx=ctx)
3604 3604 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
3605 3605 fm.plain(uipathfn(fn), label=b'grep.filename')
3606 3606
3607 3607 cols = [
3608 3608 (b'rev', b'%d', rev, not plaingrep, b''),
3609 3609 (
3610 3610 b'linenumber',
3611 3611 b'%d',
3612 3612 l.linenum,
3613 3613 opts.get('line_number'),
3614 3614 b'',
3615 3615 ),
3616 3616 ]
3617 3617 if diff:
3618 3618 cols.append(
3619 3619 (
3620 3620 b'change',
3621 3621 b'%s',
3622 3622 change,
3623 3623 True,
3624 3624 b'grep.inserted '
3625 3625 if change == b'+'
3626 3626 else b'grep.deleted ',
3627 3627 )
3628 3628 )
3629 3629 cols.extend(
3630 3630 [
3631 3631 (
3632 3632 b'user',
3633 3633 b'%s',
3634 3634 formatuser(ctx.user()),
3635 3635 opts.get('user'),
3636 3636 b'',
3637 3637 ),
3638 3638 (
3639 3639 b'date',
3640 3640 b'%s',
3641 3641 fm.formatdate(ctx.date(), datefmt),
3642 3642 opts.get('date'),
3643 3643 b'',
3644 3644 ),
3645 3645 ]
3646 3646 )
3647 3647 for name, fmt, data, cond, extra_label in cols:
3648 3648 if cond:
3649 3649 fm.plain(sep, label=b'grep.sep')
3650 3650 field = fieldnamemap.get(name, name)
3651 3651 label = extra_label + (b'grep.%s' % name)
3652 3652 fm.condwrite(cond, field, fmt, data, label=label)
3653 3653 if not opts.get('files_with_matches'):
3654 3654 fm.plain(sep, label=b'grep.sep')
3655 3655 if not opts.get('text') and binary():
3656 3656 fm.plain(_(b" Binary file matches"))
3657 3657 else:
3658 3658 displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
3659 3659 fm.plain(eol)
3660 3660 found = True
3661 3661 if opts.get('files_with_matches'):
3662 3662 break
3663 3663 return found
3664 3664
3665 3665 def displaymatches(fm, l):
3666 3666 p = 0
3667 3667 for s, e in l.findpos(regexp):
3668 3668 if p < s:
3669 3669 fm.startitem()
3670 3670 fm.write(b'text', b'%s', l.line[p:s])
3671 3671 fm.data(matched=False)
3672 3672 fm.startitem()
3673 3673 fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
3674 3674 fm.data(matched=True)
3675 3675 p = e
3676 3676 if p < len(l.line):
3677 3677 fm.startitem()
3678 3678 fm.write(b'text', b'%s', l.line[p:])
3679 3679 fm.data(matched=False)
3680 3680 fm.end()
3681 3681
3682 3682 found = False
3683 3683
3684 3684 wopts = logcmdutil.walkopts(
3685 3685 pats=pats,
3686 3686 opts=opts,
3687 3687 revspec=opts['rev'],
3688 3688 include_pats=opts['include'],
3689 3689 exclude_pats=opts['exclude'],
3690 3690 follow=follow,
3691 3691 force_changelog_traversal=all_files,
3692 3692 filter_revisions_by_pats=not all_files,
3693 3693 )
3694 3694 revs, makefilematcher = logcmdutil.makewalker(repo, wopts)
3695 3695
3696 3696 ui.pager(b'grep')
3697 3697 fm = ui.formatter(b'grep', pycompat.byteskwargs(opts))
3698 3698 for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
3699 3699 r = display(fm, fn, ctx, pstates, states)
3700 3700 found = found or r
3701 3701 if r and not diff and not all_files:
3702 3702 searcher.skipfile(fn, ctx.rev())
3703 3703 fm.end()
3704 3704
3705 3705 return not found
3706 3706
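# Hedged template sketch for the keywords documented above (PATTERN and the
# exact output shape are illustrative only):
#
#     hg grep -n -T '{path}:{lineno}: {texts % "{text}"}\n' PATTERN
#
# {texts} is the chunk list produced by displaymatches() above, so the
# "texts % ..." list expansion stitches a matched line back together much
# like the plain formatter output.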
3707 3707
3708 3708 @command(
3709 3709 b'heads',
3710 3710 [
3711 3711 (
3712 3712 b'r',
3713 3713 b'rev',
3714 3714 b'',
3715 3715 _(b'show only heads which are descendants of STARTREV'),
3716 3716 _(b'STARTREV'),
3717 3717 ),
3718 3718 (b't', b'topo', False, _(b'show topological heads only')),
3719 3719 (
3720 3720 b'a',
3721 3721 b'active',
3722 3722 False,
3723 3723 _(b'show active branchheads only (DEPRECATED)'),
3724 3724 ),
3725 3725 (b'c', b'closed', False, _(b'show normal and closed branch heads')),
3726 3726 ]
3727 3727 + templateopts,
3728 3728 _(b'[-ct] [-r STARTREV] [REV]...'),
3729 3729 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3730 3730 intents={INTENT_READONLY},
3731 3731 )
3732 3732 def heads(ui, repo, *branchrevs, **opts):
3733 3733 """show branch heads
3734 3734
3735 3735 With no arguments, show all open branch heads in the repository.
3736 3736 Branch heads are changesets that have no descendants on the
3737 3737 same branch. They are where development generally takes place and
3738 3738 are the usual targets for update and merge operations.
3739 3739
3740 3740 If one or more REVs are given, only open branch heads on the
3741 3741 branches associated with the specified changesets are shown. This
3742 3742 means that you can use :hg:`heads .` to see the heads on the
3743 3743 currently checked-out branch.
3744 3744
3745 3745 If -c/--closed is specified, also show branch heads marked closed
3746 3746 (see :hg:`commit --close-branch`).
3747 3747
3748 3748 If STARTREV is specified, only those heads that are descendants of
3749 3749 STARTREV will be displayed.
3750 3750
3751 3751 If -t/--topo is specified, named branch mechanics will be ignored and only
3752 3752 topological heads (changesets with no children) will be shown.
3753 3753
3754 3754 Returns 0 if matching heads are found, 1 if not.
3755 3755 """
3756 3756
3757 3757 opts = pycompat.byteskwargs(opts)
3758 3758 start = None
3759 3759 rev = opts.get(b'rev')
3760 3760 if rev:
3761 3761 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3762 3762 start = logcmdutil.revsingle(repo, rev, None).node()
3763 3763
3764 3764 if opts.get(b'topo'):
3765 3765 heads = [repo[h] for h in repo.heads(start)]
3766 3766 else:
3767 3767 heads = []
3768 3768 for branch in repo.branchmap():
3769 3769 heads += repo.branchheads(branch, start, opts.get(b'closed'))
3770 3770 heads = [repo[h] for h in heads]
3771 3771
3772 3772 if branchrevs:
3773 3773 branches = {
3774 3774 repo[r].branch() for r in logcmdutil.revrange(repo, branchrevs)
3775 3775 }
3776 3776 heads = [h for h in heads if h.branch() in branches]
3777 3777
3778 3778 if opts.get(b'active') and branchrevs:
3779 3779 dagheads = repo.heads(start)
3780 3780 heads = [h for h in heads if h.node() in dagheads]
3781 3781
3782 3782 if branchrevs:
3783 3783 haveheads = {h.branch() for h in heads}
3784 3784 if branches - haveheads:
3785 3785 headless = b', '.join(b for b in branches - haveheads)
3786 3786 msg = _(b'no open branch heads found on branches %s')
3787 3787 if opts.get(b'rev'):
3788 3788 msg += _(b' (started at %s)') % opts[b'rev']
3789 3789 ui.warn((msg + b'\n') % headless)
3790 3790
3791 3791 if not heads:
3792 3792 return 1
3793 3793
3794 3794 ui.pager(b'heads')
3795 3795 heads = sorted(heads, key=lambda x: -(x.rev()))
3796 3796 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3797 3797 for ctx in heads:
3798 3798 displayer.show(ctx)
3799 3799 displayer.close()
3800 3800
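# Illustrative invocations of the options handled above (tag and branch
# names are examples only):
#
#     hg heads -t              # topological heads only (no children at all)
#     hg heads --closed        # also include heads closed with
#                              # `hg commit --close-branch`
#     hg heads -r 1.4 default  # heads of the 'default' branch that descend
#                              # from the revision tagged 1.4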
3801 3801
3802 3802 @command(
3803 3803 b'help',
3804 3804 [
3805 3805 (b'e', b'extension', None, _(b'show only help for extensions')),
3806 3806 (b'c', b'command', None, _(b'show only help for commands')),
3807 3807 (b'k', b'keyword', None, _(b'show topics matching keyword')),
3808 3808 (
3809 3809 b's',
3810 3810 b'system',
3811 3811 [],
3812 3812 _(b'show help for specific platform(s)'),
3813 3813 _(b'PLATFORM'),
3814 3814 ),
3815 3815 ],
3816 3816 _(b'[-eck] [-s PLATFORM] [TOPIC]'),
3817 3817 helpcategory=command.CATEGORY_HELP,
3818 3818 norepo=True,
3819 3819 intents={INTENT_READONLY},
3820 3820 )
3821 3821 def help_(ui, name=None, **opts):
3822 3822 """show help for a given topic or a help overview
3823 3823
3824 3824 With no arguments, print a list of commands with short help messages.
3825 3825
3826 3826 Given a topic, extension, or command name, print help for that
3827 3827 topic.
3828 3828
3829 3829 Returns 0 if successful.
3830 3830 """
3831 3831
3832 3832 keep = opts.get('system') or []
3833 3833 if len(keep) == 0:
3834 3834 if pycompat.sysplatform.startswith(b'win'):
3835 3835 keep.append(b'windows')
3836 3836 elif pycompat.sysplatform == b'OpenVMS':
3837 3837 keep.append(b'vms')
3838 3838 elif pycompat.sysplatform == b'plan9':
3839 3839 keep.append(b'plan9')
3840 3840 else:
3841 3841 keep.append(b'unix')
3842 3842 keep.append(pycompat.sysplatform.lower())
3843 3843 if ui.verbose:
3844 3844 keep.append(b'verbose')
3845 3845
3846 3846 commands = sys.modules[__name__]
3847 3847 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
3848 3848 ui.pager(b'help')
3849 3849 ui.write(formatted)
3850 3850
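# Illustrative invocations of the flags handled above (topic names are
# examples only):
#
#     hg help -k template        # topics whose help mentions "template"
#     hg help -c diff            # restrict the lookup to command help
#     hg help -s windows config  # render config help as shown on Windows
#
# The -s/--system values land in `keep`, which otherwise defaults to the
# current platform, plus 'verbose' when --verbose is active.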
3851 3851
3852 3852 @command(
3853 3853 b'identify|id',
3854 3854 [
3855 3855 (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
3856 3856 (b'n', b'num', None, _(b'show local revision number')),
3857 3857 (b'i', b'id', None, _(b'show global revision id')),
3858 3858 (b'b', b'branch', None, _(b'show branch')),
3859 3859 (b't', b'tags', None, _(b'show tags')),
3860 3860 (b'B', b'bookmarks', None, _(b'show bookmarks')),
3861 3861 ]
3862 3862 + remoteopts
3863 3863 + formatteropts,
3864 3864 _(b'[-nibtB] [-r REV] [SOURCE]'),
3865 3865 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3866 3866 optionalrepo=True,
3867 3867 intents={INTENT_READONLY},
3868 3868 )
3869 3869 def identify(
3870 3870 ui,
3871 3871 repo,
3872 3872 source=None,
3873 3873 rev=None,
3874 3874 num=None,
3875 3875 id=None,
3876 3876 branch=None,
3877 3877 tags=None,
3878 3878 bookmarks=None,
3879 3879 **opts
3880 3880 ):
3881 3881 """identify the working directory or specified revision
3882 3882
3883 3883 Print a summary identifying the repository state at REV using one or
3884 3884 two parent hash identifiers, followed by a "+" if the working
3885 3885 directory has uncommitted changes, the branch name (if not default),
3886 3886 a list of tags, and a list of bookmarks.
3887 3887
3888 3888 When REV is not given, print a summary of the current state of the
3889 3889 repository, including the working directory. Specify -r . to get information
3890 3890 about the working directory parent without scanning uncommitted changes.
3891 3891
3892 3892 Specifying a path to a repository root or Mercurial bundle will
3893 3893 cause lookup to operate on that repository/bundle.
3894 3894
3895 3895 .. container:: verbose
3896 3896
3897 3897 Template:
3898 3898
3899 3899 The following keywords are supported in addition to the common template
3900 3900 keywords and functions. See also :hg:`help templates`.
3901 3901
3902 3902 :dirty: String. Character ``+`` denoting if the working directory has
3903 3903 uncommitted changes.
3904 3904 :id: String. One or two nodes, optionally followed by ``+``.
3905 3905 :parents: List of strings. Parent nodes of the changeset.
3906 3906
3907 3907 Examples:
3908 3908
3909 3909 - generate a build identifier for the working directory::
3910 3910
3911 3911 hg id --id > build-id.dat
3912 3912
3913 3913 - find the revision corresponding to a tag::
3914 3914
3915 3915 hg id -n -r 1.3
3916 3916
3917 3917 - check the most recent revision of a remote repository::
3918 3918
3919 3919 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3920 3920
3921 3921 See :hg:`log` for generating more information about specific revisions,
3922 3922 including full hash identifiers.
3923 3923
3924 3924 Returns 0 if successful.
3925 3925 """
3926 3926
3927 3927 opts = pycompat.byteskwargs(opts)
3928 3928 if not repo and not source:
3929 3929 raise error.InputError(
3930 3930 _(b"there is no Mercurial repository here (.hg not found)")
3931 3931 )
3932 3932
3933 3933 default = not (num or id or branch or tags or bookmarks)
3934 3934 output = []
3935 3935 revs = []
3936 3936
3937 3937 peer = None
3938 3938 try:
3939 3939 if source:
3940 3940 path = urlutil.get_unique_pull_path_obj(b'identify', ui, source)
3941 3941 # only pass ui when no repo
3942 3942 peer = hg.peer(repo or ui, opts, path)
3943 3943 repo = peer.local()
3944 3944 branches = (path.branch, [])
3945 3945 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3946 3946
3947 3947 fm = ui.formatter(b'identify', opts)
3948 3948 fm.startitem()
3949 3949
3950 3950 if not repo:
3951 3951 if num or branch or tags:
3952 3952 raise error.InputError(
3953 3953 _(b"can't query remote revision number, branch, or tags")
3954 3954 )
3955 3955 if not rev and revs:
3956 3956 rev = revs[0]
3957 3957 if not rev:
3958 3958 rev = b"tip"
3959 3959
3960 3960 remoterev = peer.lookup(rev)
3961 3961 hexrev = fm.hexfunc(remoterev)
3962 3962 if default or id:
3963 3963 output = [hexrev]
3964 3964 fm.data(id=hexrev)
3965 3965
3966 3966 @util.cachefunc
3967 3967 def getbms():
3968 3968 bms = []
3969 3969
3970 3970 if b'bookmarks' in peer.listkeys(b'namespaces'):
3971 3971 hexremoterev = hex(remoterev)
3972 3972 bms = [
3973 3973 bm
3974 3974 for bm, bmr in peer.listkeys(b'bookmarks').items()
3975 3975 if bmr == hexremoterev
3976 3976 ]
3977 3977
3978 3978 return sorted(bms)
3979 3979
3980 3980 if fm.isplain():
3981 3981 if bookmarks:
3982 3982 output.extend(getbms())
3983 3983 elif default and not ui.quiet:
3984 3984 # multiple bookmarks for a single parent separated by '/'
3985 3985 bm = b'/'.join(getbms())
3986 3986 if bm:
3987 3987 output.append(bm)
3988 3988 else:
3989 3989 fm.data(node=hex(remoterev))
3990 3990 if bookmarks or b'bookmarks' in fm.datahint():
3991 3991 fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
3992 3992 else:
3993 3993 if rev:
3994 3994 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3995 3995 ctx = logcmdutil.revsingle(repo, rev, None)
3996 3996
3997 3997 if ctx.rev() is None:
3998 3998 ctx = repo[None]
3999 3999 parents = ctx.parents()
4000 4000 taglist = []
4001 4001 for p in parents:
4002 4002 taglist.extend(p.tags())
4003 4003
4004 4004 dirty = b""
4005 4005 if ctx.dirty(missing=True, merge=False, branch=False):
4006 4006 dirty = b'+'
4007 4007 fm.data(dirty=dirty)
4008 4008
4009 4009 hexoutput = [fm.hexfunc(p.node()) for p in parents]
4010 4010 if default or id:
4011 4011 output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
4012 4012 fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
4013 4013
4014 4014 if num:
4015 4015 numoutput = [b"%d" % p.rev() for p in parents]
4016 4016 output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
4017 4017
4018 4018 fm.data(
4019 4019 parents=fm.formatlist(
4020 4020 [fm.hexfunc(p.node()) for p in parents], name=b'node'
4021 4021 )
4022 4022 )
4023 4023 else:
4024 4024 hexoutput = fm.hexfunc(ctx.node())
4025 4025 if default or id:
4026 4026 output = [hexoutput]
4027 4027 fm.data(id=hexoutput)
4028 4028
4029 4029 if num:
4030 4030 output.append(pycompat.bytestr(ctx.rev()))
4031 4031 taglist = ctx.tags()
4032 4032
4033 4033 if default and not ui.quiet:
4034 4034 b = ctx.branch()
4035 4035 if b != b'default':
4036 4036 output.append(b"(%s)" % b)
4037 4037
4038 4038 # multiple tags for a single parent separated by '/'
4039 4039 t = b'/'.join(taglist)
4040 4040 if t:
4041 4041 output.append(t)
4042 4042
4043 4043 # multiple bookmarks for a single parent separated by '/'
4044 4044 bm = b'/'.join(ctx.bookmarks())
4045 4045 if bm:
4046 4046 output.append(bm)
4047 4047 else:
4048 4048 if branch:
4049 4049 output.append(ctx.branch())
4050 4050
4051 4051 if tags:
4052 4052 output.extend(taglist)
4053 4053
4054 4054 if bookmarks:
4055 4055 output.extend(ctx.bookmarks())
4056 4056
4057 4057 fm.data(node=ctx.hex())
4058 4058 fm.data(branch=ctx.branch())
4059 4059 fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
4060 4060 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
4061 4061 fm.context(ctx=ctx)
4062 4062
4063 4063 fm.plain(b"%s\n" % b' '.join(output))
4064 4064 fm.end()
4065 4065 finally:
4066 4066 if peer:
4067 4067 peer.close()
4068 4068
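# Hedged template sketch for the keywords documented above (output shape
# only, no real data implied):
#
#     hg id -T '{id}{dirty}\n'
#
# prints the node(s) of the working-directory parent(s) followed by "+"
# when there are uncommitted changes, mirroring what the plain-output
# branch of the code assembles into `output`.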
4069 4069
4070 4070 @command(
4071 4071 b'import|patch',
4072 4072 [
4073 4073 (
4074 4074 b'p',
4075 4075 b'strip',
4076 4076 1,
4077 4077 _(
4078 4078 b'directory strip option for patch. This has the same '
4079 4079 b'meaning as the corresponding patch option'
4080 4080 ),
4081 4081 _(b'NUM'),
4082 4082 ),
4083 4083 (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
4084 4084 (b'', b'secret', None, _(b'use the secret phase for committing')),
4085 4085 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
4086 4086 (
4087 4087 b'f',
4088 4088 b'force',
4089 4089 None,
4090 4090 _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
4091 4091 ),
4092 4092 (
4093 4093 b'',
4094 4094 b'no-commit',
4095 4095 None,
4096 4096 _(b"don't commit, just update the working directory"),
4097 4097 ),
4098 4098 (
4099 4099 b'',
4100 4100 b'bypass',
4101 4101 None,
4102 4102 _(b"apply patch without touching the working directory"),
4103 4103 ),
4104 4104 (b'', b'partial', None, _(b'commit even if some hunks fail')),
4105 4105 (b'', b'exact', None, _(b'abort if patch would apply lossily')),
4106 4106 (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
4107 4107 (
4108 4108 b'',
4109 4109 b'import-branch',
4110 4110 None,
4111 4111 _(b'use any branch information in patch (implied by --exact)'),
4112 4112 ),
4113 4113 ]
4114 4114 + commitopts
4115 4115 + commitopts2
4116 4116 + similarityopts,
4117 4117 _(b'[OPTION]... PATCH...'),
4118 4118 helpcategory=command.CATEGORY_IMPORT_EXPORT,
4119 4119 )
4120 4120 def import_(ui, repo, patch1=None, *patches, **opts):
4121 4121 """import an ordered set of patches
4122 4122
4123 4123 Import a list of patches and commit them individually (unless
4124 4124 --no-commit is specified).
4125 4125
4126 4126 To read a patch from standard input (stdin), use "-" as the patch
4127 4127 name. If a URL is specified, the patch will be downloaded from
4128 4128 there.
4129 4129
4130 4130 Import first applies changes to the working directory (unless
4131 4131 --bypass is specified); import will abort if there are outstanding
4132 4132 changes.
4133 4133
4134 4134 Use --bypass to apply and commit patches directly to the
4135 4135 repository, without affecting the working directory. Without
4136 4136 --exact, patches will be applied on top of the working directory
4137 4137 parent revision.
4138 4138
4139 4139 You can import a patch straight from a mail message. Even patches
4140 4140 as attachments work (to use the body part, it must have type
4141 4141 text/plain or text/x-patch). The From and Subject headers of the email
4142 4142 message are used as the default committer and commit message. All
4143 4143 text/plain body parts before the first diff are added to the commit
4144 4144 message.
4145 4145
4146 4146 If the imported patch was generated by :hg:`export`, user and
4147 4147     description from the patch override values from the message headers and
4148 4148     body. Values given on the command line with -m/--message and -u/--user
4149 4149 override these.
4150 4150
4151 4151 If --exact is specified, import will set the working directory to
4152 4152 the parent of each patch before applying it, and will abort if the
4153 4153 resulting changeset has a different ID than the one recorded in
4154 4154 the patch. This will guard against various ways that portable
4155 4155 patch formats and mail systems might fail to transfer Mercurial
4156 4156 data or metadata. See :hg:`bundle` for lossless transmission.
4157 4157
4158 4158 Use --partial to ensure a changeset will be created from the patch
4159 4159 even if some hunks fail to apply. Hunks that fail to apply will be
4160 4160 written to a <target-file>.rej file. Conflicts can then be resolved
4161 4161 by hand before :hg:`commit --amend` is run to update the created
4162 4162 changeset. This flag exists to let people import patches that
4163 4163 partially apply without losing the associated metadata (author,
4164 4164 date, description, ...).
4165 4165
4166 4166 .. note::
4167 4167
4168 4168 When no hunks apply cleanly, :hg:`import --partial` will create
4169 4169 an empty changeset, importing only the patch metadata.
4170 4170
4171 4171 With -s/--similarity, hg will attempt to discover renames and
4172 4172 copies in the patch in the same way as :hg:`addremove`.
4173 4173
4174 4174 It is possible to use external patch programs to perform the patch
4175 4175 by setting the ``ui.patch`` configuration option. For the default
4176 4176 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4177 4177 See :hg:`help config` for more information about configuration
4178 4178 files and how to use these options.
4179 4179
4180 4180 See :hg:`help dates` for a list of formats valid for -d/--date.
4181 4181
4182 4182 .. container:: verbose
4183 4183
4184 4184 Examples:
4185 4185
4186 4186 - import a traditional patch from a website and detect renames::
4187 4187
4188 4188 hg import -s 80 http://example.com/bugfix.patch
4189 4189
4190 4190 - import a changeset from an hgweb server::
4191 4191
4192 4192 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4193 4193
4194 4194       - import all the patches in a Unix-style mbox::
4195 4195
4196 4196 hg import incoming-patches.mbox
4197 4197
4198 4198 - import patches from stdin::
4199 4199
4200 4200 hg import -
4201 4201
4202 4202 - attempt to exactly restore an exported changeset (not always
4203 4203 possible)::
4204 4204
4205 4205 hg import --exact proposed-fix.patch
4206 4206
4207 4207 - use an external tool to apply a patch which is too fuzzy for
4208 4208         the default internal tool::
4209 4209
4210 4210 hg import --config ui.patch="patch --merge" fuzzy.patch
4211 4211
4212 4212       - change the default fuzzing from 2 to a less strict 7::
4213 4213
4214 4214           hg import --config patch.fuzz=7 fuzz.patch
4215 4215
4216 4216 Returns 0 on success, 1 on partial success (see --partial).
4217 4217 """
4218 4218
4219 4219 cmdutil.check_incompatible_arguments(
4220 4220 opts, 'no_commit', ['bypass', 'secret']
4221 4221 )
4222 4222 cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
4223 4223 opts = pycompat.byteskwargs(opts)
4224 4224 if not patch1:
4225 4225 raise error.InputError(_(b'need at least one patch to import'))
4226 4226
4227 4227 patches = (patch1,) + patches
4228 4228
4229 4229 date = opts.get(b'date')
4230 4230 if date:
4231 4231 opts[b'date'] = dateutil.parsedate(date)
4232 4232
4233 4233 exact = opts.get(b'exact')
4234 4234 update = not opts.get(b'bypass')
4235 4235 try:
4236 4236 sim = float(opts.get(b'similarity') or 0)
4237 4237 except ValueError:
4238 4238 raise error.InputError(_(b'similarity must be a number'))
4239 4239 if sim < 0 or sim > 100:
4240 4240 raise error.InputError(_(b'similarity must be between 0 and 100'))
4241 4241 if sim and not update:
4242 4242 raise error.InputError(_(b'cannot use --similarity with --bypass'))
4243 4243
4244 4244 base = opts[b"base"]
4245 4245 msgs = []
4246 4246 ret = 0
4247 4247
4248 4248 with repo.wlock():
4249 4249 if update:
4250 4250 cmdutil.checkunfinished(repo)
4251 4251 if exact or not opts.get(b'force'):
4252 4252 cmdutil.bailifchanged(repo)
4253 4253
4254 4254 if not opts.get(b'no_commit'):
4255 4255 lock = repo.lock
4256 4256 tr = lambda: repo.transaction(b'import')
4257 4257 else:
4258 4258 lock = util.nullcontextmanager
4259 4259 tr = util.nullcontextmanager
4260 4260 with lock(), tr():
4261 4261 parents = repo[None].parents()
4262 4262 for patchurl in patches:
4263 4263 if patchurl == b'-':
4264 4264 ui.status(_(b'applying patch from stdin\n'))
4265 4265 patchfile = ui.fin
4266 4266 patchurl = b'stdin' # for error message
4267 4267 else:
4268 4268 patchurl = os.path.join(base, patchurl)
4269 4269 ui.status(_(b'applying %s\n') % patchurl)
4270 4270 patchfile = hg.openpath(ui, patchurl, sendaccept=False)
4271 4271
4272 4272 haspatch = False
4273 4273 for hunk in patch.split(patchfile):
4274 4274 with patch.extract(ui, hunk) as patchdata:
4275 4275 msg, node, rej = cmdutil.tryimportone(
4276 4276 ui, repo, patchdata, parents, opts, msgs, hg.clean
4277 4277 )
4278 4278 if msg:
4279 4279 haspatch = True
4280 4280 ui.note(msg + b'\n')
4281 4281 if update or exact:
4282 4282 parents = repo[None].parents()
4283 4283 else:
4284 4284 parents = [repo[node]]
4285 4285 if rej:
4286 4286 ui.write_err(_(b"patch applied partially\n"))
4287 4287 ui.write_err(
4288 4288 _(
4289 4289 b"(fix the .rej files and run "
4290 4290 b"`hg commit --amend`)\n"
4291 4291 )
4292 4292 )
4293 4293 ret = 1
4294 4294 break
4295 4295
4296 4296 if not haspatch:
4297 4297 raise error.InputError(_(b'%s: no diffs found') % patchurl)
4298 4298
4299 4299 if msgs:
4300 4300 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
4301 4301 return ret
4302 4302
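The --partial workflow described above is easy to script: run the import and branch on the exit status, since 1 means "partial success". This is only an illustrative sketch; it assumes an ``hg`` executable on PATH, the current directory being a repository, and a hypothetical patch file name::

    import subprocess

    # Assumes `hg` is on PATH and the current directory is a repository;
    # 'proposed-fix.patch' is a placeholder name.
    res = subprocess.run(['hg', 'import', '--partial', 'proposed-fix.patch'])
    if res.returncode == 1:
        # A changeset was still created from the hunks that applied; the
        # rejected hunks are left in *.rej files next to their targets.
        print('fix the .rej files, then run: hg commit --amend')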
4303 4303
4304 4304 @command(
4305 4305 b'incoming|in',
4306 4306 [
4307 4307 (
4308 4308 b'f',
4309 4309 b'force',
4310 4310 None,
4311 4311 _(b'run even if remote repository is unrelated'),
4312 4312 ),
4313 4313 (b'n', b'newest-first', None, _(b'show newest record first')),
4314 4314 (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
4315 4315 (
4316 4316 b'r',
4317 4317 b'rev',
4318 4318 [],
4319 4319 _(b'a remote changeset intended to be added'),
4320 4320 _(b'REV'),
4321 4321 ),
4322 4322 (b'B', b'bookmarks', False, _(b"compare bookmarks")),
4323 4323 (
4324 4324 b'b',
4325 4325 b'branch',
4326 4326 [],
4327 4327 _(b'a specific branch you would like to pull'),
4328 4328 _(b'BRANCH'),
4329 4329 ),
4330 4330 ]
4331 4331 + logopts
4332 4332 + remoteopts
4333 4333 + subrepoopts,
4334 4334 _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
4335 4335 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4336 4336 )
4337 4337 def incoming(ui, repo, source=b"default", **opts):
4338 4338 """show new changesets found in source
4339 4339
4340 4340 Show new changesets found in the specified path/URL or the default
4341 4341 pull location. These are the changesets that would have been pulled
4342 4342 by :hg:`pull` at the time you issued this command.
4343 4343
4344 4344 See pull for valid source format details.
4345 4345
4346 4346 .. container:: verbose
4347 4347
4348 4348 With -B/--bookmarks, the result of bookmark comparison between
4349 4349 local and remote repositories is displayed. With -v/--verbose,
4350 4350 status is also displayed for each bookmark like below::
4351 4351
4352 4352 BM1 01234567890a added
4353 4353 BM2 1234567890ab advanced
4354 4354 BM3 234567890abc diverged
4355 4355 BM4 34567890abcd changed
4356 4356
4357 4357 The action taken locally when pulling depends on the
4358 4358 status of each bookmark:
4359 4359
4360 4360 :``added``: pull will create it
4361 4361 :``advanced``: pull will update it
4362 4362 :``diverged``: pull will create a divergent bookmark
4363 4363 :``changed``: result depends on remote changesets
4364 4364
4365 4365     From the point of view of pulling behavior, bookmarks
4366 4366     existing only in the remote repository are treated as ``added``,
4367 4367     even if they are in fact locally deleted.
4368 4368
4369 4369 .. container:: verbose
4370 4370
4371 4371     For a remote repository, using --bundle avoids downloading the
4372 4372 changesets twice if the incoming is followed by a pull.
4373 4373
4374 4374 Examples:
4375 4375
4376 4376 - show incoming changes with patches and full description::
4377 4377
4378 4378 hg incoming -vp
4379 4379
4380 4380 - show incoming changes excluding merges, store a bundle::
4381 4381
4382 4382 hg in -vpM --bundle incoming.hg
4383 4383 hg pull incoming.hg
4384 4384
4385 4385 - briefly list changes inside a bundle::
4386 4386
4387 4387 hg in changes.hg -T "{desc|firstline}\\n"
4388 4388
4389 4389 Returns 0 if there are incoming changes, 1 otherwise.
4390 4390 """
4391 4391 opts = pycompat.byteskwargs(opts)
4392 4392 if opts.get(b'graph'):
4393 4393 logcmdutil.checkunsupportedgraphflags([], opts)
4394 4394
4395 4395 def display(other, chlist, displayer):
4396 4396 revdag = logcmdutil.graphrevs(other, chlist, opts)
4397 4397 logcmdutil.displaygraph(
4398 4398 ui, repo, revdag, displayer, graphmod.asciiedges
4399 4399 )
4400 4400
4401 4401 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4402 4402 return 0
4403 4403
4404 4404 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])
4405 4405
4406 4406 if opts.get(b'bookmarks'):
4407 4407 srcs = urlutil.get_pull_paths(repo, ui, [source])
4408 4408 for path in srcs:
4409 4409 # XXX the "branches" options are not used. Should it be used?
4410 4410 other = hg.peer(repo, opts, path)
4411 4411 try:
4412 4412 if b'bookmarks' not in other.listkeys(b'namespaces'):
4413 4413 ui.warn(_(b"remote doesn't support bookmarks\n"))
4414 4414 return 0
4415 4415 ui.pager(b'incoming')
4416 4416 ui.status(
4417 4417 _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
4418 4418 )
4419 4419 return bookmarks.incoming(
4420 4420 ui, repo, other, mode=path.bookmarks_mode
4421 4421 )
4422 4422 finally:
4423 4423 other.close()
4424 4424
4425 4425 return hg.incoming(ui, repo, source, opts)
4426 4426
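The --bundle workflow above is also easy to automate, because ``hg incoming`` exits with 0 only when there is something to pull. A minimal sketch, assuming an ``hg`` executable on PATH and a writable ``incoming.hg`` in the current repository::

    import subprocess

    # Assumes `hg` is on PATH and the current directory is a repository.
    res = subprocess.run(['hg', 'incoming', '-v', '--bundle', 'incoming.hg'])
    if res.returncode == 0:
        # Pull from the bundle that was just written instead of contacting
        # the remote a second time.
        subprocess.run(['hg', 'pull', 'incoming.hg'], check=True)
    else:
        print('nothing to pull')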
4427 4427
4428 4428 @command(
4429 4429 b'init',
4430 4430 remoteopts,
4431 4431 _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
4432 4432 helpcategory=command.CATEGORY_REPO_CREATION,
4433 4433 helpbasic=True,
4434 4434 norepo=True,
4435 4435 )
4436 4436 def init(ui, dest=b".", **opts):
4437 4437 """create a new repository in the given directory
4438 4438
4439 4439 Initialize a new repository in the given directory. If the given
4440 4440 directory does not exist, it will be created.
4441 4441
4442 4442 If no directory is given, the current directory is used.
4443 4443
4444 4444 It is possible to specify an ``ssh://`` URL as the destination.
4445 4445 See :hg:`help urls` for more information.
4446 4446
4447 4447 Returns 0 on success.
4448 4448 """
4449 4449 opts = pycompat.byteskwargs(opts)
4450 4450 path = urlutil.get_clone_path_obj(ui, dest)
4451 4451 peer = hg.peer(ui, opts, path, create=True)
4452 4452 peer.close()
4453 4453
4454 4454
4455 4455 @command(
4456 4456 b'locate',
4457 4457 [
4458 4458 (
4459 4459 b'r',
4460 4460 b'rev',
4461 4461 b'',
4462 4462 _(b'search the repository as it is in REV'),
4463 4463 _(b'REV'),
4464 4464 ),
4465 4465 (
4466 4466 b'0',
4467 4467 b'print0',
4468 4468 None,
4469 4469 _(b'end filenames with NUL, for use with xargs'),
4470 4470 ),
4471 4471 (
4472 4472 b'f',
4473 4473 b'fullpath',
4474 4474 None,
4475 4475 _(b'print complete paths from the filesystem root'),
4476 4476 ),
4477 4477 ]
4478 4478 + walkopts,
4479 4479 _(b'[OPTION]... [PATTERN]...'),
4480 4480 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
4481 4481 )
4482 4482 def locate(ui, repo, *pats, **opts):
4483 4483 """locate files matching specific patterns (DEPRECATED)
4484 4484
4485 4485 Print files under Mercurial control in the working directory whose
4486 4486 names match the given patterns.
4487 4487
4488 4488 By default, this command searches all directories in the working
4489 4489 directory. To search just the current directory and its
4490 4490 subdirectories, use "--include .".
4491 4491
4492 4492 If no patterns are given to match, this command prints the names
4493 4493 of all files under Mercurial control in the working directory.
4494 4494
4495 4495 If you want to feed the output of this command into the "xargs"
4496 4496 command, use the -0 option to both this command and "xargs". This
4497 4497 will avoid the problem of "xargs" treating single filenames that
4498 4498 contain whitespace as multiple filenames.
4499 4499
4500 4500 See :hg:`help files` for a more versatile command.
4501 4501
4502 4502 Returns 0 if a match is found, 1 otherwise.
4503 4503 """
4504 4504 opts = pycompat.byteskwargs(opts)
4505 4505 if opts.get(b'print0'):
4506 4506 end = b'\0'
4507 4507 else:
4508 4508 end = b'\n'
4509 4509 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
4510 4510
4511 4511 ret = 1
4512 4512 m = scmutil.match(
4513 4513 ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
4514 4514 )
4515 4515
4516 4516 ui.pager(b'locate')
4517 4517 if ctx.rev() is None:
4518 4518 # When run on the working copy, "locate" includes removed files, so
4519 4519 # we get the list of files from the dirstate.
4520 4520 filesgen = sorted(repo.dirstate.matches(m))
4521 4521 else:
4522 4522 filesgen = ctx.matches(m)
4523 4523 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
4524 4524 for abs in filesgen:
4525 4525 if opts.get(b'fullpath'):
4526 4526 ui.write(repo.wjoin(abs), end)
4527 4527 else:
4528 4528 ui.write(uipathfn(abs), end)
4529 4529 ret = 0
4530 4530
4531 4531 return ret
4532 4532
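The -0/--print0 output mentioned above is also convenient to consume directly from a script, without going through xargs. A minimal sketch, assuming an ``hg`` executable on PATH and the current directory inside a repository::

    import subprocess

    # NUL-separated output is safe even for filenames containing spaces
    # or newlines.
    out = subprocess.run(['hg', 'locate', '-0'], capture_output=True).stdout
    files = [p.decode('utf-8', 'replace') for p in out.split(b'\0') if p]
    print('%d files under Mercurial control' % len(files))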
4533 4533
4534 4534 @command(
4535 4535 b'log|history',
4536 4536 [
4537 4537 (
4538 4538 b'f',
4539 4539 b'follow',
4540 4540 None,
4541 4541 _(
4542 4542 b'follow changeset history, or file history across copies and renames'
4543 4543 ),
4544 4544 ),
4545 4545 (
4546 4546 b'',
4547 4547 b'follow-first',
4548 4548 None,
4549 4549 _(b'only follow the first parent of merge changesets (DEPRECATED)'),
4550 4550 ),
4551 4551 (
4552 4552 b'd',
4553 4553 b'date',
4554 4554 b'',
4555 4555 _(b'show revisions matching date spec'),
4556 4556 _(b'DATE'),
4557 4557 ),
4558 4558 (b'C', b'copies', None, _(b'show copied files')),
4559 4559 (
4560 4560 b'k',
4561 4561 b'keyword',
4562 4562 [],
4563 4563 _(b'do case-insensitive search for a given text'),
4564 4564 _(b'TEXT'),
4565 4565 ),
4566 4566 (
4567 4567 b'r',
4568 4568 b'rev',
4569 4569 [],
4570 4570 _(b'revisions to select or follow from'),
4571 4571 _(b'REV'),
4572 4572 ),
4573 4573 (
4574 4574 b'L',
4575 4575 b'line-range',
4576 4576 [],
4577 4577 _(b'follow line range of specified file (EXPERIMENTAL)'),
4578 4578 _(b'FILE,RANGE'),
4579 4579 ),
4580 4580 (
4581 4581 b'',
4582 4582 b'removed',
4583 4583 None,
4584 4584 _(b'include revisions where files were removed'),
4585 4585 ),
4586 4586 (
4587 4587 b'm',
4588 4588 b'only-merges',
4589 4589 None,
4590 4590 _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
4591 4591 ),
4592 4592 (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
4593 4593 (
4594 4594 b'',
4595 4595 b'only-branch',
4596 4596 [],
4597 4597 _(
4598 4598 b'show only changesets within the given named branch (DEPRECATED)'
4599 4599 ),
4600 4600 _(b'BRANCH'),
4601 4601 ),
4602 4602 (
4603 4603 b'b',
4604 4604 b'branch',
4605 4605 [],
4606 4606 _(b'show changesets within the given named branch'),
4607 4607 _(b'BRANCH'),
4608 4608 ),
4609 4609 (
4610 4610 b'B',
4611 4611 b'bookmark',
4612 4612 [],
4613 4613 _(b"show changesets within the given bookmark"),
4614 4614 _(b'BOOKMARK'),
4615 4615 ),
4616 4616 (
4617 4617 b'P',
4618 4618 b'prune',
4619 4619 [],
4620 4620 _(b'do not display revision or any of its ancestors'),
4621 4621 _(b'REV'),
4622 4622 ),
4623 4623 ]
4624 4624 + logopts
4625 4625 + walkopts,
4626 4626 _(b'[OPTION]... [FILE]'),
4627 4627 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
4628 4628 helpbasic=True,
4629 4629 inferrepo=True,
4630 4630 intents={INTENT_READONLY},
4631 4631 )
4632 4632 def log(ui, repo, *pats, **opts):
4633 4633 """show revision history of entire repository or files
4634 4634
4635 4635 Print the revision history of the specified files or the entire
4636 4636 project.
4637 4637
4638 4638 If no revision range is specified, the default is ``tip:0`` unless
4639 4639 --follow is set.
4640 4640
4641 4641 File history is shown without following rename or copy history of
4642 4642 files. Use -f/--follow with a filename to follow history across
4643 4643 renames and copies. --follow without a filename will only show
4644 4644 ancestors of the starting revisions. The starting revisions can be
4645 4645 specified by -r/--rev, which default to the working directory parent.
4646 4646
4647 4647 By default this command prints revision number and changeset id,
4648 4648 tags, non-trivial parents, user, date and time, and a summary for
4649 4649 each commit. When the -v/--verbose switch is used, the list of
4650 4650 changed files and full commit message are shown.
4651 4651
4652 4652 With --graph the revisions are shown as an ASCII art DAG with the most
4653 4653 recent changeset at the top.
4654 4654 'o' is a changeset, '@' is a working directory parent, '%' is a changeset
4655 4655 involved in an unresolved merge conflict, '_' closes a branch,
4656 4656 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
4657 4657 changeset from the lines below is a parent of the 'o' merge on the same
4658 4658 line.
4659 4659 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
4660 4660 of a '|' indicates one or more revisions in a path are omitted.
4661 4661
4662 4662 .. container:: verbose
4663 4663
4664 4664 Use -L/--line-range FILE,M:N options to follow the history of lines
4665 4665 from M to N in FILE. With -p/--patch only diff hunks affecting
4666 4666 specified line range will be shown. This option requires --follow;
4667 4667 it can be specified multiple times. Currently, this option is not
4668 4668 compatible with --graph. This option is experimental.
4669 4669
4670 4670 .. note::
4671 4671
4672 4672 :hg:`log --patch` may generate unexpected diff output for merge
4673 4673 changesets, as it will only compare the merge changeset against
4674 4674 its first parent. Also, only files different from BOTH parents
4675 4675 will appear in files:.
4676 4676
4677 4677 .. note::
4678 4678
4679 4679 For performance reasons, :hg:`log FILE` may omit duplicate changes
4680 4680 made on branches and will not show removals or mode changes. To
4681 4681 see all such changes, use the --removed switch.
4682 4682
4683 4683 .. container:: verbose
4684 4684
4685 4685 .. note::
4686 4686
4687 4687 The history resulting from -L/--line-range options depends on diff
4688 4688 options; for instance if white-spaces are ignored, respective changes
4689 4689 with only white-spaces in specified line range will not be listed.
4690 4690
4691 4691 .. container:: verbose
4692 4692
4693 4693 Some examples:
4694 4694
4695 4695 - changesets with full descriptions and file lists::
4696 4696
4697 4697 hg log -v
4698 4698
4699 4699 - changesets ancestral to the working directory::
4700 4700
4701 4701 hg log -f
4702 4702
4703 4703 - last 10 commits on the current branch::
4704 4704
4705 4705 hg log -l 10 -b .
4706 4706
4707 4707 - changesets showing all modifications of a file, including removals::
4708 4708
4709 4709 hg log --removed file.c
4710 4710
4711 4711 - all changesets that touch a directory, with diffs, excluding merges::
4712 4712
4713 4713 hg log -Mp lib/
4714 4714
4715 4715 - all revision numbers that match a keyword::
4716 4716
4717 4717 hg log -k bug --template "{rev}\\n"
4718 4718
4719 4719 - the full hash identifier of the working directory parent::
4720 4720
4721 4721 hg log -r . --template "{node}\\n"
4722 4722
4723 4723 - list available log templates::
4724 4724
4725 4725 hg log -T list
4726 4726
4727 4727 - check if a given changeset is included in a tagged release::
4728 4728
4729 4729 hg log -r "a21ccf and ancestor(1.9)"
4730 4730
4731 4731 - find all changesets by some user in a date range::
4732 4732
4733 4733 hg log -k alice -d "may 2008 to jul 2008"
4734 4734
4735 4735 - summary of all changesets after the last tag::
4736 4736
4737 4737 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4738 4738
4739 4739 - changesets touching lines 13 to 23 for file.c::
4740 4740
4741 4741 hg log -L file.c,13:23
4742 4742
4743 4743 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
4744 4744 main.c with patch::
4745 4745
4746 4746 hg log -L file.c,13:23 -L main.c,2:6 -p
4747 4747
4748 4748 See :hg:`help dates` for a list of formats valid for -d/--date.
4749 4749
4750 4750 See :hg:`help revisions` for more about specifying and ordering
4751 4751 revisions.
4752 4752
4753 4753 See :hg:`help templates` for more about pre-packaged styles and
4754 4754 specifying custom templates. The default template used by the log
4755 4755 command can be customized via the ``command-templates.log`` configuration
4756 4756 setting.
4757 4757
4758 4758 Returns 0 on success.
4759 4759
4760 4760 """
4761 4761 opts = pycompat.byteskwargs(opts)
4762 4762 linerange = opts.get(b'line_range')
4763 4763
4764 4764 if linerange and not opts.get(b'follow'):
4765 4765 raise error.InputError(_(b'--line-range requires --follow'))
4766 4766
4767 4767 if linerange and pats:
4768 4768 # TODO: take pats as patterns with no line-range filter
4769 4769 raise error.InputError(
4770 4770 _(b'FILE arguments are not compatible with --line-range option')
4771 4771 )
4772 4772
4773 4773 repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
4774 4774 walk_opts = logcmdutil.parseopts(ui, pats, opts)
4775 4775 revs, differ = logcmdutil.getrevs(repo, walk_opts)
4776 4776 if linerange:
4777 4777 # TODO: should follow file history from logcmdutil._initialrevs(),
4778 4778 # then filter the result by logcmdutil._makerevset() and --limit
4779 4779 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
4780 4780
4781 4781 getcopies = None
4782 4782 if opts.get(b'copies'):
4783 4783 endrev = None
4784 4784 if revs:
4785 4785 endrev = revs.max() + 1
4786 4786 getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
4787 4787
4788 4788 ui.pager(b'log')
4789 4789 displayer = logcmdutil.changesetdisplayer(
4790 4790 ui, repo, opts, differ, buffered=True
4791 4791 )
4792 4792 if opts.get(b'graph'):
4793 4793 displayfn = logcmdutil.displaygraphrevs
4794 4794 else:
4795 4795 displayfn = logcmdutil.displayrevs
4796 4796 displayfn(ui, repo, revs, displayer, getcopies)
4797 4797
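Most of the examples above become scriptable through templates; for instance, the full hash of the working directory parent can be captured directly. A minimal sketch, assuming an ``hg`` executable on PATH and the current directory inside a repository::

    import subprocess

    # "{node}" expands to the full 40-character hash of the selected revision.
    node = subprocess.check_output(
        ['hg', 'log', '-r', '.', '--template', '{node}\n']
    ).strip().decode('ascii')
    print('working directory parent:', node)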
4798 4798
4799 4799 @command(
4800 4800 b'manifest',
4801 4801 [
4802 4802 (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
4803 4803 (b'', b'all', False, _(b"list files from all revisions")),
4804 4804 ]
4805 4805 + formatteropts,
4806 4806 _(b'[-r REV]'),
4807 4807 helpcategory=command.CATEGORY_MAINTENANCE,
4808 4808 intents={INTENT_READONLY},
4809 4809 )
4810 4810 def manifest(ui, repo, node=None, rev=None, **opts):
4811 4811 """output the current or given revision of the project manifest
4812 4812
4813 4813 Print a list of version controlled files for the given revision.
4814 4814 If no revision is given, the first parent of the working directory
4815 4815 is used, or the null revision if no revision is checked out.
4816 4816
4817 4817 With -v, print file permissions, symlink and executable bits.
4818 4818 With --debug, print file revision hashes.
4819 4819
4820 4820 If option --all is specified, the list of all files from all revisions
4821 4821 is printed. This includes deleted and renamed files.
4822 4822
4823 4823 Returns 0 on success.
4824 4824 """
4825 4825 opts = pycompat.byteskwargs(opts)
4826 4826 fm = ui.formatter(b'manifest', opts)
4827 4827
4828 4828 if opts.get(b'all'):
4829 4829 if rev or node:
4830 4830 raise error.InputError(_(b"can't specify a revision with --all"))
4831 4831
4832 4832 res = set()
4833 4833 for rev in repo:
4834 4834 ctx = repo[rev]
4835 4835 res |= set(ctx.files())
4836 4836
4837 4837 ui.pager(b'manifest')
4838 4838 for f in sorted(res):
4839 4839 fm.startitem()
4840 4840 fm.write(b"path", b'%s\n', f)
4841 4841 fm.end()
4842 4842 return
4843 4843
4844 4844 if rev and node:
4845 4845 raise error.InputError(_(b"please specify just one revision"))
4846 4846
4847 4847 if not node:
4848 4848 node = rev
4849 4849
4850 4850 char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
4851 4851 mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
4852 4852 if node:
4853 4853 repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
4854 4854 ctx = logcmdutil.revsingle(repo, node)
4855 4855 mf = ctx.manifest()
4856 4856 ui.pager(b'manifest')
4857 4857 for f in ctx:
4858 4858 fm.startitem()
4859 4859 fm.context(ctx=ctx)
4860 4860 fl = ctx[f].flags()
4861 4861 fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
4862 4862 fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
4863 4863 fm.write(b'path', b'%s\n', f)
4864 4864 fm.end()
4865 4865
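The same listing is reachable from Python through the objects used above (``repo[rev]``, ``ctx.manifest()``, ``ctx[f].flags()``). Mercurial's internals are not a stable API, so treat this as an illustrative sketch only; it assumes a repository in the current directory::

    from mercurial import hg, ui as uimod
    from mercurial.node import hex

    # Assumes the current directory is a Mercurial repository.
    repo = hg.repository(uimod.ui.load(), b'.')
    ctx = repo[b'.']          # first parent of the working directory
    mf = ctx.manifest()
    for f in ctx:             # the files tracked in this revision
        # flags() is b'' for regular files, b'l' for symlinks, b'x' for
        # executables; hex(mf[f]) is the file revision hash shown by --debug.
        fl = ctx[f].flags().decode() or '-'
        print(fl, hex(mf[f])[:12].decode(), f.decode('utf-8', 'replace'))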
4866 4866
4867 4867 @command(
4868 4868 b'merge',
4869 4869 [
4870 4870 (
4871 4871 b'f',
4872 4872 b'force',
4873 4873 None,
4874 4874 _(b'force a merge including outstanding changes (DEPRECATED)'),
4875 4875 ),
4876 4876 (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
4877 4877 (
4878 4878 b'P',
4879 4879 b'preview',
4880 4880 None,
4881 4881 _(b'review revisions to merge (no merge is performed)'),
4882 4882 ),
4883 4883 (b'', b'abort', None, _(b'abort the ongoing merge')),
4884 4884 ]
4885 4885 + mergetoolopts,
4886 4886 _(b'[-P] [[-r] REV]'),
4887 4887 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
4888 4888 helpbasic=True,
4889 4889 )
4890 4890 def merge(ui, repo, node=None, **opts):
4891 4891 """merge another revision into working directory
4892 4892
4893 4893 The current working directory is updated with all changes made in
4894 4894 the requested revision since the last common predecessor revision.
4895 4895
4896 4896 Files that changed between either parent are marked as changed for
4897 4897 the next commit and a commit must be performed before any further
4898 4898 updates to the repository are allowed. The next commit will have
4899 4899 two parents.
4900 4900
4901 4901 ``--tool`` can be used to specify the merge tool used for file
4902 4902 merges. It overrides the HGMERGE environment variable and your
4903 4903 configuration files. See :hg:`help merge-tools` for options.
4904 4904
4905 4905 If no revision is specified, the working directory's parent is a
4906 4906 head revision, and the current branch contains exactly one other
4907 4907     head, then that other head is merged by default. Otherwise, an
4908 4908 explicit revision with which to merge must be provided.
4909 4909
4910 4910 See :hg:`help resolve` for information on handling file conflicts.
4911 4911
4912 4912 To undo an uncommitted merge, use :hg:`merge --abort` which
4913 4913 will check out a clean copy of the original merge parent, losing
4914 4914 all changes.
4915 4915
4916 4916 Returns 0 on success, 1 if there are unresolved files.
4917 4917 """
4918 4918
4919 4919 opts = pycompat.byteskwargs(opts)
4920 4920 abort = opts.get(b'abort')
4921 4921 if abort and repo.dirstate.p2() == repo.nullid:
4922 4922 cmdutil.wrongtooltocontinue(repo, _(b'merge'))
4923 4923 cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
4924 4924 if abort:
4925 4925 state = cmdutil.getunfinishedstate(repo)
4926 4926 if state and state._opname != b'merge':
4927 4927 raise error.StateError(
4928 4928 _(b'cannot abort merge with %s in progress') % (state._opname),
4929 4929 hint=state.hint(),
4930 4930 )
4931 4931 if node:
4932 4932 raise error.InputError(_(b"cannot specify a node with --abort"))
4933 4933 return hg.abortmerge(repo.ui, repo)
4934 4934
4935 4935 if opts.get(b'rev') and node:
4936 4936 raise error.InputError(_(b"please specify just one revision"))
4937 4937 if not node:
4938 4938 node = opts.get(b'rev')
4939 4939
4940 4940 if node:
4941 4941 ctx = logcmdutil.revsingle(repo, node)
4942 4942 else:
4943 4943 if ui.configbool(b'commands', b'merge.require-rev'):
4944 4944 raise error.InputError(
4945 4945 _(
4946 4946 b'configuration requires specifying revision to merge '
4947 4947 b'with'
4948 4948 )
4949 4949 )
4950 4950 ctx = repo[destutil.destmerge(repo)]
4951 4951
4952 4952 if ctx.node() is None:
4953 4953 raise error.InputError(
4954 4954 _(b'merging with the working copy has no effect')
4955 4955 )
4956 4956
4957 4957 if opts.get(b'preview'):
4958 4958 # find nodes that are ancestors of p2 but not of p1
4959 4959 p1 = repo[b'.'].node()
4960 4960 p2 = ctx.node()
4961 4961 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4962 4962
4963 4963 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
4964 4964 for node in nodes:
4965 4965 displayer.show(repo[node])
4966 4966 displayer.close()
4967 4967 return 0
4968 4968
4969 4969 # ui.forcemerge is an internal variable, do not document
4970 4970 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
4971 4971 with ui.configoverride(overrides, b'merge'):
4972 4972 force = opts.get(b'force')
4973 4973 labels = [b'working copy', b'merge rev', b'common ancestor']
4974 4974 return hg.merge(ctx, force=force, labels=labels)
4975 4975
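--preview pairs well with scripting: it lists what a merge would bring in without touching the working directory, and the merge itself signals unresolved files through its exit status. A minimal sketch, assuming an ``hg`` executable on PATH and a hypothetical revision name::

    import subprocess

    rev = 'stable'   # placeholder for the revision to merge

    # Review the changesets the merge would bring in; nothing is merged yet.
    subprocess.run(['hg', 'merge', '--preview', '-r', rev], check=True)

    # Perform the merge; exit status 1 means unresolved files remain and
    # must be handled with `hg resolve` before committing.
    res = subprocess.run(['hg', 'merge', '-r', rev])
    if res.returncode == 1:
        print('merge left unresolved files; see `hg resolve --list`')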
4976 4976
4977 4977 statemod.addunfinished(
4978 4978 b'merge',
4979 4979 fname=None,
4980 4980 clearable=True,
4981 4981 allowcommit=True,
4982 4982 cmdmsg=_(b'outstanding uncommitted merge'),
4983 4983 abortfunc=hg.abortmerge,
4984 4984 statushint=_(
4985 4985 b'To continue: hg commit\nTo abort: hg merge --abort'
4986 4986 ),
4987 4987 cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
4988 4988 )
4989 4989
4990 4990
4991 4991 @command(
4992 4992 b'outgoing|out',
4993 4993 [
4994 4994 (
4995 4995 b'f',
4996 4996 b'force',
4997 4997 None,
4998 4998 _(b'run even when the destination is unrelated'),
4999 4999 ),
5000 5000 (
5001 5001 b'r',
5002 5002 b'rev',
5003 5003 [],
5004 5004 _(b'a changeset intended to be included in the destination'),
5005 5005 _(b'REV'),
5006 5006 ),
5007 5007 (b'n', b'newest-first', None, _(b'show newest record first')),
5008 5008 (b'B', b'bookmarks', False, _(b'compare bookmarks')),
5009 5009 (
5010 5010 b'b',
5011 5011 b'branch',
5012 5012 [],
5013 5013 _(b'a specific branch you would like to push'),
5014 5014 _(b'BRANCH'),
5015 5015 ),
5016 5016 ]
5017 5017 + logopts
5018 5018 + remoteopts
5019 5019 + subrepoopts,
5020 5020 _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'),
5021 5021 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5022 5022 )
5023 5023 def outgoing(ui, repo, *dests, **opts):
5024 5024 """show changesets not found in the destination
5025 5025
5026 5026 Show changesets not found in the specified destination repository
5027 5027 or the default push location. These are the changesets that would
5028 5028 be pushed if a push was requested.
5029 5029
5030 5030 See pull for details of valid destination formats.
5031 5031
5032 5032 .. container:: verbose
5033 5033
5034 5034 With -B/--bookmarks, the result of bookmark comparison between
5035 5035 local and remote repositories is displayed. With -v/--verbose,
5036 5036 status is also displayed for each bookmark like below::
5037 5037
5038 5038 BM1 01234567890a added
5039 5039 BM2 deleted
5040 5040 BM3 234567890abc advanced
5041 5041 BM4 34567890abcd diverged
5042 5042 BM5 4567890abcde changed
5043 5043
5044 5044 The action taken when pushing depends on the
5045 5045 status of each bookmark:
5046 5046
5047 5047 :``added``: push with ``-B`` will create it
5048 5048 :``deleted``: push with ``-B`` will delete it
5049 5049 :``advanced``: push will update it
5050 5050 :``diverged``: push with ``-B`` will update it
5051 5051 :``changed``: push with ``-B`` will update it
5052 5052
5053 5053 From the point of view of pushing behavior, bookmarks
5054 5054 existing only in the remote repository are treated as
5055 5055     ``deleted``, even if they were in fact added remotely.
5056 5056
5057 5057 Returns 0 if there are outgoing changes, 1 otherwise.
5058 5058 """
5059 5059 opts = pycompat.byteskwargs(opts)
5060 5060 if opts.get(b'bookmarks'):
5061 5061 for path in urlutil.get_push_paths(repo, ui, dests):
5062 5062 other = hg.peer(repo, opts, path)
5063 5063 try:
5064 5064 if b'bookmarks' not in other.listkeys(b'namespaces'):
5065 5065 ui.warn(_(b"remote doesn't support bookmarks\n"))
5066 5066 return 0
5067 5067 ui.status(
5068 5068 _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
5069 5069 )
5070 5070 ui.pager(b'outgoing')
5071 5071 return bookmarks.outgoing(ui, repo, other)
5072 5072 finally:
5073 5073 other.close()
5074 5074
5075 5075 return hg.outgoing(ui, repo, dests, opts)
5076 5076
5077 5077
5078 5078 @command(
5079 5079 b'parents',
5080 5080 [
5081 5081 (
5082 5082 b'r',
5083 5083 b'rev',
5084 5084 b'',
5085 5085 _(b'show parents of the specified revision'),
5086 5086 _(b'REV'),
5087 5087 ),
5088 5088 ]
5089 5089 + templateopts,
5090 5090 _(b'[-r REV] [FILE]'),
5091 5091 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
5092 5092 inferrepo=True,
5093 5093 )
5094 5094 def parents(ui, repo, file_=None, **opts):
5095 5095 """show the parents of the working directory or revision (DEPRECATED)
5096 5096
5097 5097 Print the working directory's parent revisions. If a revision is
5098 5098 given via -r/--rev, the parent of that revision will be printed.
5099 5099 If a file argument is given, the revision in which the file was
5100 5100 last changed (before the working directory revision or the
5101 5101 argument to --rev if given) is printed.
5102 5102
5103 5103 This command is equivalent to::
5104 5104
5105 5105 hg log -r "p1()+p2()" or
5106 5106 hg log -r "p1(REV)+p2(REV)" or
5107 5107 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
5108 5108 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
5109 5109
5110 5110 See :hg:`summary` and :hg:`help revsets` for related information.
5111 5111
5112 5112 Returns 0 on success.
5113 5113 """
5114 5114
5115 5115 opts = pycompat.byteskwargs(opts)
5116 5116 rev = opts.get(b'rev')
5117 5117 if rev:
5118 5118 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
5119 5119 ctx = logcmdutil.revsingle(repo, rev, None)
5120 5120
5121 5121 if file_:
5122 5122 m = scmutil.match(ctx, (file_,), opts)
5123 5123 if m.anypats() or len(m.files()) != 1:
5124 5124 raise error.InputError(_(b'can only specify an explicit filename'))
5125 5125 file_ = m.files()[0]
5126 5126 filenodes = []
5127 5127 for cp in ctx.parents():
5128 5128 if not cp:
5129 5129 continue
5130 5130 try:
5131 5131 filenodes.append(cp.filenode(file_))
5132 5132 except error.LookupError:
5133 5133 pass
5134 5134 if not filenodes:
5135 5135 raise error.InputError(_(b"'%s' not found in manifest") % file_)
5136 5136 p = []
5137 5137 for fn in filenodes:
5138 5138 fctx = repo.filectx(file_, fileid=fn)
5139 5139 p.append(fctx.node())
5140 5140 else:
5141 5141 p = [cp.node() for cp in ctx.parents()]
5142 5142
5143 5143 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5144 5144 for n in p:
5145 5145 if n != repo.nullid:
5146 5146 displayer.show(repo[n])
5147 5147 displayer.close()
5148 5148
5149 5149
5150 5150 @command(
5151 5151 b'paths',
5152 5152 formatteropts,
5153 5153 _(b'[NAME]'),
5154 5154 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5155 5155 optionalrepo=True,
5156 5156 intents={INTENT_READONLY},
5157 5157 )
5158 5158 def paths(ui, repo, search=None, **opts):
5159 5159 """show aliases for remote repositories
5160 5160
5161 5161 Show definition of symbolic path name NAME. If no name is given,
5162 5162 show definition of all available names.
5163 5163
5164 5164 Option -q/--quiet suppresses all output when searching for NAME
5165 5165 and shows only the path names when listing all definitions.
5166 5166
5167 5167 Path names are defined in the [paths] section of your
5168 5168 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
5169 5169 repository, ``.hg/hgrc`` is used, too.
5170 5170
5171 5171 The path names ``default`` and ``default-push`` have a special
5172 5172 meaning. When performing a push or pull operation, they are used
5173 5173 as fallbacks if no location is specified on the command-line.
5174 5174 When ``default-push`` is set, it will be used for push and
5175 5175 ``default`` will be used for pull; otherwise ``default`` is used
5176 5176 as the fallback for both. When cloning a repository, the clone
5177 5177 source is written as ``default`` in ``.hg/hgrc``.
5178 5178
5179 5179 .. note::
5180 5180
5181 5181 ``default`` and ``default-push`` apply to all inbound (e.g.
5182 5182 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
5183 5183 and :hg:`bundle`) operations.
5184 5184
5185 5185 See :hg:`help urls` for more information.
5186 5186
5187 5187 .. container:: verbose
5188 5188
5189 5189 Template:
5190 5190
5191 5191 The following keywords are supported. See also :hg:`help templates`.
5192 5192
5193 5193 :name: String. Symbolic name of the path alias.
5194 5194 :pushurl: String. URL for push operations.
5195 5195 :url: String. URL or directory path for the other operations.
5196 5196
5197 5197 Returns 0 on success.
5198 5198 """
5199 5199
5200 5200 opts = pycompat.byteskwargs(opts)
5201 5201
5202 5202 pathitems = urlutil.list_paths(ui, search)
5203 5203 ui.pager(b'paths')
5204 5204
5205 5205 fm = ui.formatter(b'paths', opts)
5206 5206 if fm.isplain():
5207 5207 hidepassword = urlutil.hidepassword
5208 5208 else:
5209 5209 hidepassword = bytes
5210 5210 if ui.quiet:
5211 5211 namefmt = b'%s\n'
5212 5212 else:
5213 5213 namefmt = b'%s = '
5214 5214 showsubopts = not search and not ui.quiet
5215 5215
5216 5216 for name, path in pathitems:
5217 5217 fm.startitem()
5218 5218 fm.condwrite(not search, b'name', namefmt, name)
5219 5219 fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
5220 5220 for subopt, value in sorted(path.suboptions.items()):
5221 5221 assert subopt not in (b'name', b'url')
5222 5222 if showsubopts:
5223 5223 fm.plain(b'%s:%s = ' % (name, subopt))
5224 5224 if isinstance(value, bool):
5225 5225 if value:
5226 5226 value = b'yes'
5227 5227 else:
5228 5228 value = b'no'
5229 5229 fm.condwrite(showsubopts, subopt, b'%s\n', value)
5230 5230
5231 5231 fm.end()
5232 5232
5233 5233 if search and not pathitems:
5234 5234 if not ui.quiet:
5235 5235 ui.warn(_(b"not found!\n"))
5236 5236 return 1
5237 5237 else:
5238 5238 return 0
5239 5239
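The template keywords listed above make ``hg paths`` handy for scripts that need the configured remotes. A minimal sketch, assuming an ``hg`` executable on PATH and the current directory inside a repository::

    import subprocess

    # One "name url" pair per configured path; {pushurl} would additionally
    # expose a push-specific URL when one is set.
    out = subprocess.check_output(
        ['hg', 'paths', '-T', '{name} {url}\n']
    ).decode('utf-8', 'replace')
    for line in out.splitlines():
        name, _, url = line.partition(' ')
        print('path %s -> %s' % (name, url))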
5240 5240
5241 5241 @command(
5242 5242 b'phase',
5243 5243 [
5244 5244 (b'p', b'public', False, _(b'set changeset phase to public')),
5245 5245 (b'd', b'draft', False, _(b'set changeset phase to draft')),
5246 5246 (b's', b'secret', False, _(b'set changeset phase to secret')),
5247 5247 (b'f', b'force', False, _(b'allow to move boundary backward')),
5248 5248 (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
5249 5249 ],
5250 5250 _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
5251 5251 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
5252 5252 )
5253 5253 def phase(ui, repo, *revs, **opts):
5254 5254 """set or show the current phase name
5255 5255
5256 5256 With no argument, show the phase name of the current revision(s).
5257 5257
5258 5258 With one of -p/--public, -d/--draft or -s/--secret, change the
5259 5259 phase value of the specified revisions.
5260 5260
5261 5261 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
5262 5262 lower phase to a higher phase. Phases are ordered as follows::
5263 5263
5264 5264 public < draft < secret
5265 5265
5266 5266 Returns 0 on success, 1 if some phases could not be changed.
5267 5267
5268 5268 (For more information about the phases concept, see :hg:`help phases`.)
5269 5269 """
5270 5270 opts = pycompat.byteskwargs(opts)
5271 5271 # search for a unique phase argument
5272 5272 targetphase = None
5273 5273 for idx, name in enumerate(phases.cmdphasenames):
5274 5274 if opts[name]:
5275 5275 if targetphase is not None:
5276 5276 raise error.InputError(_(b'only one phase can be specified'))
5277 5277 targetphase = idx
5278 5278
5279 5279 # look for specified revision
5280 5280 revs = list(revs)
5281 5281 revs.extend(opts[b'rev'])
5282 5282 if revs:
5283 5283 revs = logcmdutil.revrange(repo, revs)
5284 5284 else:
5285 5285 # display both parents as the second parent phase can influence
5286 5286 # the phase of a merge commit
5287 5287 revs = [c.rev() for c in repo[None].parents()]
5288 5288
5289 5289 ret = 0
5290 5290 if targetphase is None:
5291 5291 # display
5292 5292 for r in revs:
5293 5293 ctx = repo[r]
5294 5294 ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5295 5295 else:
5296 5296 with repo.lock(), repo.transaction(b"phase") as tr:
5297 5297 # set phase
5298 5298 if not revs:
5299 5299 raise error.InputError(_(b'empty revision set'))
5300 5300 nodes = [repo[r].node() for r in revs]
5301 5301             # moving revisions from public to draft may hide them
5302 5302 # We have to check result on an unfiltered repository
5303 5303 unfi = repo.unfiltered()
5304 5304 getphase = unfi._phasecache.phase
5305 5305 olddata = [getphase(unfi, r) for r in unfi]
5306 5306 phases.advanceboundary(repo, tr, targetphase, nodes)
5307 5307 if opts[b'force']:
5308 5308 phases.retractboundary(repo, tr, targetphase, nodes)
5309 5309 getphase = unfi._phasecache.phase
5310 5310 newdata = [getphase(unfi, r) for r in unfi]
5311 5311 changes = sum(newdata[r] != olddata[r] for r in unfi)
5312 5312 cl = unfi.changelog
5313 5313 rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
5314 5314 if rejected:
5315 5315 ui.warn(
5316 5316 _(
5317 5317 b'cannot move %i changesets to a higher '
5318 5318 b'phase, use --force\n'
5319 5319 )
5320 5320 % len(rejected)
5321 5321 )
5322 5322 ret = 1
5323 5323 if changes:
5324 5324 msg = _(b'phase changed for %i changesets\n') % changes
5325 5325 if ret:
5326 5326 ui.status(msg)
5327 5327 else:
5328 5328 ui.note(msg)
5329 5329 else:
5330 5330 ui.warn(_(b'no phases changed\n'))
5331 5331 return ret
5332 5332
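Because :hg:`phase` refuses to move changesets to a higher phase by default, scripts usually check the exit status and fall back to --force for that direction. A minimal sketch, assuming an ``hg`` executable on PATH and a hypothetical target revision::

    import subprocess

    rev = 'tip'   # placeholder for the revision whose phase is changed

    # Publishing (draft -> public) moves to a lower phase, so no --force.
    subprocess.run(['hg', 'phase', '--public', '-r', rev], check=True)

    # Turning a public changeset back into draft is refused without --force;
    # exit status 1 means some phases could not be changed.
    res = subprocess.run(['hg', 'phase', '--draft', '-r', rev])
    if res.returncode == 1:
        subprocess.run(['hg', 'phase', '--draft', '--force', '-r', rev],
                       check=True)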
5333 5333
5334 5334 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5335 5335 """Run after a changegroup has been added via pull/unbundle
5336 5336
5337 5337 This takes arguments below:
5338 5338
5339 5339 :modheads: change of heads by pull/unbundle
5340 5340     :optupdate: whether the working directory should be updated
5341 5341 :checkout: update destination revision (or None to default destination)
5342 5342 :brev: a name, which might be a bookmark to be activated after updating
5343 5343
5344 5344     return True if the update raised any conflict, False otherwise.
5345 5345 """
5346 5346 if modheads == 0:
5347 5347 return False
5348 5348 if optupdate:
5349 5349 try:
5350 5350 return hg.updatetotally(ui, repo, checkout, brev)
5351 5351 except error.UpdateAbort as inst:
5352 5352 msg = _(b"not updating: %s") % stringutil.forcebytestr(inst)
5353 5353 hint = inst.hint
5354 5354 raise error.UpdateAbort(msg, hint=hint)
5355 5355 if modheads is not None and modheads > 1:
5356 5356 currentbranchheads = len(repo.branchheads())
5357 5357 if currentbranchheads == modheads:
5358 5358 ui.status(
5359 5359 _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
5360 5360 )
5361 5361 elif currentbranchheads > 1:
5362 5362 ui.status(
5363 5363 _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")
5364 5364 )
5365 5365 else:
5366 5366 ui.status(_(b"(run 'hg heads' to see heads)\n"))
5367 5367 elif not ui.configbool(b'commands', b'update.requiredest'):
5368 5368 ui.status(_(b"(run 'hg update' to get a working copy)\n"))
5369 5369 return False
5370 5370
5371 5371
5372 5372 @command(
5373 5373 b'pull',
5374 5374 [
5375 5375 (
5376 5376 b'u',
5377 5377 b'update',
5378 5378 None,
5379 5379 _(b'update to new branch head if new descendants were pulled'),
5380 5380 ),
5381 5381 (
5382 5382 b'f',
5383 5383 b'force',
5384 5384 None,
5385 5385 _(b'run even when remote repository is unrelated'),
5386 5386 ),
5387 5387 (
5388 5388 b'',
5389 5389 b'confirm',
5390 5390 None,
5391 5391 _(b'confirm pull before applying changes'),
5392 5392 ),
5393 5393 (
5394 5394 b'r',
5395 5395 b'rev',
5396 5396 [],
5397 5397 _(b'a remote changeset intended to be added'),
5398 5398 _(b'REV'),
5399 5399 ),
5400 5400 (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
5401 5401 (
5402 5402 b'b',
5403 5403 b'branch',
5404 5404 [],
5405 5405 _(b'a specific branch you would like to pull'),
5406 5406 _(b'BRANCH'),
5407 5407 ),
5408 (
5409 b'',
5410 b'remote-hidden',
5411 False,
5412 _(b"include changesets hidden on the remote (EXPERIMENTAL)"),
5413 ),
5408 5414 ]
5409 5415 + remoteopts,
5410 5416 _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'),
5411 5417 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5412 5418 helpbasic=True,
5413 5419 )
5414 5420 def pull(ui, repo, *sources, **opts):
5415 5421 """pull changes from the specified source
5416 5422
5417 5423 Pull changes from a remote repository to a local one.
5418 5424
5419 5425 This finds all changes from the repository at the specified path
5420 5426 or URL and adds them to a local repository (the current one unless
5421 5427 -R is specified). By default, this does not update the copy of the
5422 5428 project in the working directory.
5423 5429
5424 5430 When cloning from servers that support it, Mercurial may fetch
5425 5431 pre-generated data. When this is done, hooks operating on incoming
5426 5432 changesets and changegroups may fire more than once, once for each
5427 5433 pre-generated bundle and as well as for any additional remaining
5428 5434     pre-generated bundle as well as for any additional remaining
5429 5435
5430 5436 Use :hg:`incoming` if you want to see what would have been added
5431 5437 by a pull at the time you issued this command. If you then decide
5432 5438 to add those changes to the repository, you should use :hg:`pull
5433 5439 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5434 5440
5435 5441 If SOURCE is omitted, the 'default' path will be used.
5436 5442 See :hg:`help urls` for more information.
5437 5443
5438 5444 If multiple sources are specified, they will be pulled sequentially as if
5439 5445     the command was run multiple times. If --update is specified, the command
5440 5446     will stop at the first update that fails.
5441 5447
5442 5448     Specifying a bookmark as ``.`` is equivalent to specifying the active
5443 5449 bookmark's name.
5444 5450
5451 .. container:: verbose
5452
5453 One can use the `--remote-hidden` flag to pull changesets
5454 hidden on the remote. This flag is "best effort", and will only
5455 work if the server supports the feature and is configured to
5456 allow the user to access hidden changesets. This option is
5457         experimental and backwards compatibility is not guaranteed.
5458
5445 5459 Returns 0 on success, 1 if an update had unresolved files.
5446 5460 """
5447 5461
5448 5462 opts = pycompat.byteskwargs(opts)
5449 5463 if ui.configbool(b'commands', b'update.requiredest') and opts.get(
5450 5464 b'update'
5451 5465 ):
5452 5466 msg = _(b'update destination required by configuration')
5453 5467 hint = _(b'use hg pull followed by hg update DEST')
5454 5468 raise error.InputError(msg, hint=hint)
5455 5469
5456 5470 for path in urlutil.get_pull_paths(repo, ui, sources):
5457 5471 ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
5458 5472 ui.flush()
5459 other = hg.peer(repo, opts, path)
5473 other = hg.peer(repo, opts, path, remotehidden=opts[b'remote_hidden'])
5460 5474 update_conflict = None
5461 5475 try:
5462 5476 branches = (path.branch, opts.get(b'branch', []))
5463 5477 revs, checkout = hg.addbranchrevs(
5464 repo, other, branches, opts.get(b'rev')
5478 repo,
5479 other,
5480 branches,
5481 opts.get(b'rev'),
5482 remotehidden=opts[b'remote_hidden'],
5465 5483 )
5466 5484
5467 5485 pullopargs = {}
5468 5486
5469 5487 nodes = None
5470 5488 if opts.get(b'bookmark') or revs:
5471 5489                 # The list of bookmarks used here is the same one used to actually
5472 5490                 # update the bookmark names, to avoid the race from issue 4689; we do
5473 5491 # all lookup and bookmark queries in one go so they see the same
5474 5492 # version of the server state (issue 4700).
5475 5493 nodes = []
5476 5494 fnodes = []
5477 5495 revs = revs or []
5478 5496 if revs and not other.capable(b'lookup'):
5479 5497 err = _(
5480 5498 b"other repository doesn't support revision lookup, "
5481 5499 b"so a rev cannot be specified."
5482 5500 )
5483 5501 raise error.Abort(err)
5484 5502 with other.commandexecutor() as e:
5485 5503 fremotebookmarks = e.callcommand(
5486 5504 b'listkeys', {b'namespace': b'bookmarks'}
5487 5505 )
5488 5506 for r in revs:
5489 5507 fnodes.append(e.callcommand(b'lookup', {b'key': r}))
5490 5508 remotebookmarks = fremotebookmarks.result()
5491 5509 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
5492 5510 pullopargs[b'remotebookmarks'] = remotebookmarks
5493 5511 for b in opts.get(b'bookmark', []):
5494 5512 b = repo._bookmarks.expandname(b)
5495 5513 if b not in remotebookmarks:
5496 5514 raise error.InputError(
5497 5515 _(b'remote bookmark %s not found!') % b
5498 5516 )
5499 5517 nodes.append(remotebookmarks[b])
5500 5518 for i, rev in enumerate(revs):
5501 5519 node = fnodes[i].result()
5502 5520 nodes.append(node)
5503 5521 if rev == checkout:
5504 5522 checkout = node
5505 5523
5506 5524 wlock = util.nullcontextmanager()
5507 5525 if opts.get(b'update'):
5508 5526 wlock = repo.wlock()
5509 5527 with wlock:
5510 5528 pullopargs.update(opts.get(b'opargs', {}))
5511 5529 modheads = exchange.pull(
5512 5530 repo,
5513 5531 other,
5514 5532 path=path,
5515 5533 heads=nodes,
5516 5534 force=opts.get(b'force'),
5517 5535 bookmarks=opts.get(b'bookmark', ()),
5518 5536 opargs=pullopargs,
5519 5537 confirm=opts.get(b'confirm'),
5520 5538 ).cgresult
5521 5539
5522 5540 # brev is a name, which might be a bookmark to be activated at
5523 5541 # the end of the update. In other words, it is an explicit
5524 5542 # destination of the update
5525 5543 brev = None
5526 5544
5527 5545 if checkout:
5528 5546 checkout = repo.unfiltered().changelog.rev(checkout)
5529 5547
5530 5548 # order below depends on implementation of
5531 5549 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5532 5550 # because 'checkout' is determined without it.
5533 5551 if opts.get(b'rev'):
5534 5552 brev = opts[b'rev'][0]
5535 5553 elif opts.get(b'branch'):
5536 5554 brev = opts[b'branch'][0]
5537 5555 else:
5538 5556 brev = path.branch
5539 5557
5540 5558 # XXX path: we are losing the `path` object here. Keeping it
5541 5559 # would be valuable. For example as a "variant" as we do
5542 5560 # for pushes.
5543 5561 repo._subtoppath = path.loc
5544 5562 try:
5545 5563 update_conflict = postincoming(
5546 5564 ui, repo, modheads, opts.get(b'update'), checkout, brev
5547 5565 )
5548 5566 except error.FilteredRepoLookupError as exc:
5549 5567 msg = _(b'cannot update to target: %s') % exc.args[0]
5550 5568 exc.args = (msg,) + exc.args[1:]
5551 5569 raise
5552 5570 finally:
5553 5571 del repo._subtoppath
5554 5572
5555 5573 finally:
5556 5574 other.close()
5557 5575         # skip the remaining pull sources if there was a conflict.
5558 5576 if update_conflict:
5559 5577 break
5560 5578 if update_conflict:
5561 5579 return 1
5562 5580 else:
5563 5581 return 0
5564 5582
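The plumbing for --remote-hidden is visible above: the option is forwarded both to peer creation and to branch/revision resolution, so hidden changesets can be named on the command line. A stripped-down sketch of that path, built only from calls that appear in this function (the repository path and empty options dictionary are assumptions, and the internal API is not stable)::

    from mercurial import exchange, hg, ui as uimod
    from mercurial.utils import urlutil

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')   # assumes a local repository here
    remote_hidden = True             # what --remote-hidden switches on

    for path in urlutil.get_pull_paths(repo, ui, [b'default']):
        # The flag is passed at peer creation time ...
        other = hg.peer(repo, {}, path, remotehidden=remote_hidden)
        try:
            # ... and again when resolving --branch/--rev, so that hidden
            # changesets can be looked up on the remote side.
            revs, checkout = hg.addbranchrevs(
                repo, other, (path.branch, []), None,
                remotehidden=remote_hidden,
            )
            exchange.pull(repo, other, path=path, heads=None)
        finally:
            other.close()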
5565 5583
5566 5584 @command(
5567 5585 b'purge|clean',
5568 5586 [
5569 5587 (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
5570 5588 (b'', b'all', None, _(b'purge ignored files too')),
5571 5589 (b'i', b'ignored', None, _(b'purge only ignored files')),
5572 5590 (b'', b'dirs', None, _(b'purge empty directories')),
5573 5591 (b'', b'files', None, _(b'purge files')),
5574 5592 (b'p', b'print', None, _(b'print filenames instead of deleting them')),
5575 5593 (
5576 5594 b'0',
5577 5595 b'print0',
5578 5596 None,
5579 5597 _(
5580 5598 b'end filenames with NUL, for use with xargs'
5581 5599 b' (implies -p/--print)'
5582 5600 ),
5583 5601 ),
5584 5602 (b'', b'confirm', None, _(b'ask before permanently deleting files')),
5585 5603 ]
5586 5604 + cmdutil.walkopts,
5587 5605 _(b'hg purge [OPTION]... [DIR]...'),
5588 5606 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5589 5607 )
5590 5608 def purge(ui, repo, *dirs, **opts):
5591 5609 """removes files not tracked by Mercurial
5592 5610
5593 5611 Delete files not known to Mercurial. This is useful to test local
5594 5612 and uncommitted changes in an otherwise-clean source tree.
5595 5613
5596 5614 This means that purge will delete the following by default:
5597 5615
5598 5616 - Unknown files: files marked with "?" by :hg:`status`
5599 5617 - Empty directories: in fact Mercurial ignores directories unless
5600 5618 they contain files under source control management
5601 5619
5602 5620 But it will leave untouched:
5603 5621
5604 5622 - Modified and unmodified tracked files
5605 5623 - Ignored files (unless -i or --all is specified)
5606 5624 - New files added to the repository (with :hg:`add`)
5607 5625
5608 5626 The --files and --dirs options can be used to direct purge to delete
5609 5627 only files, only directories, or both. If neither option is given,
5610 5628 both will be deleted.
5611 5629
5612 5630 If directories are given on the command line, only files in these
5613 5631 directories are considered.
5614 5632
5615 5633 Be careful with purge, as you could irreversibly delete some files
5616 5634 you forgot to add to the repository. If you only want to print the
5617 5635 list of files that this program would delete, use the --print
5618 5636 option.
5619 5637 """
5620 5638 opts = pycompat.byteskwargs(opts)
5621 5639 cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
5622 5640
5623 5641 act = not opts.get(b'print')
5624 5642 eol = b'\n'
5625 5643 if opts.get(b'print0'):
5626 5644 eol = b'\0'
5627 5645 act = False # --print0 implies --print
5628 5646 if opts.get(b'all', False):
5629 5647 ignored = True
5630 5648 unknown = True
5631 5649 else:
5632 5650 ignored = opts.get(b'ignored', False)
5633 5651 unknown = not ignored
5634 5652
5635 5653 removefiles = opts.get(b'files')
5636 5654 removedirs = opts.get(b'dirs')
5637 5655 confirm = opts.get(b'confirm')
5638 5656 if confirm is None:
5639 5657 try:
5640 5658 extensions.find(b'purge')
5641 5659 confirm = False
5642 5660 except KeyError:
5643 5661 confirm = True
5644 5662
5645 5663 if not removefiles and not removedirs:
5646 5664 removefiles = True
5647 5665 removedirs = True
5648 5666
5649 5667 match = scmutil.match(repo[None], dirs, opts)
5650 5668
5651 5669 paths = mergemod.purge(
5652 5670 repo,
5653 5671 match,
5654 5672 unknown=unknown,
5655 5673 ignored=ignored,
5656 5674 removeemptydirs=removedirs,
5657 5675 removefiles=removefiles,
5658 5676 abortonerror=opts.get(b'abort_on_err'),
5659 5677 noop=not act,
5660 5678 confirm=confirm,
5661 5679 )
5662 5680
5663 5681 for path in paths:
5664 5682 if not act:
5665 5683 ui.write(b'%s%s' % (path, eol))
5666 5684
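Given how destructive purge is, the --print/--print0 options above make a good dry run before anything is deleted. A minimal sketch, assuming an ``hg`` executable on PATH and the current directory inside a repository::

    import subprocess

    # Dry run: list what would be removed, NUL-separated so unusual
    # filenames survive intact.
    out = subprocess.run(['hg', 'purge', '--print0'],
                         capture_output=True, check=True).stdout
    doomed = [p for p in out.split(b'\0') if p]
    print('purge would delete %d paths' % len(doomed))
    # Only after reviewing the list: hg purge --confirm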
5667 5685
5668 5686 @command(
5669 5687 b'push',
5670 5688 [
5671 5689 (b'f', b'force', None, _(b'force push')),
5672 5690 (
5673 5691 b'r',
5674 5692 b'rev',
5675 5693 [],
5676 5694 _(b'a changeset intended to be included in the destination'),
5677 5695 _(b'REV'),
5678 5696 ),
5679 5697 (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
5680 5698 (b'', b'all-bookmarks', None, _(b"push all bookmarks (EXPERIMENTAL)")),
5681 5699 (
5682 5700 b'b',
5683 5701 b'branch',
5684 5702 [],
5685 5703 _(b'a specific branch you would like to push'),
5686 5704 _(b'BRANCH'),
5687 5705 ),
5688 5706 (b'', b'new-branch', False, _(b'allow pushing a new branch')),
5689 5707 (
5690 5708 b'',
5691 5709 b'pushvars',
5692 5710 [],
5693 5711 _(b'variables that can be sent to server (ADVANCED)'),
5694 5712 ),
5695 5713 (
5696 5714 b'',
5697 5715 b'publish',
5698 5716 False,
5699 5717 _(b'push the changeset as public (EXPERIMENTAL)'),
5700 5718 ),
5701 5719 ]
5702 5720 + remoteopts,
5703 5721 _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'),
5704 5722 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5705 5723 helpbasic=True,
5706 5724 )
5707 5725 def push(ui, repo, *dests, **opts):
5708 5726 """push changes to the specified destination
5709 5727
5710 5728 Push changesets from the local repository to the specified
5711 5729 destination.
5712 5730
5713 5731 This operation is symmetrical to pull: it is identical to a pull
5714 5732 in the destination repository from the current one.
5715 5733
5716 5734 By default, push will not allow creation of new heads at the
5717 5735 destination, since multiple heads would make it unclear which head
5718 5736 to use. In this situation, it is recommended to pull and merge
5719 5737 before pushing.
5720 5738
5721 5739 Use --new-branch if you want to allow push to create a new named
5722 5740 branch that is not present at the destination. This allows you to
5723 5741 only create a new branch without forcing other changes.
5724 5742
5725 5743 .. note::
5726 5744
5727 5745 Extra care should be taken with the -f/--force option,
5728 5746 which will push all new heads on all branches, an action which will
5729 5747 almost always cause confusion for collaborators.
5730 5748
5731 5749 If -r/--rev is used, the specified revision and all its ancestors
5732 5750 will be pushed to the remote repository.
5733 5751
5734 5752 If -B/--bookmark is used, the specified bookmarked revision, its
5735 5753 ancestors, and the bookmark will be pushed to the remote
5736 5754 repository. Specifying ``.`` is equivalent to specifying the active
5737 5755 bookmark's name. Use the --all-bookmarks option for pushing all
5738 5756 current bookmarks.
5739 5757
5740 5758 Please see :hg:`help urls` for important details about ``ssh://``
5741 5759 URLs. If DESTINATION is omitted, a default path will be used.
5742 5760
5743 5761 When passed multiple destinations, push will process them one after the
5744 5762 other, but stop should an error occur.
5745 5763
5746 5764 .. container:: verbose
5747 5765
5748 5766 The --pushvars option sends strings to the server that become
5749 5767 environment variables prepended with ``HG_USERVAR_``. For example,
5750 5768     ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
5751 5769 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
5752 5770
5753 5771 pushvars can provide for user-overridable hooks as well as set debug
5754 5772 levels. One example is having a hook that blocks commits containing
5755 5773 conflict markers, but enables the user to override the hook if the file
5756 5774 is using conflict markers for testing purposes or the file format has
5757 5775 strings that look like conflict markers.
5758 5776
5759 5777 By default, servers will ignore `--pushvars`. To enable it add the
5760 5778 following to your configuration file::
5761 5779
5762 5780 [push]
5763 5781 pushvars.server = true
5764 5782
5765 5783 Returns 0 if push was successful, 1 if nothing to push.
5766 5784 """
5767 5785
5768 5786 opts = pycompat.byteskwargs(opts)
5769 5787
5770 5788 if opts.get(b'all_bookmarks'):
5771 5789 cmdutil.check_incompatible_arguments(
5772 5790 opts,
5773 5791 b'all_bookmarks',
5774 5792 [b'bookmark', b'rev'],
5775 5793 )
5776 5794 opts[b'bookmark'] = list(repo._bookmarks)
5777 5795
5778 5796 if opts.get(b'bookmark'):
5779 5797 ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
5780 5798 for b in opts[b'bookmark']:
5781 5799 # translate -B options to -r so changesets get pushed
5782 5800 b = repo._bookmarks.expandname(b)
5783 5801 if b in repo._bookmarks:
5784 5802 opts.setdefault(b'rev', []).append(b)
5785 5803 else:
5786 5804 # if we try to push a deleted bookmark, translate it to null
5787 5805 # this lets simultaneous -r, -b options continue working
5788 5806 opts.setdefault(b'rev', []).append(b"null")
5789 5807
5790 5808 some_pushed = False
5791 5809 result = 0
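    # Aggregate outcome over all destinations: 0 on success, 1 when there
    # was nothing to push, 2 when updating a remote bookmark failed.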
5792 5810 for path in urlutil.get_push_paths(repo, ui, dests):
5793 5811 dest = path.loc
5794 5812 branches = (path.branch, opts.get(b'branch') or [])
5795 5813 ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
5796 5814 revs, checkout = hg.addbranchrevs(
5797 5815 repo, repo, branches, opts.get(b'rev')
5798 5816 )
5799 5817 other = hg.peer(repo, opts, dest)
5800 5818
5801 5819 try:
5802 5820 if revs:
5803 5821 revs = [repo[r].node() for r in logcmdutil.revrange(repo, revs)]
5804 5822 if not revs:
5805 5823 raise error.InputError(
5806 5824 _(b"specified revisions evaluate to an empty set"),
5807 5825 hint=_(b"use different revision arguments"),
5808 5826 )
5809 5827 elif path.pushrev:
5810 5828 # It doesn't make any sense to specify ancestor revisions. So limit
5811 5829 # to DAG heads to make discovery simpler.
5812 5830 expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
5813 5831 revs = scmutil.revrange(repo, [expr])
5814 5832 revs = [repo[rev].node() for rev in revs]
5815 5833 if not revs:
5816 5834 raise error.InputError(
5817 5835 _(
5818 5836 b'default push revset for path evaluates to an empty set'
5819 5837 )
5820 5838 )
5821 5839 elif ui.configbool(b'commands', b'push.require-revs'):
5822 5840 raise error.InputError(
5823 5841 _(b'no revisions specified to push'),
5824 5842 hint=_(b'did you mean "hg push -r ."?'),
5825 5843 )
5826 5844
5827 5845 repo._subtoppath = dest
5828 5846 try:
5829 5847 # push subrepos depth-first for coherent ordering
5830 5848 c = repo[b'.']
5831 5849 subs = c.substate # only repos that are committed
5832 5850 for s in sorted(subs):
5833 5851 sub_result = c.sub(s).push(opts)
5834 5852 if sub_result == 0:
5835 5853 return 1
5836 5854 finally:
5837 5855 del repo._subtoppath
5838 5856
5839 5857 opargs = dict(
5840 5858 opts.get(b'opargs', {})
5841 5859 ) # copy opargs since we may mutate it
5842 5860 opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
5843 5861
5844 5862 pushop = exchange.push(
5845 5863 repo,
5846 5864 other,
5847 5865 opts.get(b'force'),
5848 5866 revs=revs,
5849 5867 newbranch=opts.get(b'new_branch'),
5850 5868 bookmarks=opts.get(b'bookmark', ()),
5851 5869 publish=opts.get(b'publish'),
5852 5870 opargs=opargs,
5853 5871 )
5854 5872
5855 5873 if pushop.cgresult == 0:
5856 5874 result = 1
5857 5875 elif pushop.cgresult is not None:
5858 5876 some_pushed = True
5859 5877
5860 5878 if pushop.bkresult is not None:
5861 5879 if pushop.bkresult == 2:
5862 5880 result = 2
5863 5881 elif not result and pushop.bkresult:
5864 5882 result = 2
5865 5883
5866 5884 if result:
5867 5885 break
5868 5886
5869 5887 finally:
5870 5888 other.close()
5871 5889 if result == 0 and not some_pushed:
5872 5890 result = 1
5873 5891 return result
5874 5892
5875 5893
5876 5894 @command(
5877 5895 b'recover',
5878 5896 [
5879 5897 (b'', b'verify', False, b"run `hg verify` after successful recover"),
5880 5898 ],
5881 5899 helpcategory=command.CATEGORY_MAINTENANCE,
5882 5900 )
5883 5901 def recover(ui, repo, **opts):
5884 5902 """roll back an interrupted transaction
5885 5903
5886 5904 Recover from an interrupted commit or pull.
5887 5905
5888 5906 This command tries to fix the repository status after an
5889 5907 interrupted operation. It should only be necessary when Mercurial
5890 5908 suggests it.
5891 5909
5892 5910     Returns 0 if successful, 1 if there is nothing to recover or verification fails.
5893 5911 """
5894 5912 ret = repo.recover()
5895 5913 if ret:
5896 5914 if opts['verify']:
5897 5915 return hg.verify(repo)
5898 5916 else:
5899 5917 msg = _(
5900 5918 b"(verify step skipped, run `hg verify` to check your "
5901 5919 b"repository content)\n"
5902 5920 )
5903 5921 ui.warn(msg)
5904 5922 return 0
5905 5923 return 1
5906 5924
5907 5925
5908 5926 @command(
5909 5927 b'remove|rm',
5910 5928 [
5911 5929 (b'A', b'after', None, _(b'record delete for missing files')),
5912 5930 (b'f', b'force', None, _(b'forget added files, delete modified files')),
5913 5931 ]
5914 5932 + subrepoopts
5915 5933 + walkopts
5916 5934 + dryrunopts,
5917 5935 _(b'[OPTION]... FILE...'),
5918 5936 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5919 5937 helpbasic=True,
5920 5938 inferrepo=True,
5921 5939 )
5922 5940 def remove(ui, repo, *pats, **opts):
5923 5941 """remove the specified files on the next commit
5924 5942
5925 5943 Schedule the indicated files for removal from the current branch.
5926 5944
5927 5945 This command schedules the files to be removed at the next commit.
5928 5946 To undo a remove before that, see :hg:`revert`. To undo added
5929 5947 files, see :hg:`forget`.
5930 5948
5931 5949 .. container:: verbose
5932 5950
5933 5951 -A/--after can be used to remove only files that have already
5934 5952 been deleted, -f/--force can be used to force deletion, and -Af
5935 5953 can be used to remove files from the next revision without
5936 5954 deleting them from the working directory.
5937 5955
5938 5956 The following table details the behavior of remove for different
5939 5957 file states (columns) and option combinations (rows). The file
5940 5958 states are Added [A], Clean [C], Modified [M] and Missing [!]
5941 5959 (as reported by :hg:`status`). The actions are Warn, Remove
5942 5960 (from branch) and Delete (from disk):
5943 5961
5944 5962 ========= == == == ==
5945 5963 opt/state A C M !
5946 5964 ========= == == == ==
5947 5965 none W RD W R
5948 5966 -f R RD RD R
5949 5967 -A W W W R
5950 5968 -Af R R R R
5951 5969 ========= == == == ==
5952 5970
5953 5971 .. note::
5954 5972
5955 5973 :hg:`remove` never deletes files in Added [A] state from the
5956 5974 working directory, not even if ``--force`` is specified.
5957 5975
5958 5976 Returns 0 on success, 1 if any warnings encountered.
5959 5977 """
5960 5978
5961 5979 opts = pycompat.byteskwargs(opts)
5962 5980 after, force = opts.get(b'after'), opts.get(b'force')
5963 5981 dryrun = opts.get(b'dry_run')
5964 5982 if not pats and not after:
5965 5983 raise error.InputError(_(b'no files specified'))
5966 5984
5967 5985 with repo.wlock(), repo.dirstate.changing_files(repo):
5968 5986 m = scmutil.match(repo[None], pats, opts)
5969 5987 subrepos = opts.get(b'subrepos')
5970 5988 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
5971 5989 return cmdutil.remove(
5972 5990 ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun
5973 5991 )
5974 5992
5975 5993
5976 5994 @command(
5977 5995 b'rename|move|mv',
5978 5996 [
5979 5997 (b'', b'forget', None, _(b'unmark a destination file as renamed')),
5980 5998 (b'A', b'after', None, _(b'record a rename that has already occurred')),
5981 5999 (
5982 6000 b'',
5983 6001 b'at-rev',
5984 6002 b'',
5985 6003 _(b'(un)mark renames in the given revision (EXPERIMENTAL)'),
5986 6004 _(b'REV'),
5987 6005 ),
5988 6006 (
5989 6007 b'f',
5990 6008 b'force',
5991 6009 None,
5992 6010 _(b'forcibly move over an existing managed file'),
5993 6011 ),
5994 6012 ]
5995 6013 + walkopts
5996 6014 + dryrunopts,
5997 6015 _(b'[OPTION]... SOURCE... DEST'),
5998 6016 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5999 6017 )
6000 6018 def rename(ui, repo, *pats, **opts):
6001 6019 """rename files; equivalent of copy + remove
6002 6020
6003 6021 Mark dest as copies of sources; mark sources for deletion. If dest
6004 6022 is a directory, copies are put in that directory. If dest is a
6005 6023 file, there can only be one source.
6006 6024
6007 6025 By default, this command copies the contents of files as they
6008 6026 exist in the working directory. If invoked with -A/--after, the
6009 6027 operation is recorded, but no copying is performed.
6010 6028
6011 6029 To undo marking a destination file as renamed, use --forget. With that
6012 6030 option, all given (positional) arguments are unmarked as renames. The
6013 6031 destination file(s) will be left in place (still tracked). The source
6014 6032 file(s) will not be restored. Note that :hg:`rename --forget` behaves
6015 6033 the same way as :hg:`copy --forget`.
6016 6034
6017 6035 This command takes effect with the next commit by default.
6018 6036
6019 6037 Returns 0 on success, 1 if errors are encountered.
6020 6038 """
6021 6039 opts = pycompat.byteskwargs(opts)
6022 6040 context = lambda repo: repo.dirstate.changing_files(repo)
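    # Copying/renaming in the working copy needs a changing_files dirstate
    # context; when --at-rev targets an already-committed revision the
    # dirstate is left untouched, so a null context manager is enough.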
6023 6041 rev = opts.get(b'at_rev')
6024 6042 ctx = None
6025 6043 if rev:
6026 6044 ctx = logcmdutil.revsingle(repo, rev)
6027 6045 if ctx.rev() is not None:
6028 6046
6029 6047 def context(repo):
6030 6048 return util.nullcontextmanager()
6031 6049
6032 6050 opts[b'at_rev'] = ctx.rev()
6033 6051 with repo.wlock(), context(repo):
6034 6052 return cmdutil.copy(ui, repo, pats, opts, rename=True)
6035 6053
6036 6054
6037 6055 @command(
6038 6056 b'resolve',
6039 6057 [
6040 6058 (b'a', b'all', None, _(b'select all unresolved files')),
6041 6059 (b'l', b'list', None, _(b'list state of files needing merge')),
6042 6060 (b'm', b'mark', None, _(b'mark files as resolved')),
6043 6061 (b'u', b'unmark', None, _(b'mark files as unresolved')),
6044 6062 (b'n', b'no-status', None, _(b'hide status prefix')),
6045 6063 (b'', b're-merge', None, _(b're-merge files')),
6046 6064 ]
6047 6065 + mergetoolopts
6048 6066 + walkopts
6049 6067 + formatteropts,
6050 6068 _(b'[OPTION]... [FILE]...'),
6051 6069 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6052 6070 inferrepo=True,
6053 6071 )
6054 6072 def resolve(ui, repo, *pats, **opts):
6055 6073 """redo merges or set/view the merge status of files
6056 6074
6057 6075 Merges with unresolved conflicts are often the result of
6058 6076 non-interactive merging using the ``internal:merge`` configuration
6059 6077 setting, or a command-line merge tool like ``diff3``. The resolve
6060 6078 command is used to manage the files involved in a merge, after
6061 6079 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
6062 6080 working directory must have two parents). See :hg:`help
6063 6081 merge-tools` for information on configuring merge tools.
6064 6082
6065 6083 The resolve command can be used in the following ways:
6066 6084
6067 6085 - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
6068 6086 the specified files, discarding any previous merge attempts. Re-merging
6069 6087 is not performed for files already marked as resolved. Use ``--all/-a``
6070 6088 to select all unresolved files. ``--tool`` can be used to specify
6071 6089 the merge tool used for the given files. It overrides the HGMERGE
6072 6090 environment variable and your configuration files. Previous file
6073 6091 contents are saved with a ``.orig`` suffix.
6074 6092
6075 6093 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
6076 6094 (e.g. after having manually fixed-up the files). The default is
6077 6095 to mark all unresolved files.
6078 6096
6079 6097 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
6080 6098 default is to mark all resolved files.
6081 6099
6082 6100 - :hg:`resolve -l`: list files which had or still have conflicts.
6083 6101 In the printed list, ``U`` = unresolved and ``R`` = resolved.
6084 6102 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
6085 6103 the list. See :hg:`help filesets` for details.
6086 6104
6087 6105 .. note::
6088 6106
6089 6107 Mercurial will not let you commit files with unresolved merge
6090 6108 conflicts. You must use :hg:`resolve -m ...` before you can
6091 6109 commit after a conflicting merge.
6092 6110
6093 6111 .. container:: verbose
6094 6112
6095 6113 Template:
6096 6114
6097 6115 The following keywords are supported in addition to the common template
6098 6116 keywords and functions. See also :hg:`help templates`.
6099 6117
6100 6118 :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
6101 6119 :path: String. Repository-absolute path of the file.
6102 6120
6103 6121 Returns 0 on success, 1 if any files fail a resolve attempt.
6104 6122 """
6105 6123
6106 6124 opts = pycompat.byteskwargs(opts)
6107 6125 confirm = ui.configbool(b'commands', b'resolve.confirm')
6108 6126 flaglist = b'all mark unmark list no_status re_merge'.split()
6109 6127 all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
6110 6128
6111 6129 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
6112 6130 if actioncount > 1:
6113 6131 raise error.InputError(_(b"too many actions specified"))
6114 6132 elif actioncount == 0 and ui.configbool(
6115 6133 b'commands', b'resolve.explicit-re-merge'
6116 6134 ):
6117 6135 hint = _(b'use --mark, --unmark, --list or --re-merge')
6118 6136 raise error.InputError(_(b'no action specified'), hint=hint)
6119 6137 if pats and all:
6120 6138 raise error.InputError(_(b"can't specify --all and patterns"))
6121 6139 if not (all or pats or show or mark or unmark):
6122 6140 raise error.InputError(
6123 6141 _(b'no files or directories specified'),
6124 6142 hint=b'use --all to re-merge all unresolved files',
6125 6143 )
6126 6144
6127 6145 if confirm:
6128 6146 if all:
6129 6147 if ui.promptchoice(
6130 6148 _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
6131 6149 ):
6132 6150 raise error.CanceledError(_(b'user quit'))
6133 6151 if mark and not pats:
6134 6152 if ui.promptchoice(
6135 6153 _(
6136 6154 b'mark all unresolved files as resolved (yn)?'
6137 6155 b'$$ &Yes $$ &No'
6138 6156 )
6139 6157 ):
6140 6158 raise error.CanceledError(_(b'user quit'))
6141 6159 if unmark and not pats:
6142 6160 if ui.promptchoice(
6143 6161 _(
6144 6162 b'mark all resolved files as unresolved (yn)?'
6145 6163 b'$$ &Yes $$ &No'
6146 6164 )
6147 6165 ):
6148 6166 raise error.CanceledError(_(b'user quit'))
6149 6167
6150 6168 uipathfn = scmutil.getuipathfn(repo)
6151 6169
6152 6170 if show:
6153 6171 ui.pager(b'resolve')
6154 6172 fm = ui.formatter(b'resolve', opts)
6155 6173 ms = mergestatemod.mergestate.read(repo)
6156 6174 wctx = repo[None]
6157 6175 m = scmutil.match(wctx, pats, opts)
6158 6176
6159 6177 # Labels and keys based on merge state. Unresolved path conflicts show
6160 6178 # as 'P'. Resolved path conflicts show as 'R', the same as normal
6161 6179 # resolved conflicts.
6162 6180 mergestateinfo = {
6163 6181 mergestatemod.MERGE_RECORD_UNRESOLVED: (
6164 6182 b'resolve.unresolved',
6165 6183 b'U',
6166 6184 ),
6167 6185 mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
6168 6186 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
6169 6187 b'resolve.unresolved',
6170 6188 b'P',
6171 6189 ),
6172 6190 mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
6173 6191 b'resolve.resolved',
6174 6192 b'R',
6175 6193 ),
6176 6194 }
6177 6195
6178 6196 for f in ms:
6179 6197 if not m(f):
6180 6198 continue
6181 6199
6182 6200 label, key = mergestateinfo[ms[f]]
6183 6201 fm.startitem()
6184 6202 fm.context(ctx=wctx)
6185 6203 fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
6186 6204 fm.data(path=f)
6187 6205 fm.plain(b'%s\n' % uipathfn(f), label=label)
6188 6206 fm.end()
6189 6207 return 0
6190 6208
6191 6209 with repo.wlock():
6192 6210 ms = mergestatemod.mergestate.read(repo)
6193 6211
6194 6212 if not (ms.active() or repo.dirstate.p2() != repo.nullid):
6195 6213 raise error.StateError(
6196 6214 _(b'resolve command not applicable when not merging')
6197 6215 )
6198 6216
6199 6217 wctx = repo[None]
6200 6218 m = scmutil.match(wctx, pats, opts)
6201 6219 ret = 0
6202 6220 didwork = False
6203 6221
6204 6222 hasconflictmarkers = []
6205 6223 if mark:
6206 6224 markcheck = ui.config(b'commands', b'resolve.mark-check')
6207 6225 if markcheck not in [b'warn', b'abort']:
6208 6226 # Treat all invalid / unrecognized values as 'none'.
6209 6227 markcheck = False
6210 6228 for f in ms:
6211 6229 if not m(f):
6212 6230 continue
6213 6231
6214 6232 didwork = True
6215 6233
6216 6234 # path conflicts must be resolved manually
6217 6235 if ms[f] in (
6218 6236 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
6219 6237 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
6220 6238 ):
6221 6239 if mark:
6222 6240 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
6223 6241 elif unmark:
6224 6242 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
6225 6243 elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
6226 6244 ui.warn(
6227 6245 _(b'%s: path conflict must be resolved manually\n')
6228 6246 % uipathfn(f)
6229 6247 )
6230 6248 continue
6231 6249
6232 6250 if mark:
6233 6251 if markcheck:
6234 6252 fdata = repo.wvfs.tryread(f)
6235 6253 if (
6236 6254 filemerge.hasconflictmarkers(fdata)
6237 6255 and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
6238 6256 ):
6239 6257 hasconflictmarkers.append(f)
6240 6258 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
6241 6259 elif unmark:
6242 6260 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
6243 6261 else:
6244 6262 # backup pre-resolve (merge uses .orig for its own purposes)
6245 6263 a = repo.wjoin(f)
6246 6264 try:
6247 6265 util.copyfile(a, a + b".resolve")
6248 6266 except FileNotFoundError:
6249 6267 pass
6250 6268
6251 6269 try:
6252 6270 # preresolve file
6253 6271 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
6254 6272 with ui.configoverride(overrides, b'resolve'):
6255 6273 r = ms.resolve(f, wctx)
6256 6274 if r:
6257 6275 ret = 1
6258 6276 finally:
6259 6277 ms.commit()
6260 6278
6261 6279 # replace filemerge's .orig file with our resolve file
6262 6280 try:
6263 6281 util.rename(
6264 6282 a + b".resolve", scmutil.backuppath(ui, repo, f)
6265 6283 )
6266 6284 except FileNotFoundError:
6267 6285 pass
6268 6286
6269 6287 if hasconflictmarkers:
6270 6288 ui.warn(
6271 6289 _(
6272 6290 b'warning: the following files still have conflict '
6273 6291 b'markers:\n'
6274 6292 )
6275 6293 + b''.join(
6276 6294 b' ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
6277 6295 )
6278 6296 )
6279 6297 if markcheck == b'abort' and not all and not pats:
6280 6298 raise error.StateError(
6281 6299 _(b'conflict markers detected'),
6282 6300 hint=_(b'use --all to mark anyway'),
6283 6301 )
6284 6302
6285 6303 ms.commit()
6286 6304 branchmerge = repo.dirstate.p2() != repo.nullid
6287 6305         # resolve is not changing the parents here, however `recordupdates`
6288 6306         # will call some dirstate APIs that are intended for parent changes.
6289 6307         # Ideally we would not need this and could implement a lighter version
6290 6308         # of the recordupdates logic that would not have to deal with the part
6291 6309         # related to parent changes. However, this would require that:
6292 6310         # - we are sure we passed around enough information at update/merge
6293 6311         #   time to no longer need it at `hg resolve` time
6294 6312         # - we are sure we store that information well enough to be able to reuse it
6295 6313         # - we have the necessary logic to reuse it correctly.
6296 6314         #
6297 6315         # All of this should eventually happen, but in the meantime we use this
6298 6316         # context manager slightly outside of the context it was designed for.
6299 6317 with repo.dirstate.changing_parents(repo):
6300 6318 mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)
6301 6319
6302 6320 if not didwork and pats:
6303 6321 hint = None
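            # The given patterns carry no kind prefix: retry them as exact
            # 'path:' patterns against the merge state so a usable command
            # line can be suggested.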
6304 6322 if not any([p for p in pats if p.find(b':') >= 0]):
6305 6323 pats = [b'path:%s' % p for p in pats]
6306 6324 m = scmutil.match(wctx, pats, opts)
6307 6325 for f in ms:
6308 6326 if not m(f):
6309 6327 continue
6310 6328
6311 6329 def flag(o):
6312 6330 if o == b're_merge':
6313 6331 return b'--re-merge '
6314 6332 return b'-%s ' % o[0:1]
6315 6333
6316 6334 flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
6317 6335 hint = _(b"(try: hg resolve %s%s)\n") % (
6318 6336 flags,
6319 6337 b' '.join(pats),
6320 6338 )
6321 6339 break
6322 6340 ui.warn(_(b"arguments do not match paths that need resolving\n"))
6323 6341 if hint:
6324 6342 ui.warn(hint)
6325 6343
6326 6344 unresolvedf = ms.unresolvedcount()
6327 6345 if not unresolvedf:
6328 6346 ui.status(_(b'(no more unresolved files)\n'))
6329 6347 cmdutil.checkafterresolved(repo)
6330 6348
6331 6349 return ret
6332 6350
6333 6351
6334 6352 @command(
6335 6353 b'revert',
6336 6354 [
6337 6355 (b'a', b'all', None, _(b'revert all changes when no arguments given')),
6338 6356 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
6339 6357 (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
6340 6358 (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
6341 6359 (b'i', b'interactive', None, _(b'interactively select the changes')),
6342 6360 ]
6343 6361 + walkopts
6344 6362 + dryrunopts,
6345 6363 _(b'[OPTION]... [-r REV] [NAME]...'),
6346 6364 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6347 6365 )
6348 6366 def revert(ui, repo, *pats, **opts):
6349 6367 """restore files to their checkout state
6350 6368
6351 6369 .. note::
6352 6370
6353 6371 To check out earlier revisions, you should use :hg:`update REV`.
6354 6372 To cancel an uncommitted merge (and lose your changes),
6355 6373 use :hg:`merge --abort`.
6356 6374
6357 6375 With no revision specified, revert the specified files or directories
6358 6376 to the contents they had in the parent of the working directory.
6359 6377 This restores the contents of files to an unmodified
6360 6378 state and unschedules adds, removes, copies, and renames. If the
6361 6379 working directory has two parents, you must explicitly specify a
6362 6380 revision.
6363 6381
6364 6382 Using the -r/--rev or -d/--date options, revert the given files or
6365 6383 directories to their states as of a specific revision. Because
6366 6384 revert does not change the working directory parents, this will
6367 6385 cause these files to appear modified. This can be helpful to "back
6368 6386 out" some or all of an earlier change. See :hg:`backout` for a
6369 6387 related method.
6370 6388
6371 6389 Modified files are saved with a .orig suffix before reverting.
6372 6390 To disable these backups, use --no-backup. It is possible to store
6373 6391 the backup files in a custom directory relative to the root of the
6374 6392 repository by setting the ``ui.origbackuppath`` configuration
6375 6393 option.
6376 6394
6377 6395 See :hg:`help dates` for a list of formats valid for -d/--date.
6378 6396
6379 6397 See :hg:`help backout` for a way to reverse the effect of an
6380 6398 earlier changeset.
6381 6399
6382 6400 Returns 0 on success.
6383 6401 """
6384 6402
6385 6403 opts = pycompat.byteskwargs(opts)
6386 6404 if opts.get(b"date"):
6387 6405 cmdutil.check_incompatible_arguments(opts, b'date', [b'rev'])
6388 6406 opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])
6389 6407
6390 6408 parent, p2 = repo.dirstate.parents()
6391 6409 if not opts.get(b'rev') and p2 != repo.nullid:
6392 6410 # revert after merge is a trap for new users (issue2915)
6393 6411 raise error.InputError(
6394 6412 _(b'uncommitted merge with no revision specified'),
6395 6413 hint=_(b"use 'hg update' or see 'hg help revert'"),
6396 6414 )
6397 6415
6398 6416 rev = opts.get(b'rev')
6399 6417 if rev:
6400 6418 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
6401 6419 ctx = logcmdutil.revsingle(repo, rev)
6402 6420
6403 6421 if not (
6404 6422 pats
6405 6423 or opts.get(b'include')
6406 6424 or opts.get(b'exclude')
6407 6425 or opts.get(b'all')
6408 6426 or opts.get(b'interactive')
6409 6427 ):
6410 6428 msg = _(b"no files or directories specified")
6411 6429 if p2 != repo.nullid:
6412 6430 hint = _(
6413 6431 b"uncommitted merge, use --all to discard all changes,"
6414 6432 b" or 'hg update -C .' to abort the merge"
6415 6433 )
6416 6434 raise error.InputError(msg, hint=hint)
6417 6435 dirty = any(repo.status())
6418 6436 node = ctx.node()
6419 6437 if node != parent:
6420 6438 if dirty:
6421 6439 hint = (
6422 6440 _(
6423 6441 b"uncommitted changes, use --all to discard all"
6424 6442 b" changes, or 'hg update %d' to update"
6425 6443 )
6426 6444 % ctx.rev()
6427 6445 )
6428 6446 else:
6429 6447 hint = (
6430 6448 _(
6431 6449 b"use --all to revert all files,"
6432 6450 b" or 'hg update %d' to update"
6433 6451 )
6434 6452 % ctx.rev()
6435 6453 )
6436 6454 elif dirty:
6437 6455 hint = _(b"uncommitted changes, use --all to discard all changes")
6438 6456 else:
6439 6457 hint = _(b"use --all to revert all files")
6440 6458 raise error.InputError(msg, hint=hint)
6441 6459
6442 6460 return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))
6443 6461
6444 6462
6445 6463 @command(
6446 6464 b'rollback',
6447 6465 dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
6448 6466 helpcategory=command.CATEGORY_MAINTENANCE,
6449 6467 )
6450 6468 def rollback(ui, repo, **opts):
6451 6469 """roll back the last transaction (DANGEROUS) (DEPRECATED)
6452 6470
6453 6471 Please use :hg:`commit --amend` instead of rollback to correct
6454 6472 mistakes in the last commit.
6455 6473
6456 6474 This command should be used with care. There is only one level of
6457 6475 rollback, and there is no way to undo a rollback. It will also
6458 6476 restore the dirstate at the time of the last transaction, losing
6459 6477 any dirstate changes since that time. This command does not alter
6460 6478 the working directory.
6461 6479
6462 6480 Transactions are used to encapsulate the effects of all commands
6463 6481 that create new changesets or propagate existing changesets into a
6464 6482 repository.
6465 6483
6466 6484 .. container:: verbose
6467 6485
6468 6486 For example, the following commands are transactional, and their
6469 6487 effects can be rolled back:
6470 6488
6471 6489 - commit
6472 6490 - import
6473 6491 - pull
6474 6492 - push (with this repository as the destination)
6475 6493 - unbundle
6476 6494
6477 6495     To avoid permanent data loss, rollback will refuse to roll back a
6478 6496 commit transaction if it isn't checked out. Use --force to
6479 6497 override this protection.
6480 6498
6481 6499 The rollback command can be entirely disabled by setting the
6482 6500 ``ui.rollback`` configuration setting to false. If you're here
6483 6501 because you want to use rollback and it's disabled, you can
6484 6502 re-enable the command by setting ``ui.rollback`` to true.
6485 6503
6486 6504 This command is not intended for use on public repositories. Once
6487 6505 changes are visible for pull by other users, rolling a transaction
6488 6506 back locally is ineffective (someone else may already have pulled
6489 6507 the changes). Furthermore, a race is possible with readers of the
6490 6508 repository; for example an in-progress pull from the repository
6491 6509 may fail if a rollback is performed.
6492 6510
6493 6511 Returns 0 on success, 1 if no rollback data is available.
6494 6512 """
6495 6513 if not ui.configbool(b'ui', b'rollback'):
6496 6514 raise error.Abort(
6497 6515 _(b'rollback is disabled because it is unsafe'),
6498 6516 hint=b'see `hg help -v rollback` for information',
6499 6517 )
6500 6518 return repo.rollback(dryrun=opts.get('dry_run'), force=opts.get('force'))
6501 6519
6502 6520
6503 6521 @command(
6504 6522 b'root',
6505 6523 [] + formatteropts,
6506 6524 intents={INTENT_READONLY},
6507 6525 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6508 6526 )
6509 6527 def root(ui, repo, **opts):
6510 6528 """print the root (top) of the current working directory
6511 6529
6512 6530 Print the root directory of the current repository.
6513 6531
6514 6532 .. container:: verbose
6515 6533
6516 6534 Template:
6517 6535
6518 6536 The following keywords are supported in addition to the common template
6519 6537 keywords and functions. See also :hg:`help templates`.
6520 6538
6521 6539 :hgpath: String. Path to the .hg directory.
6522 6540 :storepath: String. Path to the directory holding versioned data.
6523 6541
6524 6542 Returns 0 on success.
6525 6543 """
6526 6544 opts = pycompat.byteskwargs(opts)
6527 6545 with ui.formatter(b'root', opts) as fm:
6528 6546 fm.startitem()
6529 6547 fm.write(b'reporoot', b'%s\n', repo.root)
6530 6548 fm.data(hgpath=repo.path, storepath=repo.spath)
6531 6549
6532 6550
6533 6551 @command(
6534 6552 b'serve',
6535 6553 [
6536 6554 (
6537 6555 b'A',
6538 6556 b'accesslog',
6539 6557 b'',
6540 6558 _(b'name of access log file to write to'),
6541 6559 _(b'FILE'),
6542 6560 ),
6543 6561 (b'd', b'daemon', None, _(b'run server in background')),
6544 6562 (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')),
6545 6563 (
6546 6564 b'E',
6547 6565 b'errorlog',
6548 6566 b'',
6549 6567 _(b'name of error log file to write to'),
6550 6568 _(b'FILE'),
6551 6569 ),
6552 6570         # use a string type so we can check whether something was passed
6553 6571 (
6554 6572 b'p',
6555 6573 b'port',
6556 6574 b'',
6557 6575 _(b'port to listen on (default: 8000)'),
6558 6576 _(b'PORT'),
6559 6577 ),
6560 6578 (
6561 6579 b'a',
6562 6580 b'address',
6563 6581 b'',
6564 6582 _(b'address to listen on (default: all interfaces)'),
6565 6583 _(b'ADDR'),
6566 6584 ),
6567 6585 (
6568 6586 b'',
6569 6587 b'prefix',
6570 6588 b'',
6571 6589 _(b'prefix path to serve from (default: server root)'),
6572 6590 _(b'PREFIX'),
6573 6591 ),
6574 6592 (
6575 6593 b'n',
6576 6594 b'name',
6577 6595 b'',
6578 6596 _(b'name to show in web pages (default: working directory)'),
6579 6597 _(b'NAME'),
6580 6598 ),
6581 6599 (
6582 6600 b'',
6583 6601 b'web-conf',
6584 6602 b'',
6585 6603 _(b"name of the hgweb config file (see 'hg help hgweb')"),
6586 6604 _(b'FILE'),
6587 6605 ),
6588 6606 (
6589 6607 b'',
6590 6608 b'webdir-conf',
6591 6609 b'',
6592 6610 _(b'name of the hgweb config file (DEPRECATED)'),
6593 6611 _(b'FILE'),
6594 6612 ),
6595 6613 (
6596 6614 b'',
6597 6615 b'pid-file',
6598 6616 b'',
6599 6617 _(b'name of file to write process ID to'),
6600 6618 _(b'FILE'),
6601 6619 ),
6602 6620 (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')),
6603 6621 (
6604 6622 b'',
6605 6623 b'cmdserver',
6606 6624 b'',
6607 6625 _(b'for remote clients (ADVANCED)'),
6608 6626 _(b'MODE'),
6609 6627 ),
6610 6628 (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')),
6611 6629 (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')),
6612 6630 (b'6', b'ipv6', None, _(b'use IPv6 instead of IPv4')),
6613 6631 (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')),
6614 6632 (b'', b'print-url', None, _(b'start and print only the URL')),
6615 6633 ]
6616 6634 + subrepoopts,
6617 6635 _(b'[OPTION]...'),
6618 6636 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
6619 6637 helpbasic=True,
6620 6638 optionalrepo=True,
6621 6639 )
6622 6640 def serve(ui, repo, **opts):
6623 6641 """start stand-alone webserver
6624 6642
6625 6643 Start a local HTTP repository browser and pull server. You can use
6626 6644 this for ad-hoc sharing and browsing of repositories. It is
6627 6645 recommended to use a real web server to serve a repository for
6628 6646 longer periods of time.
6629 6647
6630 6648 Please note that the server does not implement access control.
6631 6649 This means that, by default, anybody can read from the server and
6632 6650     nobody can write to it. Set the ``web.allow-push``
6633 6651 option to ``*`` to allow everybody to push to the server. You
6634 6652 should use a real web server if you need to authenticate users.
6635 6653
6636 6654 By default, the server logs accesses to stdout and errors to
6637 6655 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
6638 6656 files.
6639 6657
6640 6658 To have the server choose a free port number to listen on, specify
6641 6659 a port number of 0; in this case, the server will print the port
6642 6660 number it uses.
6643 6661
6644 6662 Returns 0 on success.
6645 6663 """
6646 6664
6647 6665 cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver'])
6648 6666 opts = pycompat.byteskwargs(opts)
6649 6667 if opts[b"print_url"] and ui.verbose:
6650 6668 raise error.InputError(_(b"cannot use --print-url with --verbose"))
6651 6669
6652 6670 if opts[b"stdio"]:
6653 6671 if repo is None:
6654 6672 raise error.RepoError(
6655 6673 _(b"there is no Mercurial repository here (.hg not found)")
6656 6674 )
6657 6675 s = wireprotoserver.sshserver(ui, repo)
6658 6676 s.serve_forever()
6659 6677 return
6660 6678
6661 6679 service = server.createservice(ui, repo, opts)
6662 6680 return server.runservice(opts, initfn=service.init, runfn=service.run)
6663 6681
6664 6682
6665 6683 @command(
6666 6684 b'shelve',
6667 6685 [
6668 6686 (
6669 6687 b'A',
6670 6688 b'addremove',
6671 6689 None,
6672 6690 _(b'mark new/missing files as added/removed before shelving'),
6673 6691 ),
6674 6692 (b'u', b'unknown', None, _(b'store unknown files in the shelve')),
6675 6693 (b'', b'cleanup', None, _(b'delete all shelved changes')),
6676 6694 (
6677 6695 b'',
6678 6696 b'date',
6679 6697 b'',
6680 6698 _(b'shelve with the specified commit date'),
6681 6699 _(b'DATE'),
6682 6700 ),
6683 6701 (b'd', b'delete', None, _(b'delete the named shelved change(s)')),
6684 6702 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
6685 6703 (
6686 6704 b'k',
6687 6705 b'keep',
6688 6706 False,
6689 6707 _(b'shelve, but keep changes in the working directory'),
6690 6708 ),
6691 6709 (b'l', b'list', None, _(b'list current shelves')),
6692 6710 (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')),
6693 6711 (
6694 6712 b'n',
6695 6713 b'name',
6696 6714 b'',
6697 6715 _(b'use the given name for the shelved commit'),
6698 6716 _(b'NAME'),
6699 6717 ),
6700 6718 (
6701 6719 b'p',
6702 6720 b'patch',
6703 6721 None,
6704 6722 _(
6705 6723 b'output patches for changes (provide the names of the shelved '
6706 6724 b'changes as positional arguments)'
6707 6725 ),
6708 6726 ),
6709 6727 (b'i', b'interactive', None, _(b'interactive mode')),
6710 6728 (
6711 6729 b'',
6712 6730 b'stat',
6713 6731 None,
6714 6732 _(
6715 6733 b'output diffstat-style summary of changes (provide the names of '
6716 6734 b'the shelved changes as positional arguments)'
6717 6735 ),
6718 6736 ),
6719 6737 ]
6720 6738 + cmdutil.walkopts,
6721 6739 _(b'hg shelve [OPTION]... [FILE]...'),
6722 6740 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6723 6741 )
6724 6742 def shelve(ui, repo, *pats, **opts):
6725 6743 """save and set aside changes from the working directory
6726 6744
6727 6745 Shelving takes files that "hg status" reports as not clean, saves
6728 6746 the modifications to a bundle (a shelved change), and reverts the
6729 6747 files so that their state in the working directory becomes clean.
6730 6748
6731 6749     To restore these changes to the working directory, use "hg
6732 6750 unshelve"; this will work even if you switch to a different
6733 6751 commit.
6734 6752
6735 6753 When no files are specified, "hg shelve" saves all not-clean
6736 6754 files. If specific files or directories are named, only changes to
6737 6755 those files are shelved.
6738 6756
6739 6757     In a bare shelve (when no files are specified and the interactive,
6740 6758     include and exclude options are not used), shelving remembers whether
6741 6759     the working directory was on a newly created branch, that is, on a
6742 6760     different branch than its first parent. In this situation, unshelving
6743 6761     restores that branch information to the working directory.
6744 6762
6745 6763 Each shelved change has a name that makes it easier to find later.
6746 6764 The name of a shelved change defaults to being based on the active
6747 6765 bookmark, or if there is no active bookmark, the current named
6748 6766 branch. To specify a different name, use ``--name``.
6749 6767
6750 6768 To see a list of existing shelved changes, use the ``--list``
6751 6769 option. For each shelved change, this will print its name, age,
6752 6770 and description; use ``--patch`` or ``--stat`` for more details.
6753 6771
6754 6772 To delete specific shelved changes, use ``--delete``. To delete
6755 6773 all shelved changes, use ``--cleanup``.
6756 6774 """
6757 6775 opts = pycompat.byteskwargs(opts)
6758 6776 allowables = [
6759 6777 (b'addremove', {b'create'}), # 'create' is pseudo action
6760 6778 (b'unknown', {b'create'}),
6761 6779 (b'cleanup', {b'cleanup'}),
6762 6780 # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
6763 6781 (b'delete', {b'delete'}),
6764 6782 (b'edit', {b'create'}),
6765 6783 (b'keep', {b'create'}),
6766 6784 (b'list', {b'list'}),
6767 6785 (b'message', {b'create'}),
6768 6786 (b'name', {b'create'}),
6769 6787 (b'patch', {b'patch', b'list'}),
6770 6788 (b'stat', {b'stat', b'list'}),
6771 6789 ]
6772 6790
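    # Each flag above maps to the set of shelve actions it may be combined
    # with; checkopt() uses this to reject mixes such as '--cleanup --patch'.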
6773 6791 def checkopt(opt):
6774 6792 if opts.get(opt):
6775 6793 for i, allowable in allowables:
6776 6794 if opts[i] and opt not in allowable:
6777 6795 raise error.InputError(
6778 6796 _(
6779 6797 b"options '--%s' and '--%s' may not be "
6780 6798 b"used together"
6781 6799 )
6782 6800 % (opt, i)
6783 6801 )
6784 6802 return True
6785 6803
6786 6804 if checkopt(b'cleanup'):
6787 6805 if pats:
6788 6806 raise error.InputError(
6789 6807 _(b"cannot specify names when using '--cleanup'")
6790 6808 )
6791 6809 return shelvemod.cleanupcmd(ui, repo)
6792 6810 elif checkopt(b'delete'):
6793 6811 return shelvemod.deletecmd(ui, repo, pats)
6794 6812 elif checkopt(b'list'):
6795 6813 return shelvemod.listcmd(ui, repo, pats, opts)
6796 6814 elif checkopt(b'patch') or checkopt(b'stat'):
6797 6815 return shelvemod.patchcmds(ui, repo, pats, opts)
6798 6816 else:
6799 6817 return shelvemod.createcmd(ui, repo, pats, opts)
6800 6818
6801 6819
6802 6820 _NOTTERSE = b'nothing'
6803 6821
6804 6822
6805 6823 @command(
6806 6824 b'status|st',
6807 6825 [
6808 6826 (b'A', b'all', None, _(b'show status of all files')),
6809 6827 (b'm', b'modified', None, _(b'show only modified files')),
6810 6828 (b'a', b'added', None, _(b'show only added files')),
6811 6829 (b'r', b'removed', None, _(b'show only removed files')),
6812 6830 (b'd', b'deleted', None, _(b'show only missing files')),
6813 6831 (b'c', b'clean', None, _(b'show only files without changes')),
6814 6832 (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')),
6815 6833 (b'i', b'ignored', None, _(b'show only ignored files')),
6816 6834 (b'n', b'no-status', None, _(b'hide status prefix')),
6817 6835 (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
6818 6836 (
6819 6837 b'C',
6820 6838 b'copies',
6821 6839 None,
6822 6840 _(b'show source of copied files (DEFAULT: ui.statuscopies)'),
6823 6841 ),
6824 6842 (
6825 6843 b'0',
6826 6844 b'print0',
6827 6845 None,
6828 6846 _(b'end filenames with NUL, for use with xargs'),
6829 6847 ),
6830 6848 (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')),
6831 6849 (
6832 6850 b'',
6833 6851 b'change',
6834 6852 b'',
6835 6853 _(b'list the changed files of a revision'),
6836 6854 _(b'REV'),
6837 6855 ),
6838 6856 ]
6839 6857 + walkopts
6840 6858 + subrepoopts
6841 6859 + formatteropts,
6842 6860 _(b'[OPTION]... [FILE]...'),
6843 6861 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6844 6862 helpbasic=True,
6845 6863 inferrepo=True,
6846 6864 intents={INTENT_READONLY},
6847 6865 )
6848 6866 def status(ui, repo, *pats, **opts):
6849 6867 """show changed files in the working directory
6850 6868
6851 6869 Show status of files in the repository. If names are given, only
6852 6870 files that match are shown. Files that are clean or ignored or
6853 6871 the source of a copy/move operation, are not listed unless
6854 6872     the source of a copy/move operation are not listed unless
6855 6873 Unless options described with "show only ..." are given, the
6856 6874 options -mardu are used.
6857 6875
6858 6876 Option -q/--quiet hides untracked (unknown and ignored) files
6859 6877 unless explicitly requested with -u/--unknown or -i/--ignored.
6860 6878
6861 6879 .. note::
6862 6880
6863 6881 :hg:`status` may appear to disagree with diff if permissions have
6864 6882 changed or a merge has occurred. The standard diff format does
6865 6883 not report permission changes and diff only reports changes
6866 6884 relative to one merge parent.
6867 6885
6868 6886 If one revision is given, it is used as the base revision.
6869 6887 If two revisions are given, the differences between them are
6870 6888 shown. The --change option can also be used as a shortcut to list
6871 6889 the changed files of a revision from its first parent.
6872 6890
6873 6891 The codes used to show the status of files are::
6874 6892
6875 6893 M = modified
6876 6894 A = added
6877 6895 R = removed
6878 6896 C = clean
6879 6897 ! = missing (deleted by non-hg command, but still tracked)
6880 6898 ? = not tracked
6881 6899 I = ignored
6882 6900 = origin of the previous file (with --copies)
6883 6901
6884 6902 .. container:: verbose
6885 6903
6886 6904 The -t/--terse option abbreviates the output by showing only the directory
6887 6905 name if all the files in it share the same status. The option takes an
6888 6906 argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
6889 6907 for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
6890 6908 for 'ignored' and 'c' for clean.
6891 6909     for 'ignored' and 'c' for 'clean'.
6892 6910 It abbreviates only those statuses which are passed. Note that clean and
6893 6911 ignored files are not displayed with '--terse ic' unless the -c/--clean
6894 6912 and -i/--ignored options are also used.
6895 6913
6896 6914 The -v/--verbose option shows information when the repository is in an
6897 6915 unfinished merge, shelve, rebase state etc. You can have this behavior
6898 6916 turned on by default by enabling the ``commands.status.verbose`` option.
6899 6917
6900 6918 You can skip displaying some of these states by setting
6901 6919 ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
6902 6920 'histedit', 'merge', 'rebase', or 'unshelve'.
6903 6921
6904 6922 Template:
6905 6923
6906 6924 The following keywords are supported in addition to the common template
6907 6925 keywords and functions. See also :hg:`help templates`.
6908 6926
6909 6927 :path: String. Repository-absolute path of the file.
6910 6928 :source: String. Repository-absolute path of the file originated from.
6911 6929 Available if ``--copies`` is specified.
6912 6930 :status: String. Character denoting file's status.
6913 6931
6914 6932 Examples:
6915 6933
6916 6934 - show changes in the working directory relative to a
6917 6935 changeset::
6918 6936
6919 6937 hg status --rev 9353
6920 6938
6921 6939 - show changes in the working directory relative to the
6922 6940 current directory (see :hg:`help patterns` for more information)::
6923 6941
6924 6942 hg status re:
6925 6943
6926 6944 - show all changes including copies in an existing changeset::
6927 6945
6928 6946 hg status --copies --change 9353
6929 6947
6930 6948 - get a NUL separated list of added files, suitable for xargs::
6931 6949
6932 6950 hg status -an0
6933 6951
6934 6952 - show more information about the repository status, abbreviating
6935 6953 added, removed, modified, deleted, and untracked paths::
6936 6954
6937 6955 hg status -v -t mardu
6938 6956
6939 6957 Returns 0 on success.
6940 6958
6941 6959 """
6942 6960
6943 6961 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
6944 6962 opts = pycompat.byteskwargs(opts)
6945 6963 revs = opts.get(b'rev', [])
6946 6964 change = opts.get(b'change', b'')
6947 6965 terse = opts.get(b'terse', _NOTTERSE)
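    # _NOTTERSE distinguishes "--terse not given" from an explicit value: in
    # that case fall back to the config default, except with --rev where
    # terse output is disabled.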
6948 6966 if terse is _NOTTERSE:
6949 6967 if revs:
6950 6968 terse = b''
6951 6969 else:
6952 6970 terse = ui.config(b'commands', b'status.terse')
6953 6971
6954 6972 if revs and terse:
6955 6973 msg = _(b'cannot use --terse with --rev')
6956 6974 raise error.InputError(msg)
6957 6975 elif change:
6958 6976 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
6959 6977 ctx2 = logcmdutil.revsingle(repo, change, None)
6960 6978 ctx1 = ctx2.p1()
6961 6979 else:
6962 6980 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
6963 6981 ctx1, ctx2 = logcmdutil.revpair(repo, revs)
6964 6982
6965 6983 forcerelativevalue = None
6966 6984 if ui.hasconfig(b'commands', b'status.relative'):
6967 6985 forcerelativevalue = ui.configbool(b'commands', b'status.relative')
6968 6986 uipathfn = scmutil.getuipathfn(
6969 6987 repo,
6970 6988 legacyrelativevalue=bool(pats),
6971 6989 forcerelativevalue=forcerelativevalue,
6972 6990 )
6973 6991
6974 6992 if opts.get(b'print0'):
6975 6993 end = b'\0'
6976 6994 else:
6977 6995 end = b'\n'
6978 6996 states = b'modified added removed deleted unknown ignored clean'.split()
6979 6997 show = [k for k in states if opts.get(k)]
6980 6998 if opts.get(b'all'):
6981 6999 show += ui.quiet and (states[:4] + [b'clean']) or states
6982 7000
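    # Without an explicit selection, default to -mardu (modified, added,
    # removed, deleted, unknown), dropping unknown files under --quiet.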
6983 7001 if not show:
6984 7002 if ui.quiet:
6985 7003 show = states[:4]
6986 7004 else:
6987 7005 show = states[:5]
6988 7006
6989 7007 m = scmutil.match(ctx2, pats, opts)
6990 7008 if terse:
6991 7009 # we need to compute clean and unknown to terse
6992 7010 stat = repo.status(
6993 7011 ctx1.node(),
6994 7012 ctx2.node(),
6995 7013 m,
6996 7014 b'ignored' in show or b'i' in terse,
6997 7015 clean=True,
6998 7016 unknown=True,
6999 7017 listsubrepos=opts.get(b'subrepos'),
7000 7018 )
7001 7019
7002 7020 stat = cmdutil.tersedir(stat, terse)
7003 7021 else:
7004 7022 stat = repo.status(
7005 7023 ctx1.node(),
7006 7024 ctx2.node(),
7007 7025 m,
7008 7026 b'ignored' in show,
7009 7027 b'clean' in show,
7010 7028 b'unknown' in show,
7011 7029 opts.get(b'subrepos'),
7012 7030 )
7013 7031
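    # Pair each state name with its one-letter status code ('M', 'A', 'R',
    # '!', '?', 'I', 'C') and the corresponding file list from the status.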
7014 7032 changestates = zip(
7015 7033 states,
7016 7034 pycompat.iterbytestr(b'MAR!?IC'),
7017 7035 [getattr(stat, s.decode('utf8')) for s in states],
7018 7036 )
7019 7037
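    # Copy/rename sources are only resolved when requested (ui.statuscopies,
    # --copies or --all) and are suppressed by --no-status.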
7020 7038 copy = {}
7021 7039 show_copies = ui.configbool(b'ui', b'statuscopies')
7022 7040 if opts.get(b'copies') is not None:
7023 7041 show_copies = opts.get(b'copies')
7024 7042 show_copies = (show_copies or opts.get(b'all')) and not opts.get(
7025 7043 b'no_status'
7026 7044 )
7027 7045 if show_copies:
7028 7046 copy = copies.pathcopies(ctx1, ctx2, m)
7029 7047
7030 7048 morestatus = None
7031 7049 if (
7032 7050 (ui.verbose or ui.configbool(b'commands', b'status.verbose'))
7033 7051 and not ui.plain()
7034 7052 and not opts.get(b'print0')
7035 7053 ):
7036 7054 morestatus = cmdutil.readmorestatus(repo)
7037 7055
7038 7056 ui.pager(b'status')
7039 7057 fm = ui.formatter(b'status', opts)
7040 7058 fmt = b'%s' + end
7041 7059 showchar = not opts.get(b'no_status')
7042 7060
7043 7061 for state, char, files in changestates:
7044 7062 if state in show:
7045 7063 label = b'status.' + state
7046 7064 for f in files:
7047 7065 fm.startitem()
7048 7066 fm.context(ctx=ctx2)
7049 7067 fm.data(itemtype=b'file', path=f)
7050 7068 fm.condwrite(showchar, b'status', b'%s ', char, label=label)
7051 7069 fm.plain(fmt % uipathfn(f), label=label)
7052 7070 if f in copy:
7053 7071 fm.data(source=copy[f])
7054 7072 fm.plain(
7055 7073 (b' %s' + end) % uipathfn(copy[f]),
7056 7074 label=b'status.copied',
7057 7075 )
7058 7076 if morestatus:
7059 7077 morestatus.formatfile(f, fm)
7060 7078
7061 7079 if morestatus:
7062 7080 morestatus.formatfooter(fm)
7063 7081 fm.end()
7064 7082
7065 7083
7066 7084 @command(
7067 7085 b'summary|sum',
7068 7086 [(b'', b'remote', None, _(b'check for push and pull'))],
7069 7087 b'[--remote]',
7070 7088 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7071 7089 helpbasic=True,
7072 7090 intents={INTENT_READONLY},
7073 7091 )
7074 7092 def summary(ui, repo, **opts):
7075 7093 """summarize working directory state
7076 7094
7077 7095 This generates a brief summary of the working directory state,
7078 7096 including parents, branch, commit status, phase and available updates.
7079 7097
7080 7098 With the --remote option, this will check the default paths for
7081 7099 incoming and outgoing changes. This can be time-consuming.
7082 7100
7083 7101 Returns 0 on success.
7084 7102 """
7085 7103
7086 7104 opts = pycompat.byteskwargs(opts)
7087 7105 ui.pager(b'summary')
7088 7106 ctx = repo[None]
7089 7107 parents = ctx.parents()
7090 7108 pnode = parents[0].node()
7091 7109 marks = []
7092 7110
7093 7111 try:
7094 7112 ms = mergestatemod.mergestate.read(repo)
7095 7113 except error.UnsupportedMergeRecords as e:
7096 7114 s = b' '.join(e.recordtypes)
7097 7115 ui.warn(
7098 7116 _(b'warning: merge state has unsupported record types: %s\n') % s
7099 7117 )
7100 7118 unresolved = []
7101 7119 else:
7102 7120 unresolved = list(ms.unresolved())
7103 7121
7104 7122 for p in parents:
7105 7123 # label with log.changeset (instead of log.parent) since this
7106 7124 # shows a working directory parent *changeset*:
7107 7125 # i18n: column positioning for "hg summary"
7108 7126 ui.write(
7109 7127 _(b'parent: %d:%s ') % (p.rev(), p),
7110 7128 label=logcmdutil.changesetlabels(p),
7111 7129 )
7112 7130 ui.write(b' '.join(p.tags()), label=b'log.tag')
7113 7131 if p.bookmarks():
7114 7132 marks.extend(p.bookmarks())
7115 7133 if p.rev() == -1:
7116 7134 if not len(repo):
7117 7135 ui.write(_(b' (empty repository)'))
7118 7136 else:
7119 7137 ui.write(_(b' (no revision checked out)'))
7120 7138 if p.obsolete():
7121 7139 ui.write(_(b' (obsolete)'))
7122 7140 if p.isunstable():
7123 7141 instabilities = (
7124 7142 ui.label(instability, b'trouble.%s' % instability)
7125 7143 for instability in p.instabilities()
7126 7144 )
7127 7145 ui.write(b' (' + b', '.join(instabilities) + b')')
7128 7146 ui.write(b'\n')
7129 7147 if p.description():
7130 7148 ui.status(
7131 7149 b' ' + p.description().splitlines()[0].strip() + b'\n',
7132 7150 label=b'log.summary',
7133 7151 )
7134 7152
7135 7153 branch = ctx.branch()
7136 7154 bheads = repo.branchheads(branch)
7137 7155 # i18n: column positioning for "hg summary"
7138 7156 m = _(b'branch: %s\n') % branch
7139 7157 if branch != b'default':
7140 7158 ui.write(m, label=b'log.branch')
7141 7159 else:
7142 7160 ui.status(m, label=b'log.branch')
7143 7161
7144 7162 if marks:
7145 7163 active = repo._activebookmark
7146 7164 # i18n: column positioning for "hg summary"
7147 7165 ui.write(_(b'bookmarks:'), label=b'log.bookmark')
7148 7166 if active is not None:
7149 7167 if active in marks:
7150 7168 ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
7151 7169 marks.remove(active)
7152 7170 else:
7153 7171 ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
7154 7172 for m in marks:
7155 7173 ui.write(b' ' + m, label=b'log.bookmark')
7156 7174 ui.write(b'\n', label=b'log.bookmark')
7157 7175
7158 7176 status = repo.status(unknown=True)
7159 7177
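    # Split dirstate copies into renames (the source was removed from the
    # working copy) and plain copies so they can be reported separately.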
7160 7178 c = repo.dirstate.copies()
7161 7179 copied, renamed = [], []
7162 7180 for d, s in c.items():
7163 7181 if s in status.removed:
7164 7182 status.removed.remove(s)
7165 7183 renamed.append(d)
7166 7184 else:
7167 7185 copied.append(d)
7168 7186 if d in status.added:
7169 7187 status.added.remove(d)
7170 7188
7171 7189 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
7172 7190
7173 7191 labels = [
7174 7192 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
7175 7193 (ui.label(_(b'%d added'), b'status.added'), status.added),
7176 7194 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
7177 7195 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
7178 7196 (ui.label(_(b'%d copied'), b'status.copied'), copied),
7179 7197 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
7180 7198 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
7181 7199 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
7182 7200 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
7183 7201 ]
7184 7202 t = []
7185 7203 for l, s in labels:
7186 7204 if s:
7187 7205 t.append(l % len(s))
7188 7206
7189 7207 t = b', '.join(t)
7190 7208 cleanworkdir = False
7191 7209
7192 7210 if repo.vfs.exists(b'graftstate'):
7193 7211 t += _(b' (graft in progress)')
7194 7212 if repo.vfs.exists(b'updatestate'):
7195 7213 t += _(b' (interrupted update)')
7196 7214 elif len(parents) > 1:
7197 7215 t += _(b' (merge)')
7198 7216 elif branch != parents[0].branch():
7199 7217 t += _(b' (new branch)')
7200 7218 elif parents[0].closesbranch() and pnode in repo.branchheads(
7201 7219 branch, closed=True
7202 7220 ):
7203 7221 t += _(b' (head closed)')
7204 7222 elif not (
7205 7223 status.modified
7206 7224 or status.added
7207 7225 or status.removed
7208 7226 or renamed
7209 7227 or copied
7210 7228 or subs
7211 7229 ):
7212 7230 t += _(b' (clean)')
7213 7231 cleanworkdir = True
7214 7232 elif pnode not in bheads:
7215 7233 t += _(b' (new branch head)')
7216 7234
7217 7235 if parents:
7218 7236 pendingphase = max(p.phase() for p in parents)
7219 7237 else:
7220 7238 pendingphase = phases.public
7221 7239
7222 7240 if pendingphase > phases.newcommitphase(ui):
7223 7241 t += b' (%s)' % phases.phasenames[pendingphase]
7224 7242
7225 7243 if cleanworkdir:
7226 7244 # i18n: column positioning for "hg summary"
7227 7245 ui.status(_(b'commit: %s\n') % t.strip())
7228 7246 else:
7229 7247 # i18n: column positioning for "hg summary"
7230 7248 ui.write(_(b'commit: %s\n') % t.strip())
7231 7249
7232 7250 # all ancestors of branch heads - all ancestors of parent = new csets
7233 7251 new = len(
7234 7252 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
7235 7253 )
7236 7254
7237 7255 if new == 0:
7238 7256 # i18n: column positioning for "hg summary"
7239 7257 ui.status(_(b'update: (current)\n'))
7240 7258 elif pnode not in bheads:
7241 7259 # i18n: column positioning for "hg summary"
7242 7260 ui.write(_(b'update: %d new changesets (update)\n') % new)
7243 7261 else:
7244 7262 # i18n: column positioning for "hg summary"
7245 7263 ui.write(
7246 7264 _(b'update: %d new changesets, %d branch heads (merge)\n')
7247 7265 % (new, len(bheads))
7248 7266 )
7249 7267
7250 7268 t = []
7251 7269 draft = len(repo.revs(b'draft()'))
7252 7270 if draft:
7253 7271 t.append(_(b'%d draft') % draft)
7254 7272 secret = len(repo.revs(b'secret()'))
7255 7273 if secret:
7256 7274 t.append(_(b'%d secret') % secret)
7257 7275
7258 7276 if draft or secret:
7259 7277 ui.status(_(b'phases: %s\n') % b', '.join(t))
7260 7278
7261 7279 if obsolete.isenabled(repo, obsolete.createmarkersopt):
7262 7280 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
7263 7281 numtrouble = len(repo.revs(trouble + b"()"))
7264 7282 # We write all the possibilities to ease translation
7265 7283 troublemsg = {
7266 7284 b"orphan": _(b"orphan: %d changesets"),
7267 7285 b"contentdivergent": _(b"content-divergent: %d changesets"),
7268 7286 b"phasedivergent": _(b"phase-divergent: %d changesets"),
7269 7287 }
7270 7288 if numtrouble > 0:
7271 7289 ui.status(troublemsg[trouble] % numtrouble + b"\n")
7272 7290
7273 7291 cmdutil.summaryhooks(ui, repo)
7274 7292
7275 7293 if opts.get(b'remote'):
7276 7294 needsincoming, needsoutgoing = True, True
7277 7295 else:
7278 7296 needsincoming, needsoutgoing = False, False
7279 7297 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
7280 7298 if i:
7281 7299 needsincoming = True
7282 7300 if o:
7283 7301 needsoutgoing = True
7284 7302 if not needsincoming and not needsoutgoing:
7285 7303 return
7286 7304
7287 7305 def getincoming():
7288 7306 # XXX We should actually skip this if no default is specified, instead
7289 7307 # of passing "default" which will resolve as "./default/" if no default
7290 7308 # path is defined.
7291 7309 path = urlutil.get_unique_pull_path_obj(b'summary', ui, b'default')
7292 7310 sbranch = path.branch
7293 7311 try:
7294 7312 other = hg.peer(repo, {}, path)
7295 7313 except error.RepoError:
7296 7314 if opts.get(b'remote'):
7297 7315 raise
7298 7316 return path.loc, sbranch, None, None, None
7299 7317 branches = (path.branch, [])
7300 7318 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
7301 7319 if revs:
7302 7320 revs = [other.lookup(rev) for rev in revs]
7303 7321 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(path.loc))
7304 7322 with repo.ui.silent():
7305 7323 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
7306 7324 return path.loc, sbranch, other, commoninc, commoninc[1]
7307 7325
7308 7326 if needsincoming:
7309 7327 source, sbranch, sother, commoninc, incoming = getincoming()
7310 7328 else:
7311 7329 source = sbranch = sother = commoninc = incoming = None
7312 7330
7313 7331 def getoutgoing():
7314 7332 # XXX We should actually skip this if no default is specified, instead
7315 7333 # of passing "default" which will resolve as "./default/" if no default
7316 7334 # path is defined.
7317 7335 d = None
7318 7336 if b'default-push' in ui.paths:
7319 7337 d = b'default-push'
7320 7338 elif b'default' in ui.paths:
7321 7339 d = b'default'
7322 7340 path = None
7323 7341 if d is not None:
7324 7342 path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
7325 7343 dest = path.loc
7326 7344 dbranch = path.branch
7327 7345 else:
7328 7346 dest = b'default'
7329 7347 dbranch = None
7330 7348 revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
7331 7349 if source != dest:
7332 7350 try:
7333 7351 dother = hg.peer(repo, {}, path if path is not None else dest)
7334 7352 except error.RepoError:
7335 7353 if opts.get(b'remote'):
7336 7354 raise
7337 7355 return dest, dbranch, None, None
7338 7356 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest))
7339 7357 elif sother is None:
7340 7358 # there is no explicit destination peer, but the source one is invalid
7341 7359 return dest, dbranch, None, None
7342 7360 else:
7343 7361 dother = sother
7344 7362 if source != dest or (sbranch is not None and sbranch != dbranch):
7345 7363 common = None
7346 7364 else:
7347 7365 common = commoninc
7348 7366 if revs:
7349 7367 revs = [repo.lookup(rev) for rev in revs]
7350 7368 with repo.ui.silent():
7351 7369 outgoing = discovery.findcommonoutgoing(
7352 7370 repo, dother, onlyheads=revs, commoninc=common
7353 7371 )
7354 7372 return dest, dbranch, dother, outgoing
7355 7373
7356 7374 if needsoutgoing:
7357 7375 dest, dbranch, dother, outgoing = getoutgoing()
7358 7376 else:
7359 7377 dest = dbranch = dother = outgoing = None
7360 7378
7361 7379 if opts.get(b'remote'):
7362 7380 # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`.
7363 7381 # The former always sets `sother` (or raises an exception if it can't);
7364 7382 # the latter always sets `outgoing`.
7365 7383 assert sother is not None
7366 7384 assert outgoing is not None
7367 7385
7368 7386 t = []
7369 7387 if incoming:
7370 7388 t.append(_(b'1 or more incoming'))
7371 7389 o = outgoing.missing
7372 7390 if o:
7373 7391 t.append(_(b'%d outgoing') % len(o))
7374 7392 other = dother or sother
7375 7393 if b'bookmarks' in other.listkeys(b'namespaces'):
7376 7394 counts = bookmarks.summary(repo, other)
7377 7395 if counts[0] > 0:
7378 7396 t.append(_(b'%d incoming bookmarks') % counts[0])
7379 7397 if counts[1] > 0:
7380 7398 t.append(_(b'%d outgoing bookmarks') % counts[1])
7381 7399
7382 7400 if t:
7383 7401 # i18n: column positioning for "hg summary"
7384 7402 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7385 7403 else:
7386 7404 # i18n: column positioning for "hg summary"
7387 7405 ui.status(_(b'remote: (synced)\n'))
7388 7406
7389 7407 cmdutil.summaryremotehooks(
7390 7408 ui,
7391 7409 repo,
7392 7410 opts,
7393 7411 (
7394 7412 (source, sbranch, sother, commoninc),
7395 7413 (dest, dbranch, dother, outgoing),
7396 7414 ),
7397 7415 )
7398 7416
7399 7417
7400 7418 @command(
7401 7419 b'tag',
7402 7420 [
7403 7421 (b'f', b'force', None, _(b'force tag')),
7404 7422 (b'l', b'local', None, _(b'make the tag local')),
7405 7423 (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
7406 7424 (b'', b'remove', None, _(b'remove a tag')),
7407 7425 # -l/--local is already there, commitopts cannot be used
7408 7426 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
7409 7427 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
7410 7428 ]
7411 7429 + commitopts2,
7412 7430 _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
7413 7431 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7414 7432 )
7415 7433 def tag(ui, repo, name1, *names, **opts):
7416 7434 """add one or more tags for the current or given revision
7417 7435
7418 7436 Name a particular revision using <name>.
7419 7437
7420 7438 Tags are used to name particular revisions of the repository and are
7421 7439 very useful to compare different revisions, to go back to significant
7422 7440 earlier versions or to mark branch points as releases, etc. Changing
7423 7441 an existing tag is normally disallowed; use -f/--force to override.
7424 7442
7425 7443 If no revision is given, the parent of the working directory is
7426 7444 used.
7427 7445
7428 7446 To facilitate version control, distribution, and merging of tags,
7429 7447 they are stored as a file named ".hgtags" which is managed similarly
7430 7448 to other project files and can be hand-edited if necessary. This
7431 7449 also means that tagging creates a new commit. The file
7432 7450 ".hg/localtags" is used for local tags (not shared among
7433 7451 repositories).
7434 7452
7435 7453 Tag commits are usually made at the head of a branch. If the parent
7436 7454 of the working directory is not a branch head, :hg:`tag` aborts; use
7437 7455 -f/--force to force the tag commit to be based on a non-head
7438 7456 changeset.
7439 7457
7440 7458 See :hg:`help dates` for a list of formats valid for -d/--date.
7441 7459
7442 7460 Since tag names have priority over branch names during revision
7443 7461 lookup, using an existing branch name as a tag name is discouraged.
7444 7462
7445 7463 Returns 0 on success.
7446 7464 """
7447 7465 cmdutil.check_incompatible_arguments(opts, 'remove', ['rev'])
7448 7466 opts = pycompat.byteskwargs(opts)
7449 7467 with repo.wlock(), repo.lock():
7450 7468 rev_ = b"."
7451 7469 names = [t.strip() for t in (name1,) + names]
7452 7470 if len(names) != len(set(names)):
7453 7471 raise error.InputError(_(b'tag names must be unique'))
7454 7472 for n in names:
7455 7473 scmutil.checknewlabel(repo, n, b'tag')
7456 7474 if not n:
7457 7475 raise error.InputError(
7458 7476 _(b'tag names cannot consist entirely of whitespace')
7459 7477 )
7460 7478 if opts.get(b'rev'):
7461 7479 rev_ = opts[b'rev']
7462 7480 message = opts.get(b'message')
7463 7481 if opts.get(b'remove'):
7464 7482 if opts.get(b'local'):
7465 7483 expectedtype = b'local'
7466 7484 else:
7467 7485 expectedtype = b'global'
7468 7486
7469 7487 for n in names:
7470 7488 if repo.tagtype(n) == b'global':
7471 7489 alltags = tagsmod.findglobaltags(ui, repo)
7472 7490 if alltags[n][0] == repo.nullid:
7473 7491 raise error.InputError(
7474 7492 _(b"tag '%s' is already removed") % n
7475 7493 )
7476 7494 if not repo.tagtype(n):
7477 7495 raise error.InputError(_(b"tag '%s' does not exist") % n)
7478 7496 if repo.tagtype(n) != expectedtype:
7479 7497 if expectedtype == b'global':
7480 7498 raise error.InputError(
7481 7499 _(b"tag '%s' is not a global tag") % n
7482 7500 )
7483 7501 else:
7484 7502 raise error.InputError(
7485 7503 _(b"tag '%s' is not a local tag") % n
7486 7504 )
7487 7505 rev_ = b'null'
7488 7506 if not message:
7489 7507 # we don't translate commit messages
7490 7508 message = b'Removed tag %s' % b', '.join(names)
7491 7509 elif not opts.get(b'force'):
7492 7510 for n in names:
7493 7511 if n in repo.tags():
7494 7512 raise error.InputError(
7495 7513 _(b"tag '%s' already exists (use -f to force)") % n
7496 7514 )
7497 7515 if not opts.get(b'local'):
7498 7516 p1, p2 = repo.dirstate.parents()
7499 7517 if p2 != repo.nullid:
7500 7518 raise error.StateError(_(b'uncommitted merge'))
7501 7519 bheads = repo.branchheads()
7502 7520 if not opts.get(b'force') and bheads and p1 not in bheads:
7503 7521 raise error.InputError(
7504 7522 _(
7505 7523 b'working directory is not at a branch head '
7506 7524 b'(use -f to force)'
7507 7525 )
7508 7526 )
7509 7527 node = logcmdutil.revsingle(repo, rev_).node()
7510 7528
7511 7529 # don't allow tagging the null rev or the working directory
7512 7530 if node is None:
7513 7531 raise error.InputError(_(b"cannot tag working directory"))
7514 7532 elif not opts.get(b'remove') and node == nullid:
7515 7533 raise error.InputError(_(b"cannot tag null revision"))
7516 7534
7517 7535 if not message:
7518 7536 # we don't translate commit messages
7519 7537 message = b'Added tag %s for changeset %s' % (
7520 7538 b', '.join(names),
7521 7539 short(node),
7522 7540 )
7523 7541
7524 7542 date = opts.get(b'date')
7525 7543 if date:
7526 7544 date = dateutil.parsedate(date)
7527 7545
7528 7546 if opts.get(b'remove'):
7529 7547 editform = b'tag.remove'
7530 7548 else:
7531 7549 editform = b'tag.add'
7532 7550 editor = cmdutil.getcommiteditor(
7533 7551 editform=editform, **pycompat.strkwargs(opts)
7534 7552 )
7535 7553
7536 7554 tagsmod.tag(
7537 7555 repo,
7538 7556 names,
7539 7557 node,
7540 7558 message,
7541 7559 opts.get(b'local'),
7542 7560 opts.get(b'user'),
7543 7561 date,
7544 7562 editor=editor,
7545 7563 )
7546 7564
7547 7565
7548 7566 @command(
7549 7567 b'tags',
7550 7568 formatteropts,
7551 7569 b'',
7552 7570 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7553 7571 intents={INTENT_READONLY},
7554 7572 )
7555 7573 def tags(ui, repo, **opts):
7556 7574 """list repository tags
7557 7575
7558 7576 This lists both regular and local tags. When the -v/--verbose
7559 7577 switch is used, a third column "local" is printed for local tags.
7560 7578 When the -q/--quiet switch is used, only the tag name is printed.
7561 7579
7562 7580 .. container:: verbose
7563 7581
7564 7582 Template:
7565 7583
7566 7584 The following keywords are supported in addition to the common template
7567 7585 keywords and functions such as ``{tag}``. See also
7568 7586 :hg:`help templates`.
7569 7587
7570 7588 :type: String. ``local`` for local tags.
7571 7589
7572 7590 Returns 0 on success.
7573 7591 """
7574 7592
7575 7593 opts = pycompat.byteskwargs(opts)
7576 7594 ui.pager(b'tags')
7577 7595 fm = ui.formatter(b'tags', opts)
7578 7596 hexfunc = fm.hexfunc
7579 7597
7580 7598 for t, n in reversed(repo.tagslist()):
7581 7599 hn = hexfunc(n)
7582 7600 label = b'tags.normal'
7583 7601 tagtype = repo.tagtype(t)
7584 7602 if not tagtype or tagtype == b'global':
7585 7603 tagtype = b''
7586 7604 else:
7587 7605 label = b'tags.' + tagtype
7588 7606
7589 7607 fm.startitem()
7590 7608 fm.context(repo=repo)
7591 7609 fm.write(b'tag', b'%s', t, label=label)
7592 7610 fmt = b" " * (30 - encoding.colwidth(t)) + b' %5d:%s'
7593 7611 fm.condwrite(
7594 7612 not ui.quiet,
7595 7613 b'rev node',
7596 7614 fmt,
7597 7615 repo.changelog.rev(n),
7598 7616 hn,
7599 7617 label=label,
7600 7618 )
7601 7619 fm.condwrite(
7602 7620 ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
7603 7621 )
7604 7622 fm.plain(b'\n')
7605 7623 fm.end()
7606 7624
7607 7625
7608 7626 @command(
7609 7627 b'tip',
7610 7628 [
7611 7629 (b'p', b'patch', None, _(b'show patch')),
7612 7630 (b'g', b'git', None, _(b'use git extended diff format')),
7613 7631 ]
7614 7632 + templateopts,
7615 7633 _(b'[-p] [-g]'),
7616 7634 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
7617 7635 )
7618 7636 def tip(ui, repo, **opts):
7619 7637 """show the tip revision (DEPRECATED)
7620 7638
7621 7639 The tip revision (usually just called the tip) is the changeset
7622 7640 most recently added to the repository (and therefore the most
7623 7641 recently changed head).
7624 7642
7625 7643 If you have just made a commit, that commit will be the tip. If
7626 7644 you have just pulled changes from another repository, the tip of
7627 7645 that repository becomes the current tip. The "tip" tag is special
7628 7646 and cannot be renamed or assigned to a different changeset.
7629 7647
7630 7648 This command is deprecated, please use :hg:`heads` instead.
7631 7649
7632 7650 Returns 0 on success.
7633 7651 """
7634 7652 opts = pycompat.byteskwargs(opts)
7635 7653 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
7636 7654 displayer.show(repo[b'tip'])
7637 7655 displayer.close()
7638 7656
7639 7657
7640 7658 @command(
7641 7659 b'unbundle',
7642 7660 [
7643 7661 (
7644 7662 b'u',
7645 7663 b'update',
7646 7664 None,
7647 7665 _(b'update to new branch head if changesets were unbundled'),
7648 7666 )
7649 7667 ],
7650 7668 _(b'[-u] FILE...'),
7651 7669 helpcategory=command.CATEGORY_IMPORT_EXPORT,
7652 7670 )
7653 7671 def unbundle(ui, repo, fname1, *fnames, **opts):
7654 7672 """apply one or more bundle files
7655 7673
7656 7674 Apply one or more bundle files generated by :hg:`bundle`.
7657 7675
7658 7676 Returns 0 on success, 1 if an update has unresolved files.
7659 7677 """
7660 7678 fnames = (fname1,) + fnames
7661 7679
7662 7680 with repo.lock():
7663 7681 for fname in fnames:
7664 7682 f = hg.openpath(ui, fname)
7665 7683 gen = exchange.readbundle(ui, f, fname)
7666 7684 if isinstance(gen, streamclone.streamcloneapplier):
7667 7685 raise error.InputError(
7668 7686 _(
7669 7687 b'packed bundles cannot be applied with '
7670 7688 b'"hg unbundle"'
7671 7689 ),
7672 7690 hint=_(b'use "hg debugapplystreamclonebundle"'),
7673 7691 )
7674 7692 url = b'bundle:' + fname
7675 7693 try:
7676 7694 txnname = b'unbundle'
7677 7695 if not isinstance(gen, bundle2.unbundle20):
7678 7696 txnname = b'unbundle\n%s' % urlutil.hidepassword(url)
7679 7697 with repo.transaction(txnname) as tr:
7680 7698 op = bundle2.applybundle(
7681 7699 repo, gen, tr, source=b'unbundle', url=url
7682 7700 )
7683 7701 except error.BundleUnknownFeatureError as exc:
7684 7702 raise error.Abort(
7685 7703 _(b'%s: unknown bundle feature, %s') % (fname, exc),
7686 7704 hint=_(
7687 7705 b"see https://mercurial-scm.org/"
7688 7706 b"wiki/BundleFeature for more "
7689 7707 b"information"
7690 7708 ),
7691 7709 )
7692 7710 modheads = bundle2.combinechangegroupresults(op)
7693 7711
7694 7712 if postincoming(ui, repo, modheads, opts.get('update'), None, None):
7695 7713 return 1
7696 7714 else:
7697 7715 return 0
7698 7716
7699 7717
7700 7718 @command(
7701 7719 b'unshelve',
7702 7720 [
7703 7721 (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
7704 7722 (
7705 7723 b'c',
7706 7724 b'continue',
7707 7725 None,
7708 7726 _(b'continue an incomplete unshelve operation'),
7709 7727 ),
7710 7728 (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
7711 7729 (b'k', b'keep', None, _(b'keep shelve after unshelving')),
7712 7730 (
7713 7731 b'n',
7714 7732 b'name',
7715 7733 b'',
7716 7734 _(b'restore shelved change with given name'),
7717 7735 _(b'NAME'),
7718 7736 ),
7719 7737 (b't', b'tool', b'', _(b'specify merge tool')),
7720 7738 (
7721 7739 b'',
7722 7740 b'date',
7723 7741 b'',
7724 7742 _(b'set date for temporary commits (DEPRECATED)'),
7725 7743 _(b'DATE'),
7726 7744 ),
7727 7745 ],
7728 7746 _(b'hg unshelve [OPTION]... [[-n] SHELVED]'),
7729 7747 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7730 7748 )
7731 7749 def unshelve(ui, repo, *shelved, **opts):
7732 7750 """restore a shelved change to the working directory
7733 7751
7734 7752 This command accepts an optional name of a shelved change to
7735 7753 restore. If none is given, the most recent shelved change is used.
7736 7754
7737 7755 If a shelved change is applied successfully, the bundle that
7738 7756 contains the shelved changes is moved to a backup location
7739 7757 (.hg/shelve-backup).
7740 7758
7741 7759 Since you can restore a shelved change on top of an arbitrary
7742 7760 commit, it is possible that unshelving will result in a conflict
7743 7761 between your changes and the commits you are unshelving onto. If
7744 7762 this occurs, you must resolve the conflict, then use
7745 7763 ``--continue`` to complete the unshelve operation. (The bundle
7746 7764 will not be moved until you successfully complete the unshelve.)
7747 7765
7748 7766 (Alternatively, you can use ``--abort`` to abandon an unshelve
7749 7767 that causes a conflict. This reverts the unshelved changes, and
7750 7768 leaves the bundle in place.)
7751 7769
7752 7770 If bare shelved change (without interactive, include and exclude
7753 7771 option) was done on newly created branch it would restore branch
7754 7772 information to the working directory.
7755 7773
7756 7774 After a successful unshelve, the shelved changes are stored in a
7757 7775 backup directory. Only the N most recent backups are kept. N
7758 7776 defaults to 10 but can be overridden using the ``shelve.maxbackups``
7759 7777 configuration option.
7760 7778
7761 7779 .. container:: verbose
7762 7780
7763 7781 The timestamp in seconds is used to decide the order of backups. More
7764 7782 than ``maxbackups`` backups are kept if identical timestamps prevent
7765 7783 deciding their exact order, for safety.
7766 7784
7767 7785 Selected changes can be unshelved with ``--interactive`` flag.
7768 7786 The working directory is updated with the selected changes, and
7769 7787 only the unselected changes remain shelved.
7770 7788 Note: The whole shelve is applied to the working directory first, before
7771 7789 running interactively. So this will bring up all the conflicts between the
7772 7790 working directory and the shelve, irrespective of which changes will be
7773 7791 unshelved.
7774 7792 """
7775 7793 with repo.wlock():
7776 7794 return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
7777 7795
7778 7796
7779 7797 statemod.addunfinished(
7780 7798 b'unshelve',
7781 7799 fname=b'shelvedstate',
7782 7800 continueflag=True,
7783 7801 abortfunc=shelvemod.hgabortunshelve,
7784 7802 continuefunc=shelvemod.hgcontinueunshelve,
7785 7803 cmdmsg=_(b'unshelve already in progress'),
7786 7804 )
7787 7805
7788 7806
7789 7807 @command(
7790 7808 b'update|up|checkout|co',
7791 7809 [
7792 7810 (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
7793 7811 (b'c', b'check', None, _(b'require clean working directory')),
7794 7812 (b'm', b'merge', None, _(b'merge uncommitted changes')),
7795 7813 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
7796 7814 (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
7797 7815 ]
7798 7816 + mergetoolopts,
7799 7817 _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
7800 7818 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7801 7819 helpbasic=True,
7802 7820 )
7803 7821 def update(ui, repo, node=None, **opts):
7804 7822 """update working directory (or switch revisions)
7805 7823
7806 7824 Update the repository's working directory to the specified
7807 7825 changeset. If no changeset is specified, update to the tip of the
7808 7826 current named branch and move the active bookmark (see :hg:`help
7809 7827 bookmarks`).
7810 7828
7811 7829 Update sets the working directory's parent revision to the specified
7812 7830 changeset (see :hg:`help parents`).
7813 7831
7814 7832 If the changeset is not a descendant or ancestor of the working
7815 7833 directory's parent and there are uncommitted changes, the update is
7816 7834 aborted. With the -c/--check option, the working directory is checked
7817 7835 for uncommitted changes; if none are found, the working directory is
7818 7836 updated to the specified changeset.
7819 7837
7820 7838 .. container:: verbose
7821 7839
7822 7840 The -C/--clean, -c/--check, and -m/--merge options control what
7823 7841 happens if the working directory contains uncommitted changes.
7824 7842 At most one of them can be specified.
7825 7843
7826 7844 1. If no option is specified, and if
7827 7845 the requested changeset is an ancestor or descendant of
7828 7846 the working directory's parent, the uncommitted changes
7829 7847 are merged into the requested changeset and the merged
7830 7848 result is left uncommitted. If the requested changeset is
7831 7849 not an ancestor or descendant (that is, it is on another
7832 7850 branch), the update is aborted and the uncommitted changes
7833 7851 are preserved.
7834 7852
7835 7853 2. With the -m/--merge option, the update is allowed even if the
7836 7854 requested changeset is not an ancestor or descendant of
7837 7855 the working directory's parent.
7838 7856
7839 7857 3. With the -c/--check option, the update is aborted and the
7840 7858 uncommitted changes are preserved.
7841 7859
7842 7860 4. With the -C/--clean option, uncommitted changes are discarded and
7843 7861 the working directory is updated to the requested changeset.
7844 7862
7845 7863 To cancel an uncommitted merge (and lose your changes), use
7846 7864 :hg:`merge --abort`.
7847 7865
7848 7866 Use null as the changeset to remove the working directory (like
7849 7867 :hg:`clone -U`).
7850 7868
7851 7869 If you want to revert just one file to an older revision, use
7852 7870 :hg:`revert [-r REV] NAME`.
7853 7871
7854 7872 See :hg:`help dates` for a list of formats valid for -d/--date.
7855 7873
7856 7874 Returns 0 on success, 1 if there are unresolved files.
7857 7875 """
7858 7876 cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
7859 7877 rev = opts.get('rev')
7860 7878 date = opts.get('date')
7861 7879 clean = opts.get('clean')
7862 7880 check = opts.get('check')
7863 7881 merge = opts.get('merge')
7864 7882 if rev and node:
7865 7883 raise error.InputError(_(b"please specify just one revision"))
7866 7884
7867 7885 if ui.configbool(b'commands', b'update.requiredest'):
7868 7886 if not node and not rev and not date:
7869 7887 raise error.InputError(
7870 7888 _(b'you must specify a destination'),
7871 7889 hint=_(b'for example: hg update ".::"'),
7872 7890 )
7873 7891
7874 7892 if rev is None or rev == b'':
7875 7893 rev = node
7876 7894
7877 7895 if date and rev is not None:
7878 7896 raise error.InputError(_(b"you can't specify a revision and a date"))
7879 7897
7880 7898 updatecheck = None
7881 7899 if check or merge is not None and not merge:
7882 7900 updatecheck = b'abort'
7883 7901 elif merge or check is not None and not check:
7884 7902 updatecheck = b'none'
7885 7903
7886 7904 with repo.wlock():
7887 7905 cmdutil.clearunfinished(repo)
7888 7906 if date:
7889 7907 rev = cmdutil.finddate(ui, repo, date)
7890 7908
7891 7909 # if we defined a bookmark, we have to remember the original name
7892 7910 brev = rev
7893 7911 if rev:
7894 7912 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
7895 7913 ctx = logcmdutil.revsingle(repo, rev, default=None)
7896 7914 rev = ctx.rev()
7897 7915 hidden = ctx.hidden()
7898 7916 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
7899 7917 with ui.configoverride(overrides, b'update'):
7900 7918 ret = hg.updatetotally(
7901 7919 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
7902 7920 )
7903 7921 if hidden:
7904 7922 ctxstr = ctx.hex()[:12]
7905 7923 ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
7906 7924
7907 7925 if ctx.obsolete():
7908 7926 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
7909 7927 ui.warn(b"(%s)\n" % obsfatemsg)
7910 7928 return ret
7911 7929
7912 7930
7913 7931 @command(
7914 7932 b'verify',
7915 7933 [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
7916 7934 helpcategory=command.CATEGORY_MAINTENANCE,
7917 7935 )
7918 7936 def verify(ui, repo, **opts):
7919 7937 """verify the integrity of the repository
7920 7938
7921 7939 Verify the integrity of the current repository.
7922 7940
7923 7941 This will perform an extensive check of the repository's
7924 7942 integrity, validating the hashes and checksums of each entry in
7925 7943 the changelog, manifest, and tracked files, as well as the
7926 7944 integrity of their crosslinks and indices.
7927 7945
7928 7946 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7929 7947 for more information about recovery from corruption of the
7930 7948 repository.
7931 7949
7932 7950 Returns 0 on success, 1 if errors are encountered.
7933 7951 """
7934 7952 opts = pycompat.byteskwargs(opts)
7935 7953
7936 7954 level = None
7937 7955 if opts[b'full']:
7938 7956 level = verifymod.VERIFY_FULL
7939 7957 return hg.verify(repo, level)
7940 7958
7941 7959
7942 7960 @command(
7943 7961 b'version',
7944 7962 [] + formatteropts,
7945 7963 helpcategory=command.CATEGORY_HELP,
7946 7964 norepo=True,
7947 7965 intents={INTENT_READONLY},
7948 7966 )
7949 7967 def version_(ui, **opts):
7950 7968 """output version and copyright information
7951 7969
7952 7970 .. container:: verbose
7953 7971
7954 7972 Template:
7955 7973
7956 7974 The following keywords are supported. See also :hg:`help templates`.
7957 7975
7958 7976 :extensions: List of extensions.
7959 7977 :ver: String. Version number.
7960 7978
7961 7979 And each entry of ``{extensions}`` provides the following sub-keywords
7962 7980 in addition to ``{ver}``.
7963 7981
7964 7982 :bundled: Boolean. True if included in the release.
7965 7983 :name: String. Extension name.
7966 7984 """
7967 7985 opts = pycompat.byteskwargs(opts)
7968 7986 if ui.verbose:
7969 7987 ui.pager(b'version')
7970 7988 fm = ui.formatter(b"version", opts)
7971 7989 fm.startitem()
7972 7990 fm.write(
7973 7991 b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
7974 7992 )
7975 7993 license = _(
7976 7994 b"(see https://mercurial-scm.org for more information)\n"
7977 7995 b"\nCopyright (C) 2005-2023 Olivia Mackall and others\n"
7978 7996 b"This is free software; see the source for copying conditions. "
7979 7997 b"There is NO\nwarranty; "
7980 7998 b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7981 7999 )
7982 8000 if not ui.quiet:
7983 8001 fm.plain(license)
7984 8002
7985 8003 if ui.verbose:
7986 8004 fm.plain(_(b"\nEnabled extensions:\n\n"))
7987 8005 # format names and versions into columns
7988 8006 names = []
7989 8007 vers = []
7990 8008 isinternals = []
7991 8009 for name, module in sorted(extensions.extensions()):
7992 8010 names.append(name)
7993 8011 vers.append(extensions.moduleversion(module) or None)
7994 8012 isinternals.append(extensions.ismoduleinternal(module))
7995 8013 fn = fm.nested(b"extensions", tmpl=b'{name}\n')
7996 8014 if names:
7997 8015 namefmt = b" %%-%ds " % max(len(n) for n in names)
7998 8016 places = [_(b"external"), _(b"internal")]
7999 8017 for n, v, p in zip(names, vers, isinternals):
8000 8018 fn.startitem()
8001 8019 fn.condwrite(ui.verbose, b"name", namefmt, n)
8002 8020 if ui.verbose:
8003 8021 fn.plain(b"%s " % places[p])
8004 8022 fn.data(bundled=p)
8005 8023 fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
8006 8024 if ui.verbose:
8007 8025 fn.plain(b"\n")
8008 8026 fn.end()
8009 8027 fm.end()
8010 8028
8011 8029
8012 8030 def loadcmdtable(ui, name, cmdtable):
8013 8031 """Load command functions from specified cmdtable"""
8014 8032 overrides = [cmd for cmd in cmdtable if cmd in table]
8015 8033 if overrides:
8016 8034 ui.warn(
8017 8035 _(b"extension '%s' overrides commands: %s\n")
8018 8036 % (name, b" ".join(overrides))
8019 8037 )
8020 8038 table.update(cmdtable)
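
For context, ``loadcmdtable`` above is the hook through which extensions contribute commands to the main table. A minimal sketch of the extension side, using only the public ``registrar`` API; the command name and its behaviour are hypothetical, purely for illustration:

    from mercurial import registrar
    from mercurial.i18n import _

    cmdtable = {}
    command = registrar.command(cmdtable)


    @command(b'hello', [], _(b'hg hello'))
    def hello(ui, repo, **opts):
        # purely illustrative: greet with the repository root
        ui.write(b'hello from %s\n' % repo.root)
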
@@ -1,1672 +1,1681 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9
10 10 import os
11 11 import posixpath
12 12 import shutil
13 13 import stat
14 14 import weakref
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 sha1nodeconstants,
20 20 short,
21 21 )
22 22 from .pycompat import getattr
23 23
24 24 from . import (
25 25 bookmarks,
26 26 bundlerepo,
27 27 cmdutil,
28 28 destutil,
29 29 discovery,
30 30 error,
31 31 exchange,
32 32 extensions,
33 33 graphmod,
34 34 httppeer,
35 35 localrepo,
36 36 lock,
37 37 logcmdutil,
38 38 logexchange,
39 39 merge as mergemod,
40 40 mergestate as mergestatemod,
41 41 narrowspec,
42 42 phases,
43 43 requirements,
44 44 scmutil,
45 45 sshpeer,
46 46 statichttprepo,
47 47 ui as uimod,
48 48 unionrepo,
49 49 url,
50 50 util,
51 51 verify as verifymod,
52 52 vfs as vfsmod,
53 53 )
54 54 from .interfaces import repository as repositorymod
55 55 from .utils import (
56 56 hashutil,
57 57 stringutil,
58 58 urlutil,
59 59 )
60 60
61 61
62 62 release = lock.release
63 63
64 64 # shared features
65 65 sharedbookmarks = b'bookmarks'
66 66
67 67
68 def addbranchrevs(lrepo, other, branches, revs):
68 def addbranchrevs(lrepo, other, branches, revs, remotehidden=False):
69 69 if util.safehasattr(other, 'peer'):
70 70 # a courtesy to callers using a localrepo for other
71 peer = other.peer()
71 peer = other.peer(remotehidden=remotehidden)
72 72 else:
73 73 peer = other
74 74 hashbranch, branches = branches
75 75 if not hashbranch and not branches:
76 76 x = revs or None
77 77 if revs:
78 78 y = revs[0]
79 79 else:
80 80 y = None
81 81 return x, y
82 82 if revs:
83 83 revs = list(revs)
84 84 else:
85 85 revs = []
86 86
87 87 if not peer.capable(b'branchmap'):
88 88 if branches:
89 89 raise error.Abort(_(b"remote branch lookup not supported"))
90 90 revs.append(hashbranch)
91 91 return revs, revs[0]
92 92
93 93 with peer.commandexecutor() as e:
94 94 branchmap = e.callcommand(b'branchmap', {}).result()
95 95
96 96 def primary(branch):
97 97 if branch == b'.':
98 98 if not lrepo:
99 99 raise error.Abort(_(b"dirstate branch not accessible"))
100 100 branch = lrepo.dirstate.branch()
101 101 if branch in branchmap:
102 102 revs.extend(hex(r) for r in reversed(branchmap[branch]))
103 103 return True
104 104 else:
105 105 return False
106 106
107 107 for branch in branches:
108 108 if not primary(branch):
109 109 raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
110 110 if hashbranch:
111 111 if not primary(hashbranch):
112 112 revs.append(hashbranch)
113 113 return revs, revs[0]
114 114
115 115
116 116 def _isfile(path):
117 117 try:
118 118 # we use os.stat() directly here instead of os.path.isfile()
119 119 # because the latter started returning `False` on invalid path
120 120 # exceptions starting in 3.8 and we care about handling
121 121 # invalid paths specially here.
122 122 st = os.stat(path)
123 123 except ValueError as e:
124 124 msg = stringutil.forcebytestr(e)
125 125 raise error.Abort(_(b'invalid path %s: %s') % (path, msg))
126 126 except OSError:
127 127 return False
128 128 else:
129 129 return stat.S_ISREG(st.st_mode)
130 130
131 131
132 132 class LocalFactory:
133 133 """thin wrapper to dispatch between localrepo and bundle repo"""
134 134
135 135 @staticmethod
136 136 def islocal(path: bytes) -> bool:
137 137 path = util.expandpath(urlutil.urllocalpath(path))
138 138 return not _isfile(path)
139 139
140 140 @staticmethod
141 141 def instance(ui, path, *args, **kwargs):
142 142 path = util.expandpath(urlutil.urllocalpath(path))
143 143 if _isfile(path):
144 144 cls = bundlerepo
145 145 else:
146 146 cls = localrepo
147 147 return cls.instance(ui, path, *args, **kwargs)
148 148
149 149
150 150 repo_schemes = {
151 151 b'bundle': bundlerepo,
152 152 b'union': unionrepo,
153 153 b'file': LocalFactory,
154 154 }
155 155
156 156 peer_schemes = {
157 157 b'http': httppeer,
158 158 b'https': httppeer,
159 159 b'ssh': sshpeer,
160 160 b'static-http': statichttprepo,
161 161 }
162 162
163 163
164 164 def islocal(repo):
165 165 '''return true if repo (or path pointing to repo) is local'''
166 166 if isinstance(repo, bytes):
167 167 u = urlutil.url(repo)
168 168 scheme = u.scheme or b'file'
169 169 if scheme in peer_schemes:
170 170 cls = peer_schemes[scheme]
171 171 cls.make_peer # make sure we load the module
172 172 elif scheme in repo_schemes:
173 173 cls = repo_schemes[scheme]
174 174 cls.instance # make sure we load the module
175 175 else:
176 176 cls = LocalFactory
177 177 if util.safehasattr(cls, 'islocal'):
178 178 return cls.islocal(repo) # pytype: disable=module-attr
179 179 return False
180 180 repo.ui.deprecwarn(b"use obj.local() instead of islocal(obj)", b"6.4")
181 181 return repo.local()
182 182
183 183
184 184 def openpath(ui, path, sendaccept=True):
185 185 '''open path with open if local, url.open if remote'''
186 186 pathurl = urlutil.url(path, parsequery=False, parsefragment=False)
187 187 if pathurl.islocal():
188 188 return util.posixfile(pathurl.localpath(), b'rb')
189 189 else:
190 190 return url.open(ui, path, sendaccept=sendaccept)
191 191
192 192
193 193 # a list of (ui, repo) functions called for wire peer initialization
194 194 wirepeersetupfuncs = []
195 195
196 196
197 197 def _setup_repo_or_peer(ui, obj, presetupfuncs=None):
198 198 ui = getattr(obj, "ui", ui)
199 199 for f in presetupfuncs or []:
200 200 f(ui, obj)
201 201 ui.log(b'extension', b'- executing reposetup hooks\n')
202 202 with util.timedcm('all reposetup') as allreposetupstats:
203 203 for name, module in extensions.extensions(ui):
204 204 ui.log(b'extension', b' - running reposetup for %s\n', name)
205 205 hook = getattr(module, 'reposetup', None)
206 206 if hook:
207 207 with util.timedcm('reposetup %r', name) as stats:
208 208 hook(ui, obj)
209 209 msg = b' > reposetup for %s took %s\n'
210 210 ui.log(b'extension', msg, name, stats)
211 211 ui.log(b'extension', b'> all reposetup took %s\n', allreposetupstats)
212 212 if not obj.local():
213 213 for f in wirepeersetupfuncs:
214 214 f(ui, obj)
215 215
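
The reposetup loop in ``_setup_repo_or_peer`` is what gives extensions a chance to adjust every repository or peer object. A minimal, hypothetical extension-side hook (the debug message is illustrative only):

    def reposetup(ui, repo):
        # called once per repository object, after core setup has run
        if repo.local():
            ui.debug(b'example extension: enabled for %s\n' % repo.root)
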
216 216
217 217 def repository(
218 218 ui,
219 219 path=b'',
220 220 create=False,
221 221 presetupfuncs=None,
222 222 intents=None,
223 223 createopts=None,
224 224 ):
225 225 """return a repository object for the specified path"""
226 226 scheme = urlutil.url(path).scheme
227 227 if scheme is None:
228 228 scheme = b'file'
229 229 cls = repo_schemes.get(scheme)
230 230 if cls is None:
231 231 if scheme in peer_schemes:
232 232 raise error.Abort(_(b"repository '%s' is not local") % path)
233 233 cls = LocalFactory
234 234 repo = cls.instance(
235 235 ui,
236 236 path,
237 237 create,
238 238 intents=intents,
239 239 createopts=createopts,
240 240 )
241 241 _setup_repo_or_peer(ui, repo, presetupfuncs=presetupfuncs)
242 242 return repo.filtered(b'visible')
243 243
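
A short usage sketch for ``repository``, assuming the placeholder path points at an existing local repository:

    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'/path/to/local/repo')
    ui.write(b'%s\n' % repo.root)
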
244 244
245 def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
245 def peer(
246 uiorrepo,
247 opts,
248 path,
249 create=False,
250 intents=None,
251 createopts=None,
252 remotehidden=False,
253 ):
246 254 '''return a repository peer for the specified path'''
247 255 ui = getattr(uiorrepo, 'ui', uiorrepo)
248 256 rui = remoteui(uiorrepo, opts)
249 257 if util.safehasattr(path, 'url'):
250 258 # this is already a urlutil.path object
251 259 peer_path = path
252 260 else:
253 261 peer_path = urlutil.path(ui, None, rawloc=path, validate_path=False)
254 262 scheme = peer_path.url.scheme # pytype: disable=attribute-error
255 263 if scheme in peer_schemes:
256 264 cls = peer_schemes[scheme]
257 265 peer = cls.make_peer(
258 266 rui,
259 267 peer_path,
260 268 create,
261 269 intents=intents,
262 270 createopts=createopts,
271 remotehidden=remotehidden,
263 272 )
264 273 _setup_repo_or_peer(rui, peer)
265 274 else:
266 275 # this is a repository
267 276 repo_path = peer_path.loc # pytype: disable=attribute-error
268 277 if not repo_path:
269 278 repo_path = peer_path.rawloc # pytype: disable=attribute-error
270 279 repo = repository(
271 280 rui,
272 281 repo_path,
273 282 create,
274 283 intents=intents,
275 284 createopts=createopts,
276 285 )
277 peer = repo.peer(path=peer_path)
286 peer = repo.peer(path=peer_path, remotehidden=remotehidden)
278 287 return peer
279 288
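
A sketch of how a caller can use the ``remotehidden`` argument introduced here to ask the remote peer not to filter hidden changesets; the URL is a placeholder, and the server must be willing to serve hidden changesets for this to have any effect:

    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    remote = hg.peer(ui, {}, b'ssh://hg@example.org/repo', remotehidden=True)
    try:
        ui.write(b'connected to %s\n' % remote.url())
    finally:
        remote.close()
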
280 289
281 290 def defaultdest(source):
282 291 """return default destination of clone if none is given
283 292
284 293 >>> defaultdest(b'foo')
285 294 'foo'
286 295 >>> defaultdest(b'/foo/bar')
287 296 'bar'
288 297 >>> defaultdest(b'/')
289 298 ''
290 299 >>> defaultdest(b'')
291 300 ''
292 301 >>> defaultdest(b'http://example.org/')
293 302 ''
294 303 >>> defaultdest(b'http://example.org/foo/')
295 304 'foo'
296 305 """
297 306 path = urlutil.url(source).path
298 307 if not path:
299 308 return b''
300 309 return os.path.basename(os.path.normpath(path))
301 310
302 311
303 312 def sharedreposource(repo):
304 313 """Returns repository object for source repository of a shared repo.
305 314
306 315 If repo is not a shared repository, returns None.
307 316 """
308 317 if repo.sharedpath == repo.path:
309 318 return None
310 319
311 320 if util.safehasattr(repo, b'srcrepo') and repo.srcrepo:
312 321 return repo.srcrepo
313 322
314 323 # the sharedpath always ends in the .hg; we want the path to the repo
315 324 source = repo.vfs.split(repo.sharedpath)[0]
316 325 srcurl, branches = urlutil.parseurl(source)
317 326 srcrepo = repository(repo.ui, srcurl)
318 327 repo.srcrepo = srcrepo
319 328 return srcrepo
320 329
321 330
322 331 def share(
323 332 ui,
324 333 source,
325 334 dest=None,
326 335 update=True,
327 336 bookmarks=True,
328 337 defaultpath=None,
329 338 relative=False,
330 339 ):
331 340 '''create a shared repository'''
332 341
333 342 not_local_msg = _(b'can only share local repositories')
334 343 if util.safehasattr(source, 'local'):
335 344 if source.local() is None:
336 345 raise error.Abort(not_local_msg)
337 346 elif not islocal(source):
338 347 # XXX why are we getting bytes here ?
339 348 raise error.Abort(not_local_msg)
340 349
341 350 if not dest:
342 351 dest = defaultdest(source)
343 352 else:
344 353 dest = urlutil.get_clone_path_obj(ui, dest).loc
345 354
346 355 if isinstance(source, bytes):
347 356 source_path = urlutil.get_clone_path_obj(ui, source)
348 357 srcrepo = repository(ui, source_path.loc)
349 358 branches = (source_path.branch, [])
350 359 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
351 360 else:
352 361 srcrepo = source.local()
353 362 checkout = None
354 363
355 364 shareditems = set()
356 365 if bookmarks:
357 366 shareditems.add(sharedbookmarks)
358 367
359 368 r = repository(
360 369 ui,
361 370 dest,
362 371 create=True,
363 372 createopts={
364 373 b'sharedrepo': srcrepo,
365 374 b'sharedrelative': relative,
366 375 b'shareditems': shareditems,
367 376 },
368 377 )
369 378
370 379 postshare(srcrepo, r, defaultpath=defaultpath)
371 380 r = repository(ui, dest)
372 381 _postshareupdate(r, update, checkout=checkout)
373 382 return r
374 383
375 384
376 385 def _prependsourcehgrc(repo):
377 386 """copies the source repo config and prepend it in current repo .hg/hgrc
378 387 on unshare. This is only done if the share was perfomed using share safe
379 388 method where we share config of source in shares"""
380 389 srcvfs = vfsmod.vfs(repo.sharedpath)
381 390 dstvfs = vfsmod.vfs(repo.path)
382 391
383 392 if not srcvfs.exists(b'hgrc'):
384 393 return
385 394
386 395 currentconfig = b''
387 396 if dstvfs.exists(b'hgrc'):
388 397 currentconfig = dstvfs.read(b'hgrc')
389 398
390 399 with dstvfs(b'hgrc', b'wb') as fp:
391 400 sourceconfig = srcvfs.read(b'hgrc')
392 401 fp.write(b"# Config copied from shared source\n")
393 402 fp.write(sourceconfig)
394 403 fp.write(b'\n')
395 404 fp.write(currentconfig)
396 405
397 406
398 407 def unshare(ui, repo):
399 408 """convert a shared repository to a normal one
400 409
401 410 Copy the store data to the repo and remove the sharedpath data.
402 411
403 412 Returns a new repository object representing the unshared repository.
404 413
405 414 The passed repository object is not usable after this function is
406 415 called.
407 416 """
408 417
409 418 with repo.lock():
410 419 # we use locks here because if we race with commit, we
411 420 # can end up with extra data in the cloned revlogs that's
412 421 # not pointed to by changesets, thus causing verify to
413 422 # fail
414 423 destlock = copystore(ui, repo, repo.path)
415 424 with destlock or util.nullcontextmanager():
416 425 if requirements.SHARESAFE_REQUIREMENT in repo.requirements:
417 426 # we were sharing .hg/hgrc of the share source with the current
418 427 # repo. We need to copy that while unsharing otherwise it can
419 428 # disable hooks and other checks
420 429 _prependsourcehgrc(repo)
421 430
422 431 sharefile = repo.vfs.join(b'sharedpath')
423 432 util.rename(sharefile, sharefile + b'.old')
424 433
425 434 repo.requirements.discard(requirements.SHARED_REQUIREMENT)
426 435 repo.requirements.discard(requirements.RELATIVE_SHARED_REQUIREMENT)
427 436 scmutil.writereporequirements(repo)
428 437
429 438 # Removing share changes some fundamental properties of the repo instance.
430 439 # So we instantiate a new repo object and operate on it rather than
431 440 # try to keep the existing repo usable.
432 441 newrepo = repository(repo.baseui, repo.root, create=False)
433 442
434 443 # TODO: figure out how to access subrepos that exist, but were previously
435 444 # removed from .hgsub
436 445 c = newrepo[b'.']
437 446 subs = c.substate
438 447 for s in sorted(subs):
439 448 c.sub(s).unshare()
440 449
441 450 localrepo.poisonrepository(repo)
442 451
443 452 return newrepo
444 453
445 454
446 455 def postshare(sourcerepo, destrepo, defaultpath=None):
447 456 """Called after a new shared repo is created.
448 457
449 458 The new repo only has a requirements file and pointer to the source.
450 459 This function configures additional shared data.
451 460
452 461 Extensions can wrap this function and write additional entries to
453 462 destrepo/.hg/shared to indicate additional pieces of data to be shared.
454 463 """
455 464 default = defaultpath or sourcerepo.ui.config(b'paths', b'default')
456 465 if default:
457 466 template = b'[paths]\ndefault = %s\n'
458 467 destrepo.vfs.write(b'hgrc', util.tonativeeol(template % default))
459 468 if requirements.NARROW_REQUIREMENT in sourcerepo.requirements:
460 469 with destrepo.wlock(), destrepo.lock(), destrepo.transaction(
461 470 b"narrow-share"
462 471 ):
463 472 narrowspec.copytoworkingcopy(destrepo)
464 473
465 474
466 475 def _postshareupdate(repo, update, checkout=None):
467 476 """Maybe perform a working directory update after a shared repo is created.
468 477
469 478 ``update`` can be a boolean or a revision to update to.
470 479 """
471 480 if not update:
472 481 return
473 482
474 483 repo.ui.status(_(b"updating working directory\n"))
475 484 if update is not True:
476 485 checkout = update
477 486 for test in (checkout, b'default', b'tip'):
478 487 if test is None:
479 488 continue
480 489 try:
481 490 uprev = repo.lookup(test)
482 491 break
483 492 except error.RepoLookupError:
484 493 continue
485 494 _update(repo, uprev)
486 495
487 496
488 497 def copystore(ui, srcrepo, destpath):
489 498 """copy files from store of srcrepo in destpath
490 499
491 500 returns destlock
492 501 """
493 502 destlock = None
494 503 try:
495 504 hardlink = None
496 505 topic = _(b'linking') if hardlink else _(b'copying')
497 506 with ui.makeprogress(topic, unit=_(b'files')) as progress:
498 507 num = 0
499 508 srcpublishing = srcrepo.publishing()
500 509 srcvfs = vfsmod.vfs(srcrepo.sharedpath)
501 510 dstvfs = vfsmod.vfs(destpath)
502 511 for f in srcrepo.store.copylist():
503 512 if srcpublishing and f.endswith(b'phaseroots'):
504 513 continue
505 514 dstbase = os.path.dirname(f)
506 515 if dstbase and not dstvfs.exists(dstbase):
507 516 dstvfs.mkdir(dstbase)
508 517 if srcvfs.exists(f):
509 518 if f.endswith(b'data'):
510 519 # 'dstbase' may be empty (e.g. revlog format 0)
511 520 lockfile = os.path.join(dstbase, b"lock")
512 521 # lock to avoid premature writing to the target
513 522 destlock = lock.lock(dstvfs, lockfile)
514 523 hardlink, n = util.copyfiles(
515 524 srcvfs.join(f), dstvfs.join(f), hardlink, progress
516 525 )
517 526 num += n
518 527 if hardlink:
519 528 ui.debug(b"linked %d files\n" % num)
520 529 else:
521 530 ui.debug(b"copied %d files\n" % num)
522 531 return destlock
523 532 except: # re-raises
524 533 release(destlock)
525 534 raise
526 535
527 536
528 537 def clonewithshare(
529 538 ui,
530 539 peeropts,
531 540 sharepath,
532 541 source,
533 542 srcpeer,
534 543 dest,
535 544 pull=False,
536 545 rev=None,
537 546 update=True,
538 547 stream=False,
539 548 ):
540 549 """Perform a clone using a shared repo.
541 550
542 551 The store for the repository will be located at <sharepath>/.hg. The
543 552 specified revisions will be cloned or pulled from "source". A shared repo
544 553 will be created at "dest" and a working copy will be created if "update" is
545 554 True.
546 555 """
547 556 revs = None
548 557 if rev:
549 558 if not srcpeer.capable(b'lookup'):
550 559 raise error.Abort(
551 560 _(
552 561 b"src repository does not support "
553 562 b"revision lookup and so doesn't "
554 563 b"support clone by revision"
555 564 )
556 565 )
557 566
558 567 # TODO this is batchable.
559 568 remoterevs = []
560 569 for r in rev:
561 570 with srcpeer.commandexecutor() as e:
562 571 remoterevs.append(
563 572 e.callcommand(
564 573 b'lookup',
565 574 {
566 575 b'key': r,
567 576 },
568 577 ).result()
569 578 )
570 579 revs = remoterevs
571 580
572 581 # Obtain a lock before checking for or cloning the pooled repo otherwise
573 582 # 2 clients may race creating or populating it.
574 583 pooldir = os.path.dirname(sharepath)
575 584 # lock class requires the directory to exist.
576 585 try:
577 586 util.makedir(pooldir, False)
578 587 except FileExistsError:
579 588 pass
580 589
581 590 poolvfs = vfsmod.vfs(pooldir)
582 591 basename = os.path.basename(sharepath)
583 592
584 593 with lock.lock(poolvfs, b'%s.lock' % basename):
585 594 if os.path.exists(sharepath):
586 595 ui.status(
587 596 _(b'(sharing from existing pooled repository %s)\n') % basename
588 597 )
589 598 else:
590 599 ui.status(
591 600 _(b'(sharing from new pooled repository %s)\n') % basename
592 601 )
593 602 # Always use pull mode because hardlinks in share mode don't work
594 603 # well. Never update because working copies aren't necessary in
595 604 # share mode.
596 605 clone(
597 606 ui,
598 607 peeropts,
599 608 source,
600 609 dest=sharepath,
601 610 pull=True,
602 611 revs=rev,
603 612 update=False,
604 613 stream=stream,
605 614 )
606 615
607 616 # Resolve the value to put in [paths] section for the source.
608 617 if islocal(source):
609 618 defaultpath = util.abspath(urlutil.urllocalpath(source))
610 619 else:
611 620 defaultpath = source
612 621
613 622 sharerepo = repository(ui, path=sharepath)
614 623 destrepo = share(
615 624 ui,
616 625 sharerepo,
617 626 dest=dest,
618 627 update=False,
619 628 bookmarks=False,
620 629 defaultpath=defaultpath,
621 630 )
622 631
623 632 # We need to perform a pull against the dest repo to fetch bookmarks
624 633 # and other non-store data that isn't shared by default. In the case of
625 634 # non-existing shared repo, this means we pull from the remote twice. This
626 635 # is a bit weird. But at the time it was implemented, there wasn't an easy
627 636 # way to pull just non-changegroup data.
628 637 exchange.pull(destrepo, srcpeer, heads=revs)
629 638
630 639 _postshareupdate(destrepo, update)
631 640
632 641 return srcpeer, peer(ui, peeropts, dest)
633 642
634 643
635 644 # Recomputing caches is often slow on big repos, so copy them.
636 645 def _copycache(srcrepo, dstcachedir, fname):
637 646 """copy a cache from srcrepo to destcachedir (if it exists)"""
638 647 srcfname = srcrepo.cachevfs.join(fname)
639 648 dstfname = os.path.join(dstcachedir, fname)
640 649 if os.path.exists(srcfname):
641 650 if not os.path.exists(dstcachedir):
642 651 os.mkdir(dstcachedir)
643 652 util.copyfile(srcfname, dstfname)
644 653
645 654
646 655 def clone(
647 656 ui,
648 657 peeropts,
649 658 source,
650 659 dest=None,
651 660 pull=False,
652 661 revs=None,
653 662 update=True,
654 663 stream=False,
655 664 branch=None,
656 665 shareopts=None,
657 666 storeincludepats=None,
658 667 storeexcludepats=None,
659 668 depth=None,
660 669 ):
661 670 """Make a copy of an existing repository.
662 671
663 672 Create a copy of an existing repository in a new directory. The
664 673 source and destination are URLs, as passed to the repository
665 674 function. Returns a pair of repository peers, the source and
666 675 newly created destination.
667 676
668 677 The location of the source is added to the new repository's
669 678 .hg/hgrc file, as the default to be used for future pulls and
670 679 pushes.
671 680
672 681 If an exception is raised, the partly cloned/updated destination
673 682 repository will be deleted.
674 683
675 684 Arguments:
676 685
677 686 source: repository object or URL
678 687
679 688 dest: URL of destination repository to create (defaults to base
680 689 name of source repository)
681 690
682 691 pull: always pull from source repository, even in local case or if the
683 692 server prefers streaming
684 693
685 694 stream: stream raw data uncompressed from repository (fast over
686 695 LAN, slow over WAN)
687 696
688 697 revs: revision to clone up to (implies pull=True)
689 698
690 699 update: update working directory after clone completes, if
691 700 destination is local repository (True means update to default rev,
692 701 anything else is treated as a revision)
693 702
694 703 branch: branches to clone
695 704
696 705 shareopts: dict of options to control auto sharing behavior. The "pool" key
697 706 activates auto sharing mode and defines the directory for stores. The
698 707 "mode" key determines how to construct the directory name of the shared
699 708 repository. "identity" means the name is derived from the node of the first
700 709 changeset in the repository. "remote" means the name is derived from the
701 710 remote's path/URL. Defaults to "identity."
702 711
703 712 storeincludepats and storeexcludepats: sets of file patterns to include and
704 713 exclude in the repository copy, respectively. If not defined, all files
705 714 will be included (a "full" clone). Otherwise a "narrow" clone containing
706 715 only the requested files will be performed. If ``storeincludepats`` is not
707 716 defined but ``storeexcludepats`` is, ``storeincludepats`` is assumed to be
708 717 ``path:.``. If both are empty sets, no files will be cloned.
709 718 """
710 719
711 720 if isinstance(source, bytes):
712 721 src_path = urlutil.get_clone_path_obj(ui, source)
713 722 if src_path is None:
714 723 srcpeer = peer(ui, peeropts, b'')
715 724 origsource = source = b''
716 725 branches = (None, branch or [])
717 726 else:
718 727 srcpeer = peer(ui, peeropts, src_path)
719 728 origsource = src_path.rawloc
720 729 branches = (src_path.branch, branch or [])
721 730 source = src_path.loc
722 731 else:
723 732 if util.safehasattr(source, 'peer'):
724 733 srcpeer = source.peer() # in case we were called with a localrepo
725 734 else:
726 735 srcpeer = source
727 736 branches = (None, branch or [])
728 737 # XXX path: simply use the peer `path` object when this become available
729 738 origsource = source = srcpeer.url()
730 739 srclock = destlock = destwlock = cleandir = None
731 740 destpeer = None
732 741 try:
733 742 revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
734 743
735 744 if dest is None:
736 745 dest = defaultdest(source)
737 746 if dest:
738 747 ui.status(_(b"destination directory: %s\n") % dest)
739 748 else:
740 749 dest_path = urlutil.get_clone_path_obj(ui, dest)
741 750 if dest_path is not None:
742 751 dest = dest_path.rawloc
743 752 else:
744 753 dest = b''
745 754
746 755 dest = urlutil.urllocalpath(dest)
747 756 source = urlutil.urllocalpath(source)
748 757
749 758 if not dest:
750 759 raise error.InputError(_(b"empty destination path is not valid"))
751 760
752 761 destvfs = vfsmod.vfs(dest, expandpath=True)
753 762 if destvfs.lexists():
754 763 if not destvfs.isdir():
755 764 raise error.InputError(
756 765 _(b"destination '%s' already exists") % dest
757 766 )
758 767 elif destvfs.listdir():
759 768 raise error.InputError(
760 769 _(b"destination '%s' is not empty") % dest
761 770 )
762 771
763 772 createopts = {}
764 773 narrow = False
765 774
766 775 if storeincludepats is not None:
767 776 narrowspec.validatepatterns(storeincludepats)
768 777 narrow = True
769 778
770 779 if storeexcludepats is not None:
771 780 narrowspec.validatepatterns(storeexcludepats)
772 781 narrow = True
773 782
774 783 if narrow:
775 784 # Include everything by default if only exclusion patterns defined.
776 785 if storeexcludepats and not storeincludepats:
777 786 storeincludepats = {b'path:.'}
778 787
779 788 createopts[b'narrowfiles'] = True
780 789
781 790 if depth:
782 791 createopts[b'shallowfilestore'] = True
783 792
784 793 if srcpeer.capable(b'lfs-serve'):
785 794 # Repository creation honors the config if it disabled the extension, so
786 795 # we can't just announce that lfs will be enabled. This check avoids
787 796 # saying that lfs will be enabled, and then saying it's an unknown
788 797 # feature. The lfs creation option is set in either case so that a
789 798 # requirement is added. If the extension is explicitly disabled but the
790 799 # requirement is set, the clone aborts early, before transferring any
791 800 # data.
792 801 createopts[b'lfs'] = True
793 802
794 803 if b'lfs' in extensions.disabled():
795 804 ui.status(
796 805 _(
797 806 b'(remote is using large file support (lfs), but it is '
798 807 b'explicitly disabled in the local configuration)\n'
799 808 )
800 809 )
801 810 else:
802 811 ui.status(
803 812 _(
804 813 b'(remote is using large file support (lfs); lfs will '
805 814 b'be enabled for this repository)\n'
806 815 )
807 816 )
808 817
809 818 shareopts = shareopts or {}
810 819 sharepool = shareopts.get(b'pool')
811 820 sharenamemode = shareopts.get(b'mode')
812 821 if sharepool and islocal(dest):
813 822 sharepath = None
814 823 if sharenamemode == b'identity':
815 824 # Resolve the name from the initial changeset in the remote
816 825 # repository. This returns nullid when the remote is empty. It
817 826 # raises RepoLookupError if revision 0 is filtered or otherwise
818 827 # not available. If we fail to resolve, sharing is not enabled.
819 828 try:
820 829 with srcpeer.commandexecutor() as e:
821 830 rootnode = e.callcommand(
822 831 b'lookup',
823 832 {
824 833 b'key': b'0',
825 834 },
826 835 ).result()
827 836
828 837 if rootnode != sha1nodeconstants.nullid:
829 838 sharepath = os.path.join(sharepool, hex(rootnode))
830 839 else:
831 840 ui.status(
832 841 _(
833 842 b'(not using pooled storage: '
834 843 b'remote appears to be empty)\n'
835 844 )
836 845 )
837 846 except error.RepoLookupError:
838 847 ui.status(
839 848 _(
840 849 b'(not using pooled storage: '
841 850 b'unable to resolve identity of remote)\n'
842 851 )
843 852 )
844 853 elif sharenamemode == b'remote':
845 854 sharepath = os.path.join(
846 855 sharepool, hex(hashutil.sha1(source).digest())
847 856 )
848 857 else:
849 858 raise error.Abort(
850 859 _(b'unknown share naming mode: %s') % sharenamemode
851 860 )
852 861
853 862 # TODO this is a somewhat arbitrary restriction.
854 863 if narrow:
855 864 ui.status(
856 865 _(b'(pooled storage not supported for narrow clones)\n')
857 866 )
858 867 sharepath = None
859 868
860 869 if sharepath:
861 870 return clonewithshare(
862 871 ui,
863 872 peeropts,
864 873 sharepath,
865 874 source,
866 875 srcpeer,
867 876 dest,
868 877 pull=pull,
869 878 rev=revs,
870 879 update=update,
871 880 stream=stream,
872 881 )
873 882
874 883 srcrepo = srcpeer.local()
875 884
876 885 abspath = origsource
877 886 if islocal(origsource):
878 887 abspath = util.abspath(urlutil.urllocalpath(origsource))
879 888
880 889 if islocal(dest):
881 890 if os.path.exists(dest):
882 891 # only clean up directories we create ourselves
883 892 hgdir = os.path.realpath(os.path.join(dest, b".hg"))
884 893 cleandir = hgdir
885 894 else:
886 895 cleandir = dest
887 896
888 897 copy = False
889 898 if (
890 899 srcrepo
891 900 and srcrepo.cancopy()
892 901 and islocal(dest)
893 902 and not phases.hassecret(srcrepo)
894 903 ):
895 904 copy = not pull and not revs
896 905
897 906 # TODO this is a somewhat arbitrary restriction.
898 907 if narrow:
899 908 copy = False
900 909
901 910 if copy:
902 911 try:
903 912 # we use a lock here because if we race with commit, we
904 913 # can end up with extra data in the cloned revlogs that's
905 914 # not pointed to by changesets, thus causing verify to
906 915 # fail
907 916 srclock = srcrepo.lock(wait=False)
908 917 except error.LockError:
909 918 copy = False
910 919
911 920 if copy:
912 921 srcrepo.hook(b'preoutgoing', throw=True, source=b'clone')
913 922
914 923 destrootpath = urlutil.urllocalpath(dest)
915 924 dest_reqs = localrepo.clone_requirements(ui, createopts, srcrepo)
916 925 localrepo.createrepository(
917 926 ui,
918 927 destrootpath,
919 928 requirements=dest_reqs,
920 929 )
921 930 destrepo = localrepo.makelocalrepository(ui, destrootpath)
922 931
923 932 destwlock = destrepo.wlock()
924 933 destlock = destrepo.lock()
925 934 from . import streamclone # avoid cycle
926 935
927 936 streamclone.local_copy(srcrepo, destrepo)
928 937
929 938 # we need to re-init the repo after manually copying the data
930 939 # into it
931 940 destpeer = peer(srcrepo, peeropts, dest)
932 941
933 942 # make the peer aware that it is already locked
934 943 #
935 944 # important:
936 945 #
937 946 # We still need to release that lock at the end of the function
938 947 destpeer.local()._lockref = weakref.ref(destlock)
939 948 destpeer.local()._wlockref = weakref.ref(destwlock)
940 949 # dirstate also needs to be copied because `_wlockref` has a reference
941 950 # to it: this dirstate is saved to disk when the wlock is released
942 951 destpeer.local().dirstate = destrepo.dirstate
943 952
944 953 srcrepo.hook(
945 954 b'outgoing', source=b'clone', node=srcrepo.nodeconstants.nullhex
946 955 )
947 956 else:
948 957 try:
949 958 # only pass ui when no srcrepo
950 959 destpeer = peer(
951 960 srcrepo or ui,
952 961 peeropts,
953 962 dest,
954 963 create=True,
955 964 createopts=createopts,
956 965 )
957 966 except FileExistsError:
958 967 cleandir = None
959 968 raise error.Abort(_(b"destination '%s' already exists") % dest)
960 969
961 970 if revs:
962 971 if not srcpeer.capable(b'lookup'):
963 972 raise error.Abort(
964 973 _(
965 974 b"src repository does not support "
966 975 b"revision lookup and so doesn't "
967 976 b"support clone by revision"
968 977 )
969 978 )
970 979
971 980 # TODO this is batchable.
972 981 remoterevs = []
973 982 for rev in revs:
974 983 with srcpeer.commandexecutor() as e:
975 984 remoterevs.append(
976 985 e.callcommand(
977 986 b'lookup',
978 987 {
979 988 b'key': rev,
980 989 },
981 990 ).result()
982 991 )
983 992 revs = remoterevs
984 993
985 994 checkout = revs[0]
986 995 else:
987 996 revs = None
988 997 local = destpeer.local()
989 998 if local:
990 999 if narrow:
991 1000 with local.wlock(), local.lock(), local.transaction(
992 1001 b'narrow-clone'
993 1002 ):
994 1003 local.setnarrowpats(storeincludepats, storeexcludepats)
995 1004 narrowspec.copytoworkingcopy(local)
996 1005
997 1006 u = urlutil.url(abspath)
998 1007 defaulturl = bytes(u)
999 1008 local.ui.setconfig(b'paths', b'default', defaulturl, b'clone')
1000 1009 if not stream:
1001 1010 if pull:
1002 1011 stream = False
1003 1012 else:
1004 1013 stream = None
1005 1014 # internal config: ui.quietbookmarkmove
1006 1015 overrides = {(b'ui', b'quietbookmarkmove'): True}
1007 1016 with local.ui.configoverride(overrides, b'clone'):
1008 1017 exchange.pull(
1009 1018 local,
1010 1019 srcpeer,
1011 1020 heads=revs,
1012 1021 streamclonerequested=stream,
1013 1022 includepats=storeincludepats,
1014 1023 excludepats=storeexcludepats,
1015 1024 depth=depth,
1016 1025 )
1017 1026 elif srcrepo:
1018 1027 # TODO lift restriction once exchange.push() accepts narrow
1019 1028 # push.
1020 1029 if narrow:
1021 1030 raise error.Abort(
1022 1031 _(
1023 1032 b'narrow clone not available for '
1024 1033 b'remote destinations'
1025 1034 )
1026 1035 )
1027 1036
1028 1037 exchange.push(
1029 1038 srcrepo,
1030 1039 destpeer,
1031 1040 revs=revs,
1032 1041 bookmarks=srcrepo._bookmarks.keys(),
1033 1042 )
1034 1043 else:
1035 1044 raise error.Abort(
1036 1045 _(b"clone from remote to remote not supported")
1037 1046 )
1038 1047
1039 1048 cleandir = None
1040 1049
1041 1050 destrepo = destpeer.local()
1042 1051 if destrepo:
1043 1052 template = uimod.samplehgrcs[b'cloned']
1044 1053 u = urlutil.url(abspath)
1045 1054 u.passwd = None
1046 1055 defaulturl = bytes(u)
1047 1056 destrepo.vfs.write(b'hgrc', util.tonativeeol(template % defaulturl))
1048 1057 destrepo.ui.setconfig(b'paths', b'default', defaulturl, b'clone')
1049 1058
1050 1059 if ui.configbool(b'experimental', b'remotenames'):
1051 1060 logexchange.pullremotenames(destrepo, srcpeer)
1052 1061
1053 1062 if update:
1054 1063 if update is not True:
1055 1064 with srcpeer.commandexecutor() as e:
1056 1065 checkout = e.callcommand(
1057 1066 b'lookup',
1058 1067 {
1059 1068 b'key': update,
1060 1069 },
1061 1070 ).result()
1062 1071
1063 1072 uprev = None
1064 1073 status = None
1065 1074 if checkout is not None:
1066 1075 # Some extensions (at least hg-git and hg-subversion) have
1067 1076 # a peer.lookup() implementation that returns a name instead
1068 1077 # of a nodeid. We work around it here until we've figured
1069 1078 # out a better solution.
1070 1079 if len(checkout) == 20 and checkout in destrepo:
1071 1080 uprev = checkout
1072 1081 elif scmutil.isrevsymbol(destrepo, checkout):
1073 1082 uprev = scmutil.revsymbol(destrepo, checkout).node()
1074 1083 else:
1075 1084 if update is not True:
1076 1085 try:
1077 1086 uprev = destrepo.lookup(update)
1078 1087 except error.RepoLookupError:
1079 1088 pass
1080 1089 if uprev is None:
1081 1090 try:
1082 1091 if destrepo._activebookmark:
1083 1092 uprev = destrepo.lookup(destrepo._activebookmark)
1084 1093 update = destrepo._activebookmark
1085 1094 else:
1086 1095 uprev = destrepo._bookmarks[b'@']
1087 1096 update = b'@'
1088 1097 bn = destrepo[uprev].branch()
1089 1098 if bn == b'default':
1090 1099 status = _(b"updating to bookmark %s\n" % update)
1091 1100 else:
1092 1101 status = (
1093 1102 _(b"updating to bookmark %s on branch %s\n")
1094 1103 ) % (update, bn)
1095 1104 except KeyError:
1096 1105 try:
1097 1106 uprev = destrepo.branchtip(b'default')
1098 1107 except error.RepoLookupError:
1099 1108 uprev = destrepo.lookup(b'tip')
1100 1109 if not status:
1101 1110 bn = destrepo[uprev].branch()
1102 1111 status = _(b"updating to branch %s\n") % bn
1103 1112 destrepo.ui.status(status)
1104 1113 _update(destrepo, uprev)
1105 1114 if update in destrepo._bookmarks:
1106 1115 bookmarks.activate(destrepo, update)
1107 1116 if destlock is not None:
1108 1117 release(destlock)
1109 1118 if destwlock is not None:
1110 1119 release(destwlock)
1111 1120 # here is a tiny window where someone could end up writing to the
1112 1121 # repository before the caches are sure to be warm. This is "fine"
1113 1122 # as the only "bad" outcome would be some slowness. That potential
1114 1123 # slowness already affects readers.
1115 1124 with destrepo.lock():
1116 1125 destrepo.updatecaches(caches=repositorymod.CACHES_POST_CLONE)
1117 1126 finally:
1118 1127 release(srclock, destlock, destwlock)
1119 1128 if cleandir is not None:
1120 1129 shutil.rmtree(cleandir, True)
1121 1130 if srcpeer is not None:
1122 1131 srcpeer.close()
1123 1132 if destpeer and destpeer.local() is None:
1124 1133 destpeer.close()
1125 1134 return srcpeer, destpeer
1126 1135
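The pooled-storage branch in clone() above derives the share directory either from the repository's root changeset ('identity' mode) or from a SHA-1 of the source URL ('remote' mode). A minimal sketch of the 'remote' computation using only the standard library; the pool directory and source URL below are made-up values:

    import hashlib
    import os

    sharepool = b'/srv/hg/share-pool'             # hypothetical share.pool directory
    source = b'https://hg.example.com/some-repo'  # hypothetical clone source

    # 'remote' naming mode: one pooled store per source URL
    sharepath = os.path.join(
        sharepool, hashlib.sha1(source).hexdigest().encode('ascii')
    )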
1127 1136
1128 1137 def _showstats(repo, stats, quietempty=False):
1129 1138 if quietempty and stats.isempty():
1130 1139 return
1131 1140 repo.ui.status(
1132 1141 _(
1133 1142 b"%d files updated, %d files merged, "
1134 1143 b"%d files removed, %d files unresolved\n"
1135 1144 )
1136 1145 % (
1137 1146 stats.updatedcount,
1138 1147 stats.mergedcount,
1139 1148 stats.removedcount,
1140 1149 stats.unresolvedcount,
1141 1150 )
1142 1151 )
1143 1152
1144 1153
1145 1154 def updaterepo(repo, node, overwrite, updatecheck=None):
1146 1155 """Update the working directory to node.
1147 1156
1148 1157 When overwrite is set, changes are clobbered; otherwise they are merged.
1149 1158
1150 1159 returns stats (see pydoc mercurial.merge.applyupdates)"""
1151 1160 repo.ui.deprecwarn(
1152 1161 b'prefer merge.update() or merge.clean_update() over hg.updaterepo()',
1153 1162 b'5.7',
1154 1163 )
1155 1164 return mergemod._update(
1156 1165 repo,
1157 1166 node,
1158 1167 branchmerge=False,
1159 1168 force=overwrite,
1160 1169 labels=[b'working copy', b'destination'],
1161 1170 updatecheck=updatecheck,
1162 1171 )
1163 1172
1164 1173
1165 1174 def update(repo, node, quietempty=False, updatecheck=None):
1166 1175 """update the working directory to node"""
1167 1176 stats = mergemod.update(repo[node], updatecheck=updatecheck)
1168 1177 _showstats(repo, stats, quietempty)
1169 1178 if stats.unresolvedcount:
1170 1179 repo.ui.status(_(b"use 'hg resolve' to retry unresolved file merges\n"))
1171 1180 return stats.unresolvedcount > 0
1172 1181
1173 1182
1174 1183 # naming conflict in clone()
1175 1184 _update = update
1176 1185
1177 1186
1178 1187 def clean(repo, node, show_stats=True, quietempty=False):
1179 1188 """forcibly switch the working directory to node, clobbering changes"""
1180 1189 stats = mergemod.clean_update(repo[node])
1181 1190 assert stats.unresolvedcount == 0
1182 1191 if show_stats:
1183 1192 _showstats(repo, stats, quietempty)
1184 1193 return False
1185 1194
1186 1195
1187 1196 # naming conflict in updatetotally()
1188 1197 _clean = clean
1189 1198
1190 1199 _VALID_UPDATECHECKS = {
1191 1200 mergemod.UPDATECHECK_ABORT,
1192 1201 mergemod.UPDATECHECK_NONE,
1193 1202 mergemod.UPDATECHECK_LINEAR,
1194 1203 mergemod.UPDATECHECK_NO_CONFLICT,
1195 1204 }
1196 1205
1197 1206
1198 1207 def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
1199 1208 """Update the working directory with extra care for non-file components
1200 1209
1201 1210 This takes care of non-file components below:
1202 1211
1203 1212 :bookmark: might be advanced or (in)activated
1204 1213
1205 1214 This takes arguments below:
1206 1215
1207 1216 :checkout: to which revision the working directory is updated
1208 1217 :brev: a name, which might be a bookmark to be activated after updating
1209 1218 :clean: whether changes in the working directory can be discarded
1210 1219 :updatecheck: how to deal with a dirty working directory
1211 1220
1212 1221 Valid values for updatecheck are the UPDATECHECK_* constants
1213 1222 defined in the merge module. Passing `None` will result in using the
1214 1223 configured default.
1215 1224
1216 1225 * ABORT: abort if the working directory is dirty
1217 1226 * NONE: don't check (merge working directory changes into destination)
1218 1227 * LINEAR: check that update is linear before merging working directory
1219 1228 changes into destination
1220 1229 * NO_CONFLICT: check that the update does not result in file merges
1221 1230
1222 1231 This returns whether conflict is detected at updating or not.
1223 1232 """
1224 1233 if updatecheck is None:
1225 1234 updatecheck = ui.config(b'commands', b'update.check')
1226 1235 if updatecheck not in _VALID_UPDATECHECKS:
1227 1236 # If not configured, or invalid value configured
1228 1237 updatecheck = mergemod.UPDATECHECK_LINEAR
1229 1238 if updatecheck not in _VALID_UPDATECHECKS:
1230 1239 raise ValueError(
1231 1240 r'Invalid updatecheck value %r (can accept %r)'
1232 1241 % (updatecheck, _VALID_UPDATECHECKS)
1233 1242 )
1234 1243 with repo.wlock():
1235 1244 movemarkfrom = None
1236 1245 warndest = False
1237 1246 if checkout is None:
1238 1247 updata = destutil.destupdate(repo, clean=clean)
1239 1248 checkout, movemarkfrom, brev = updata
1240 1249 warndest = True
1241 1250
1242 1251 if clean:
1243 1252 ret = _clean(repo, checkout)
1244 1253 else:
1245 1254 if updatecheck == mergemod.UPDATECHECK_ABORT:
1246 1255 cmdutil.bailifchanged(repo, merge=False)
1247 1256 updatecheck = mergemod.UPDATECHECK_NONE
1248 1257 ret = _update(repo, checkout, updatecheck=updatecheck)
1249 1258
1250 1259 if not ret and movemarkfrom:
1251 1260 if movemarkfrom == repo[b'.'].node():
1252 1261 pass # no-op update
1253 1262 elif bookmarks.update(repo, [movemarkfrom], repo[b'.'].node()):
1254 1263 b = ui.label(repo._activebookmark, b'bookmarks.active')
1255 1264 ui.status(_(b"updating bookmark %s\n") % b)
1256 1265 else:
1257 1266 # this can happen with a non-linear update
1258 1267 b = ui.label(repo._activebookmark, b'bookmarks')
1259 1268 ui.status(_(b"(leaving bookmark %s)\n") % b)
1260 1269 bookmarks.deactivate(repo)
1261 1270 elif brev in repo._bookmarks:
1262 1271 if brev != repo._activebookmark:
1263 1272 b = ui.label(brev, b'bookmarks.active')
1264 1273 ui.status(_(b"(activating bookmark %s)\n") % b)
1265 1274 bookmarks.activate(repo, brev)
1266 1275 elif brev:
1267 1276 if repo._activebookmark:
1268 1277 b = ui.label(repo._activebookmark, b'bookmarks')
1269 1278 ui.status(_(b"(leaving bookmark %s)\n") % b)
1270 1279 bookmarks.deactivate(repo)
1271 1280
1272 1281 if warndest:
1273 1282 destutil.statusotherdests(ui, repo)
1274 1283
1275 1284 return ret
1276 1285
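updatetotally() accepts one of the merge module's UPDATECHECK_* constants and falls back to UPDATECHECK_LINEAR when commands.update.check is unset or invalid. A hedged sketch of a caller passing an explicit check mode; the ui and repo objects are assumed to have been obtained elsewhere:

    from mercurial import hg, merge as mergemod

    # `ui` and `repo` are assumed to exist (e.g. from dispatch or localrepo helpers)
    had_conflicts = hg.updatetotally(
        ui, repo, checkout=None, brev=None,
        updatecheck=mergemod.UPDATECHECK_NO_CONFLICT,
    )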
1277 1286
1278 1287 def merge(
1279 1288 ctx,
1280 1289 force=False,
1281 1290 remind=True,
1282 1291 labels=None,
1283 1292 ):
1284 1293 """Branch merge with node, resolving changes. Return true if any
1285 1294 unresolved conflicts."""
1286 1295 repo = ctx.repo()
1287 1296 stats = mergemod.merge(ctx, force=force, labels=labels)
1288 1297 _showstats(repo, stats)
1289 1298 if stats.unresolvedcount:
1290 1299 repo.ui.status(
1291 1300 _(
1292 1301 b"use 'hg resolve' to retry unresolved file merges "
1293 1302 b"or 'hg merge --abort' to abandon\n"
1294 1303 )
1295 1304 )
1296 1305 elif remind:
1297 1306 repo.ui.status(_(b"(branch merge, don't forget to commit)\n"))
1298 1307 return stats.unresolvedcount > 0
1299 1308
1300 1309
1301 1310 def abortmerge(ui, repo):
1302 1311 ms = mergestatemod.mergestate.read(repo)
1303 1312 if ms.active():
1304 1313 # there were conflicts
1305 1314 node = ms.localctx.hex()
1306 1315 else:
1307 1316 # there were no conflicts, mergestate was not stored
1308 1317 node = repo[b'.'].hex()
1309 1318
1310 1319 repo.ui.status(_(b"aborting the merge, updating back to %s\n") % node[:12])
1311 1320 stats = mergemod.clean_update(repo[node])
1312 1321 assert stats.unresolvedcount == 0
1313 1322 _showstats(repo, stats)
1314 1323
1315 1324
1316 1325 def _incoming(
1317 1326 displaychlist,
1318 1327 subreporecurse,
1319 1328 ui,
1320 1329 repo,
1321 1330 source,
1322 1331 opts,
1323 1332 buffered=False,
1324 1333 subpath=None,
1325 1334 ):
1326 1335 """
1327 1336 Helper for incoming / gincoming.
1328 1337 displaychlist gets called with
1329 1338 (remoterepo, incomingchangesetlist, displayer) parameters,
1330 1339 and is supposed to contain only code that can't be unified.
1331 1340 """
1332 1341 srcs = urlutil.get_pull_paths(repo, ui, [source])
1333 1342 srcs = list(srcs)
1334 1343 if len(srcs) != 1:
1335 1344 msg = _(b'for now, incoming supports only a single source, %d provided')
1336 1345 msg %= len(srcs)
1337 1346 raise error.Abort(msg)
1338 1347 path = srcs[0]
1339 1348 if subpath is None:
1340 1349 peer_path = path
1341 1350 url = path.loc
1342 1351 else:
1343 1352 # XXX path: we are losing the `path` object here. Keeping it would be
1344 1353 # valuable. For example as a "variant" as we do for pushes.
1345 1354 subpath = urlutil.url(subpath)
1346 1355 if subpath.isabs():
1347 1356 peer_path = url = bytes(subpath)
1348 1357 else:
1349 1358 p = urlutil.url(path.loc)
1350 1359 if p.islocal():
1351 1360 normpath = os.path.normpath
1352 1361 else:
1353 1362 normpath = posixpath.normpath
1354 1363 p.path = normpath(b'%s/%s' % (p.path, subpath))
1355 1364 peer_path = url = bytes(p)
1356 1365 other = peer(repo, opts, peer_path)
1357 1366 cleanupfn = other.close
1358 1367 try:
1359 1368 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))
1360 1369 branches = (path.branch, opts.get(b'branch', []))
1361 1370 revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev'))
1362 1371
1363 1372 if revs:
1364 1373 revs = [other.lookup(rev) for rev in revs]
1365 1374 other, chlist, cleanupfn = bundlerepo.getremotechanges(
1366 1375 ui, repo, other, revs, opts.get(b"bundle"), opts.get(b"force")
1367 1376 )
1368 1377
1369 1378 if not chlist:
1370 1379 ui.status(_(b"no changes found\n"))
1371 1380 return subreporecurse()
1372 1381 ui.pager(b'incoming')
1373 1382 displayer = logcmdutil.changesetdisplayer(
1374 1383 ui, other, opts, buffered=buffered
1375 1384 )
1376 1385 displaychlist(other, chlist, displayer)
1377 1386 displayer.close()
1378 1387 finally:
1379 1388 cleanupfn()
1380 1389 subreporecurse()
1381 1390 return 0 # exit code is zero since we found incoming changes
1382 1391
1383 1392
1384 1393 def incoming(ui, repo, source, opts, subpath=None):
1385 1394 def subreporecurse():
1386 1395 ret = 1
1387 1396 if opts.get(b'subrepos'):
1388 1397 ctx = repo[None]
1389 1398 for subpath in sorted(ctx.substate):
1390 1399 sub = ctx.sub(subpath)
1391 1400 ret = min(ret, sub.incoming(ui, source, opts))
1392 1401 return ret
1393 1402
1394 1403 def display(other, chlist, displayer):
1395 1404 limit = logcmdutil.getlimit(opts)
1396 1405 if opts.get(b'newest_first'):
1397 1406 chlist.reverse()
1398 1407 count = 0
1399 1408 for n in chlist:
1400 1409 if limit is not None and count >= limit:
1401 1410 break
1402 1411 parents = [
1403 1412 p for p in other.changelog.parents(n) if p != repo.nullid
1404 1413 ]
1405 1414 if opts.get(b'no_merges') and len(parents) == 2:
1406 1415 continue
1407 1416 count += 1
1408 1417 displayer.show(other[n])
1409 1418
1410 1419 return _incoming(
1411 1420 display, subreporecurse, ui, repo, source, opts, subpath=subpath
1412 1421 )
1413 1422
1414 1423
1415 1424 def _outgoing(ui, repo, dests, opts, subpath=None):
1416 1425 out = set()
1417 1426 others = []
1418 1427 for path in urlutil.get_push_paths(repo, ui, dests):
1419 1428 dest = path.loc
1420 1429 if subpath is not None:
1421 1430 subpath = urlutil.url(subpath)
1422 1431 if subpath.isabs():
1423 1432 dest = bytes(subpath)
1424 1433 else:
1425 1434 p = urlutil.url(dest)
1426 1435 if p.islocal():
1427 1436 normpath = os.path.normpath
1428 1437 else:
1429 1438 normpath = posixpath.normpath
1430 1439 p.path = normpath(b'%s/%s' % (p.path, subpath))
1431 1440 dest = bytes(p)
1432 1441 branches = path.branch, opts.get(b'branch') or []
1433 1442
1434 1443 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
1435 1444 revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev'))
1436 1445 if revs:
1437 1446 revs = [repo[rev].node() for rev in logcmdutil.revrange(repo, revs)]
1438 1447
1439 1448 other = peer(repo, opts, dest)
1440 1449 try:
1441 1450 outgoing = discovery.findcommonoutgoing(
1442 1451 repo, other, revs, force=opts.get(b'force')
1443 1452 )
1444 1453 o = outgoing.missing
1445 1454 out.update(o)
1446 1455 if not o:
1447 1456 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
1448 1457 others.append(other)
1449 1458 except: # re-raises
1450 1459 other.close()
1451 1460 raise
1452 1461 # make sure this is ordered by revision number
1453 1462 outgoing_revs = list(out)
1454 1463 cl = repo.changelog
1455 1464 outgoing_revs.sort(key=cl.rev)
1456 1465 return outgoing_revs, others
1457 1466
1458 1467
1459 1468 def _outgoing_recurse(ui, repo, dests, opts):
1460 1469 ret = 1
1461 1470 if opts.get(b'subrepos'):
1462 1471 ctx = repo[None]
1463 1472 for subpath in sorted(ctx.substate):
1464 1473 sub = ctx.sub(subpath)
1465 1474 ret = min(ret, sub.outgoing(ui, dests, opts))
1466 1475 return ret
1467 1476
1468 1477
1469 1478 def _outgoing_filter(repo, revs, opts):
1470 1479 """apply revision filtering/ordering option for outgoing"""
1471 1480 limit = logcmdutil.getlimit(opts)
1472 1481 no_merges = opts.get(b'no_merges')
1473 1482 if opts.get(b'newest_first'):
1474 1483 revs.reverse()
1475 1484 if limit is None and not no_merges:
1476 1485 for r in revs:
1477 1486 yield r
1478 1487 return
1479 1488
1480 1489 count = 0
1481 1490 cl = repo.changelog
1482 1491 for n in revs:
1483 1492 if limit is not None and count >= limit:
1484 1493 break
1485 1494 parents = [p for p in cl.parents(n) if p != repo.nullid]
1486 1495 if no_merges and len(parents) == 2:
1487 1496 continue
1488 1497 count += 1
1489 1498 yield n
1490 1499
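The generator above applies --newest-first, --limit and --no-merges to the outgoing node list. The same filtering logic, restated as a self-contained sketch over plain integers, with a dict standing in for repo.changelog.parents:

    def filter_revs(revs, parents, limit=None, no_merges=False, newest_first=False):
        """Yield revisions honoring limit/no_merges/newest_first, like _outgoing_filter."""
        revs = list(revs)
        if newest_first:
            revs.reverse()
        count = 0
        for r in revs:
            if limit is not None and count >= limit:
                break
            if no_merges and len(parents.get(r, ())) == 2:
                continue  # skip merge revisions
            count += 1
            yield r

    # example graph: 3 is a merge of 1 and 2
    parents = {0: (), 1: (0,), 2: (0,), 3: (1, 2)}
    assert list(filter_revs([0, 1, 2, 3], parents, no_merges=True)) == [0, 1, 2]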
1491 1500
1492 1501 def outgoing(ui, repo, dests, opts, subpath=None):
1493 1502 if opts.get(b'graph'):
1494 1503 logcmdutil.checkunsupportedgraphflags([], opts)
1495 1504 o, others = _outgoing(ui, repo, dests, opts, subpath=subpath)
1496 1505 ret = 1
1497 1506 try:
1498 1507 if o:
1499 1508 ret = 0
1500 1509
1501 1510 if opts.get(b'graph'):
1502 1511 revdag = logcmdutil.graphrevs(repo, o, opts)
1503 1512 ui.pager(b'outgoing')
1504 1513 displayer = logcmdutil.changesetdisplayer(
1505 1514 ui, repo, opts, buffered=True
1506 1515 )
1507 1516 logcmdutil.displaygraph(
1508 1517 ui, repo, revdag, displayer, graphmod.asciiedges
1509 1518 )
1510 1519 else:
1511 1520 ui.pager(b'outgoing')
1512 1521 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
1513 1522 for n in _outgoing_filter(repo, o, opts):
1514 1523 displayer.show(repo[n])
1515 1524 displayer.close()
1516 1525 for oth in others:
1517 1526 cmdutil.outgoinghooks(ui, repo, oth, opts, o)
1518 1527 ret = min(ret, _outgoing_recurse(ui, repo, dests, opts))
1519 1528 return ret # exit code is zero since we found outgoing changes
1520 1529 finally:
1521 1530 for oth in others:
1522 1531 oth.close()
1523 1532
1524 1533
1525 1534 def verify(repo, level=None):
1526 1535 """verify the consistency of a repository"""
1527 1536 ret = verifymod.verify(repo, level=level)
1528 1537
1529 1538 # Broken subrepo references in hidden csets don't seem worth worrying about,
1530 1539 # since they can't be pushed/pulled, and --hidden can be used if they are a
1531 1540 # concern.
1532 1541
1533 1542 # pathto() is needed for -R case
1534 1543 revs = repo.revs(
1535 1544 b"filelog(%s)", util.pathto(repo.root, repo.getcwd(), b'.hgsubstate')
1536 1545 )
1537 1546
1538 1547 if revs:
1539 1548 repo.ui.status(_(b'checking subrepo links\n'))
1540 1549 for rev in revs:
1541 1550 ctx = repo[rev]
1542 1551 try:
1543 1552 for subpath in ctx.substate:
1544 1553 try:
1545 1554 ret = (
1546 1555 ctx.sub(subpath, allowcreate=False).verify() or ret
1547 1556 )
1548 1557 except error.RepoError as e:
1549 1558 repo.ui.warn(b'%d: %s\n' % (rev, e))
1550 1559 except Exception:
1551 1560 repo.ui.warn(
1552 1561 _(b'.hgsubstate is corrupt in revision %s\n')
1553 1562 % short(ctx.node())
1554 1563 )
1555 1564
1556 1565 return ret
1557 1566
1558 1567
1559 1568 def remoteui(src, opts):
1560 1569 """build a remote ui from ui or repo and opts"""
1561 1570 if util.safehasattr(src, b'baseui'): # looks like a repository
1562 1571 dst = src.baseui.copy() # drop repo-specific config
1563 1572 src = src.ui # copy target options from repo
1564 1573 else: # assume it's a global ui object
1565 1574 dst = src.copy() # keep all global options
1566 1575
1567 1576 # copy ssh-specific options
1568 1577 for o in b'ssh', b'remotecmd':
1569 1578 v = opts.get(o) or src.config(b'ui', o)
1570 1579 if v:
1571 1580 dst.setconfig(b"ui", o, v, b'copied')
1572 1581
1573 1582 # copy bundle-specific options
1574 1583 r = src.config(b'bundle', b'mainreporoot')
1575 1584 if r:
1576 1585 dst.setconfig(b'bundle', b'mainreporoot', r, b'copied')
1577 1586
1578 1587 # copy selected local settings to the remote ui
1579 1588 for sect in (b'auth', b'hostfingerprints', b'hostsecurity', b'http_proxy'):
1580 1589 for key, val in src.configitems(sect):
1581 1590 dst.setconfig(sect, key, val, b'copied')
1582 1591 v = src.config(b'web', b'cacerts')
1583 1592 if v:
1584 1593 dst.setconfig(b'web', b'cacerts', util.expandpath(v), b'copied')
1585 1594
1586 1595 return dst
1587 1596
1588 1597
1589 1598 # Files of interest
1590 1599 # Used to check if the repository has changed, by looking at the mtime and size of
1591 1600 # these files.
1592 1601 foi = [
1593 1602 (b'spath', b'00changelog.i'),
1594 1603 (b'spath', b'phaseroots'), # ! phase can change content at the same size
1595 1604 (b'spath', b'obsstore'),
1596 1605 (b'path', b'bookmarks'), # ! bookmark can change content at the same size
1597 1606 ]
1598 1607
1599 1608
1600 1609 class cachedlocalrepo:
1601 1610 """Holds a localrepository that can be cached and reused."""
1602 1611
1603 1612 def __init__(self, repo):
1604 1613 """Create a new cached repo from an existing repo.
1605 1614
1606 1615 We assume the passed in repo was recently created. If the
1607 1616 repo has changed between when it was created and when it was
1608 1617 turned into a cache, it may not refresh properly.
1609 1618 """
1610 1619 assert isinstance(repo, localrepo.localrepository)
1611 1620 self._repo = repo
1612 1621 self._state, self.mtime = self._repostate()
1613 1622 self._filtername = repo.filtername
1614 1623
1615 1624 def fetch(self):
1616 1625 """Refresh (if necessary) and return a repository.
1617 1626
1618 1627 If the cached instance is out of date, it will be recreated
1619 1628 automatically and returned.
1620 1629
1621 1630 Returns a tuple of the repo and a boolean indicating whether a new
1622 1631 repo instance was created.
1623 1632 """
1624 1633 # We compare the mtimes and sizes of some well-known files to
1625 1634 # determine if the repo changed. This is not precise, as mtimes
1626 1635 # are susceptible to clock skew and imprecise filesystems and
1627 1636 # file content can change while maintaining the same size.
1628 1637
1629 1638 state, mtime = self._repostate()
1630 1639 if state == self._state:
1631 1640 return self._repo, False
1632 1641
1633 1642 repo = repository(self._repo.baseui, self._repo.url())
1634 1643 if self._filtername:
1635 1644 self._repo = repo.filtered(self._filtername)
1636 1645 else:
1637 1646 self._repo = repo.unfiltered()
1638 1647 self._state = state
1639 1648 self.mtime = mtime
1640 1649
1641 1650 return self._repo, True
1642 1651
1643 1652 def _repostate(self):
1644 1653 state = []
1645 1654 maxmtime = -1
1646 1655 for attr, fname in foi:
1647 1656 prefix = getattr(self._repo, attr)
1648 1657 p = os.path.join(prefix, fname)
1649 1658 try:
1650 1659 st = os.stat(p)
1651 1660 except OSError:
1652 1661 st = os.stat(prefix)
1653 1662 state.append((st[stat.ST_MTIME], st.st_size))
1654 1663 maxmtime = max(maxmtime, st[stat.ST_MTIME])
1655 1664
1656 1665 return tuple(state), maxmtime
1657 1666
1658 1667 def copy(self):
1659 1668 """Obtain a copy of this class instance.
1660 1669
1661 1670 A new localrepository instance is obtained. The new instance should be
1662 1671 completely independent of the original.
1663 1672 """
1664 1673 repo = repository(self._repo.baseui, self._repo.origroot)
1665 1674 if self._filtername:
1666 1675 repo = repo.filtered(self._filtername)
1667 1676 else:
1668 1677 repo = repo.unfiltered()
1669 1678 c = cachedlocalrepo(repo)
1670 1679 c._state = self._state
1671 1680 c.mtime = self.mtime
1672 1681 return c
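cachedlocalrepo._repostate() fingerprints the repository with the (mtime, size) of the files-of-interest listed in foi, and fetch() rebuilds the repo object only when that tuple changes. A standalone sketch of the same idea for an arbitrary list of paths; the path below is hypothetical, and missing files are simplified to None rather than falling back to the parent directory as the real code does:

    import os
    import stat

    def repo_state(paths):
        """Return a hashable fingerprint of (mtime, size) for each path."""
        state = []
        for p in paths:
            try:
                st = os.stat(p)
            except OSError:
                state.append(None)  # simplification: missing file still contributes
                continue
            state.append((st[stat.ST_MTIME], st.st_size))
        return tuple(state)

    # a consumer would re-open the repository only when the fingerprint changes
    before = repo_state(['/tmp/repo/.hg/store/00changelog.i'])  # hypothetical path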
@@ -1,640 +1,659 b''
1 1 # httppeer.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9
10 10 import errno
11 11 import io
12 12 import os
13 13 import socket
14 14 import struct
15 15
16 16 from concurrent import futures
17 17 from .i18n import _
18 18 from .pycompat import getattr
19 19 from . import (
20 20 bundle2,
21 21 error,
22 22 httpconnection,
23 23 pycompat,
24 24 statichttprepo,
25 25 url as urlmod,
26 26 util,
27 27 wireprotov1peer,
28 28 )
29 29 from .utils import urlutil
30 30
31 31 httplib = util.httplib
32 32 urlerr = util.urlerr
33 33 urlreq = util.urlreq
34 34
35 35
36 36 def encodevalueinheaders(value, header, limit):
37 37 """Encode a string value into multiple HTTP headers.
38 38
39 39 ``value`` will be encoded into 1 or more HTTP headers with the names
40 40 ``header-<N>`` where ``<N>`` is an integer starting at 1. Each header
41 41 name + value will be at most ``limit`` bytes long.
42 42
43 43 Returns an iterable of 2-tuples consisting of header names and
44 44 values as native strings.
45 45 """
46 46 # HTTP Headers are ASCII. Python 3 requires them to be unicodes,
47 47 # not bytes. This function always takes bytes in as arguments.
48 48 fmt = pycompat.strurl(header) + r'-%s'
49 49 # Note: it is *NOT* a bug that the last bit here is a bytestring
50 50 # and not a unicode: we're just getting the encoded length anyway,
51 51 # and using an r-string to make it portable between Python 2 and 3
52 52 # doesn't work because then the \r is a literal backslash-r
53 53 # instead of a carriage return.
54 54 valuelen = limit - len(fmt % '000') - len(b': \r\n')
55 55 result = []
56 56
57 57 n = 0
58 58 for i in range(0, len(value), valuelen):
59 59 n += 1
60 60 result.append((fmt % str(n), pycompat.strurl(value[i : i + valuelen])))
61 61
62 62 return result
63 63
64 64
65 65 class _multifile:
66 66 def __init__(self, *fileobjs):
67 67 for f in fileobjs:
68 68 if not util.safehasattr(f, b'length'):
69 69 raise ValueError(
70 70 b'_multifile only supports file objects that '
71 71 b'have a length but this one does not:',
72 72 type(f),
73 73 f,
74 74 )
75 75 self._fileobjs = fileobjs
76 76 self._index = 0
77 77
78 78 @property
79 79 def length(self):
80 80 return sum(f.length for f in self._fileobjs)
81 81
82 82 def read(self, amt=None):
83 83 if amt <= 0:
84 84 return b''.join(f.read() for f in self._fileobjs)
85 85 parts = []
86 86 while amt and self._index < len(self._fileobjs):
87 87 parts.append(self._fileobjs[self._index].read(amt))
88 88 got = len(parts[-1])
89 89 if got < amt:
90 90 self._index += 1
91 91 amt -= got
92 92 return b''.join(parts)
93 93
94 94 def seek(self, offset, whence=os.SEEK_SET):
95 95 if whence != os.SEEK_SET:
96 96 raise NotImplementedError(
97 97 b'_multifile does not support anything other'
98 98 b' than os.SEEK_SET for whence on seek()'
99 99 )
100 100 if offset != 0:
101 101 raise NotImplementedError(
102 102 b'_multifile only supports seeking to start, but that '
103 103 b'could be fixed if you need it'
104 104 )
105 105 for f in self._fileobjs:
106 106 f.seek(0)
107 107 self._index = 0
108 108
109 109
110 110 def makev1commandrequest(
111 111 ui, requestbuilder, caps, capablefn, repobaseurl, cmd, args
112 112 ):
113 113 """Make an HTTP request to run a command for a version 1 client.
114 114
115 115 ``caps`` is a set of known server capabilities. The value may be
116 116 None if capabilities are not yet known.
117 117
118 118 ``capablefn`` is a function to evaluate a capability.
119 119
120 120 ``cmd``, ``args``, and ``data`` define the command, its arguments, and
121 121 raw data to pass to it.
122 122 """
123 123 if cmd == b'pushkey':
124 124 args[b'data'] = b''
125 125 data = args.pop(b'data', None)
126 126 headers = args.pop(b'headers', {})
127 127
128 128 ui.debug(b"sending %s command\n" % cmd)
129 129 q = [(b'cmd', cmd)]
130 130 headersize = 0
131 131 # Important: don't use self.capable() here or else you end up
132 132 # with infinite recursion when trying to look up capabilities
133 133 # for the first time.
134 134 postargsok = caps is not None and b'httppostargs' in caps
135 135
136 136 # Send arguments via POST.
137 137 if postargsok and args:
138 138 strargs = urlreq.urlencode(sorted(args.items()))
139 139 if not data:
140 140 data = strargs
141 141 else:
142 142 if isinstance(data, bytes):
143 143 i = io.BytesIO(data)
144 144 i.length = len(data)
145 145 data = i
146 146 argsio = io.BytesIO(strargs)
147 147 argsio.length = len(strargs)
148 148 data = _multifile(argsio, data)
149 149 headers['X-HgArgs-Post'] = len(strargs)
150 150 elif args:
151 151 # Calling self.capable() can infinite loop if we are calling
152 152 # "capabilities". But that command should never accept wire
153 153 # protocol arguments. So this should never happen.
154 154 assert cmd != b'capabilities'
155 155 httpheader = capablefn(b'httpheader')
156 156 if httpheader:
157 157 headersize = int(httpheader.split(b',', 1)[0])
158 158
159 159 # Send arguments via HTTP headers.
160 160 if headersize > 0:
161 161 # The headers can typically carry more data than the URL.
162 162 encoded_args = urlreq.urlencode(sorted(args.items()))
163 163 for header, value in encodevalueinheaders(
164 164 encoded_args, b'X-HgArg', headersize
165 165 ):
166 166 headers[header] = value
167 167 # Send arguments via query string (Mercurial <1.9).
168 168 else:
169 169 q += sorted(args.items())
170 170
171 171 qs = b'?%s' % urlreq.urlencode(q)
172 172 cu = b"%s%s" % (repobaseurl, qs)
173 173 size = 0
174 174 if util.safehasattr(data, b'length'):
175 175 size = data.length
176 176 elif data is not None:
177 177 size = len(data)
178 178 if data is not None and 'Content-Type' not in headers:
179 179 headers['Content-Type'] = 'application/mercurial-0.1'
180 180
181 181 # Tell the server we accept application/mercurial-0.2 and multiple
182 182 # compression formats if the server is capable of emitting those
183 183 # payloads.
184 184 # Note: Keep this set empty by default, as client advertisement of
185 185 # protocol parameters should only occur after the handshake.
186 186 protoparams = set()
187 187
188 188 mediatypes = set()
189 189 if caps is not None:
190 190 mt = capablefn(b'httpmediatype')
191 191 if mt:
192 192 protoparams.add(b'0.1')
193 193 mediatypes = set(mt.split(b','))
194 194
195 195 protoparams.add(b'partial-pull')
196 196
197 197 if b'0.2tx' in mediatypes:
198 198 protoparams.add(b'0.2')
199 199
200 200 if b'0.2tx' in mediatypes and capablefn(b'compression'):
201 201 # We /could/ compare supported compression formats and prune
202 202 # non-mutually supported or error if nothing is mutually supported.
203 203 # For now, send the full list to the server and have it error.
204 204 comps = [
205 205 e.wireprotosupport().name
206 206 for e in util.compengines.supportedwireengines(util.CLIENTROLE)
207 207 ]
208 208 protoparams.add(b'comp=%s' % b','.join(comps))
209 209
210 210 if protoparams:
211 211 protoheaders = encodevalueinheaders(
212 212 b' '.join(sorted(protoparams)), b'X-HgProto', headersize or 1024
213 213 )
214 214 for header, value in protoheaders:
215 215 headers[header] = value
216 216
217 217 varyheaders = []
218 218 for header in headers:
219 219 if header.lower().startswith('x-hg'):
220 220 varyheaders.append(header)
221 221
222 222 if varyheaders:
223 223 headers['Vary'] = ','.join(sorted(varyheaders))
224 224
225 225 req = requestbuilder(pycompat.strurl(cu), data, headers)
226 226
227 227 if data is not None:
228 228 ui.debug(b"sending %d bytes\n" % size)
229 229 req.add_unredirected_header('Content-Length', '%d' % size)
230 230
231 231 return req, cu, qs
232 232
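makev1commandrequest() picks one of three transports for command arguments: the POST body when the server advertises httppostargs, X-HgArg-<N> headers when httpheader gives a size budget, and the query string as the pre-1.9 fallback. A minimal sketch of that last fallback using only the standard library; the URL and arguments are made up:

    from urllib.parse import urlencode

    args = {'cmd': 'lookup', 'key': 'tip'}  # hypothetical wire command arguments
    # query-string fallback, mirroring the `q += sorted(args.items())` branch above
    url = 'https://hg.example.com/repo' + '?' + urlencode(sorted(args.items()))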
233 233
234 234 def sendrequest(ui, opener, req):
235 235 """Send a prepared HTTP request.
236 236
237 237 Returns the response object.
238 238 """
239 239 dbg = ui.debug
240 240 if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'):
241 241 line = b'devel-peer-request: %s\n'
242 242 dbg(
243 243 line
244 244 % b'%s %s'
245 245 % (
246 246 pycompat.bytesurl(req.get_method()),
247 247 pycompat.bytesurl(req.get_full_url()),
248 248 )
249 249 )
250 250 hgargssize = None
251 251
252 252 for header, value in sorted(req.header_items()):
253 253 header = pycompat.bytesurl(header)
254 254 value = pycompat.bytesurl(value)
255 255 if header.startswith(b'X-hgarg-'):
256 256 if hgargssize is None:
257 257 hgargssize = 0
258 258 hgargssize += len(value)
259 259 else:
260 260 dbg(line % b' %s %s' % (header, value))
261 261
262 262 if hgargssize is not None:
263 263 dbg(
264 264 line
265 265 % b' %d bytes of commands arguments in headers'
266 266 % hgargssize
267 267 )
268 268 data = req.data
269 269 if data is not None:
270 270 length = getattr(data, 'length', None)
271 271 if length is None:
272 272 length = len(data)
273 273 dbg(line % b' %d bytes of data' % length)
274 274
275 275 start = util.timer()
276 276
277 277 res = None
278 278 try:
279 279 res = opener.open(req)
280 280 except urlerr.httperror as inst:
281 281 if inst.code == 401:
282 282 raise error.Abort(_(b'authorization failed'))
283 283 raise
284 284 except httplib.HTTPException as inst:
285 285 ui.debug(
286 286 b'http error requesting %s\n'
287 287 % urlutil.hidepassword(req.get_full_url())
288 288 )
289 289 ui.traceback()
290 290 raise IOError(None, inst)
291 291 finally:
292 292 if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'):
293 293 code = res.code if res else -1
294 294 dbg(
295 295 line
296 296 % b' finished in %.4f seconds (%d)'
297 297 % (util.timer() - start, code)
298 298 )
299 299
300 300 # Insert error handlers for common I/O failures.
301 301 urlmod.wrapresponse(res)
302 302
303 303 return res
304 304
305 305
306 306 class RedirectedRepoError(error.RepoError):
307 307 def __init__(self, msg, respurl):
308 308 super(RedirectedRepoError, self).__init__(msg)
309 309 self.respurl = respurl
310 310
311 311
312 312 def parsev1commandresponse(ui, baseurl, requrl, qs, resp, compressible):
313 313 # record the url we got redirected to
314 314 redirected = False
315 315 respurl = pycompat.bytesurl(resp.geturl())
316 316 if respurl.endswith(qs):
317 317 respurl = respurl[: -len(qs)]
318 318 qsdropped = False
319 319 else:
320 320 qsdropped = True
321 321
322 322 if baseurl.rstrip(b'/') != respurl.rstrip(b'/'):
323 323 redirected = True
324 324 if not ui.quiet:
325 325 ui.warn(_(b'real URL is %s\n') % respurl)
326 326
327 327 try:
328 328 proto = pycompat.bytesurl(resp.getheader('content-type', ''))
329 329 except AttributeError:
330 330 proto = pycompat.bytesurl(resp.headers.get('content-type', ''))
331 331
332 332 safeurl = urlutil.hidepassword(baseurl)
333 333 if proto.startswith(b'application/hg-error'):
334 334 raise error.OutOfBandError(resp.read())
335 335
336 336 # Pre 1.0 versions of Mercurial used text/plain and
337 337 # application/hg-changegroup. We don't support such old servers.
338 338 if not proto.startswith(b'application/mercurial-'):
339 339 ui.debug(b"requested URL: '%s'\n" % urlutil.hidepassword(requrl))
340 340 msg = _(
341 341 b"'%s' does not appear to be an hg repository:\n"
342 342 b"---%%<--- (%s)\n%s\n---%%<---\n"
343 343 ) % (safeurl, proto or b'no content-type', resp.read(1024))
344 344
345 345 # Some servers may strip the query string from the redirect. We
346 346 # raise a special error type so callers can react to this specially.
347 347 if redirected and qsdropped:
348 348 raise RedirectedRepoError(msg, respurl)
349 349 else:
350 350 raise error.RepoError(msg)
351 351
352 352 try:
353 353 subtype = proto.split(b'-', 1)[1]
354 354
355 355 version_info = tuple([int(n) for n in subtype.split(b'.')])
356 356 except ValueError:
357 357 raise error.RepoError(
358 358 _(b"'%s' sent a broken Content-Type header (%s)") % (safeurl, proto)
359 359 )
360 360
361 361 # TODO consider switching to a decompression reader that uses
362 362 # generators.
363 363 if version_info == (0, 1):
364 364 if compressible:
365 365 resp = util.compengines[b'zlib'].decompressorreader(resp)
366 366
367 367 elif version_info == (0, 2):
368 368 # application/mercurial-0.2 always identifies the compression
369 369 # engine in the payload header.
370 370 elen = struct.unpack(b'B', util.readexactly(resp, 1))[0]
371 371 ename = util.readexactly(resp, elen)
372 372 engine = util.compengines.forwiretype(ename)
373 373
374 374 resp = engine.decompressorreader(resp)
375 375 else:
376 376 raise error.RepoError(
377 377 _(b"'%s' uses newer protocol %s") % (safeurl, subtype)
378 378 )
379 379
380 380 return respurl, proto, resp
381 381
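For application/mercurial-0.2 responses the body starts with a one-byte length followed by the compression engine name, which is exactly what the struct.unpack/readexactly pair above consumes. A self-contained sketch of that framing; the payload bytes are made up:

    import struct

    # media type application/mercurial-0.2 prefixes the body with the compression
    # engine name: one length byte, then the name itself (e.g. b'zlib' or b'zstd')
    def frame_v02(engine_name, payload):
        return struct.pack(b'B', len(engine_name)) + engine_name + payload

    def parse_v02(body):
        elen = struct.unpack(b'B', body[:1])[0]
        return body[1:1 + elen], body[1 + elen:]  # (engine name, compressed data)

    name, data = parse_v02(frame_v02(b'zlib', b'...compressed bytes...'))
    assert name == b'zlib'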
382 382
383 383 class httppeer(wireprotov1peer.wirepeer):
384 def __init__(self, ui, path, url, opener, requestbuilder, caps):
385 super().__init__(ui, path=path)
384 def __init__(
385 self, ui, path, url, opener, requestbuilder, caps, remotehidden=False
386 ):
387 super().__init__(ui, path=path, remotehidden=remotehidden)
388 if remotehidden:
389 msg = _(
390 b"ignoring `--remote-hidden` request\n"
391 b"(access to hidden changeset for http peers not "
392 b"supported yet)\n"
393 )
394 ui.warn(msg)
386 395 self._url = url
387 396 self._caps = caps
388 397 self.limitedarguments = caps is not None and b'httppostargs' not in caps
389 398 self._urlopener = opener
390 399 self._requestbuilder = requestbuilder
391 400
392 401 def __del__(self):
393 402 for h in self._urlopener.handlers:
394 403 h.close()
395 404 getattr(h, "close_all", lambda: None)()
396 405
397 406 # Begin of ipeerconnection interface.
398 407
399 408 def url(self):
400 409 return self.path.loc
401 410
402 411 def local(self):
403 412 return None
404 413
405 414 def canpush(self):
406 415 return True
407 416
408 417 def close(self):
409 418 try:
410 419 reqs, sent, recv = (
411 420 self._urlopener.requestscount,
412 421 self._urlopener.sentbytescount,
413 422 self._urlopener.receivedbytescount,
414 423 )
415 424 except AttributeError:
416 425 return
417 426 self.ui.note(
418 427 _(
419 428 b'(sent %d HTTP requests and %d bytes; '
420 429 b'received %d bytes in responses)\n'
421 430 )
422 431 % (reqs, sent, recv)
423 432 )
424 433
425 434 # End of ipeerconnection interface.
426 435
427 436 # Begin of ipeercommands interface.
428 437
429 438 def capabilities(self):
430 439 return self._caps
431 440
432 441 # End of ipeercommands interface.
433 442
434 443 def _callstream(self, cmd, _compressible=False, **args):
435 444 args = pycompat.byteskwargs(args)
436 445
437 446 req, cu, qs = makev1commandrequest(
438 447 self.ui,
439 448 self._requestbuilder,
440 449 self._caps,
441 450 self.capable,
442 451 self._url,
443 452 cmd,
444 453 args,
445 454 )
446 455
447 456 resp = sendrequest(self.ui, self._urlopener, req)
448 457
449 458 self._url, ct, resp = parsev1commandresponse(
450 459 self.ui, self._url, cu, qs, resp, _compressible
451 460 )
452 461
453 462 return resp
454 463
455 464 def _call(self, cmd, **args):
456 465 fp = self._callstream(cmd, **args)
457 466 try:
458 467 return fp.read()
459 468 finally:
460 469 # if using keepalive, allow connection to be reused
461 470 fp.close()
462 471
463 472 def _callpush(self, cmd, cg, **args):
464 473 # have to stream bundle to a temp file because we do not have
465 474 # http 1.1 chunked transfer.
466 475
467 476 types = self.capable(b'unbundle')
468 477 try:
469 478 types = types.split(b',')
470 479 except AttributeError:
471 480 # servers older than d1b16a746db6 will send 'unbundle' as a
472 481 # boolean capability. They only support headerless/uncompressed
473 482 # bundles.
474 483 types = [b""]
475 484 for x in types:
476 485 if x in bundle2.bundletypes:
477 486 type = x
478 487 break
479 488
480 489 tempname = bundle2.writebundle(self.ui, cg, None, type)
481 490 fp = httpconnection.httpsendfile(self.ui, tempname, b"rb")
482 491 headers = {'Content-Type': 'application/mercurial-0.1'}
483 492
484 493 try:
485 494 r = self._call(cmd, data=fp, headers=headers, **args)
486 495 vals = r.split(b'\n', 1)
487 496 if len(vals) < 2:
488 497 raise error.ResponseError(_(b"unexpected response:"), r)
489 498 return vals
490 499 except urlerr.httperror:
491 500 # Catch and re-raise these so we don't try and treat them
492 501 # like generic socket errors. They lack any values in
493 502 # .args on Python 3 which breaks our socket.error block.
494 503 raise
495 504 except socket.error as err:
496 505 if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
497 506 raise error.Abort(_(b'push failed: %s') % err.args[1])
498 507 raise error.Abort(err.args[1])
499 508 finally:
500 509 fp.close()
501 510 os.unlink(tempname)
502 511
503 512 def _calltwowaystream(self, cmd, fp, **args):
504 513 filename = None
505 514 try:
506 515 # dump bundle to disk
507 516 fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg")
508 517 with os.fdopen(fd, "wb") as fh:
509 518 d = fp.read(4096)
510 519 while d:
511 520 fh.write(d)
512 521 d = fp.read(4096)
513 522 # start http push
514 523 with httpconnection.httpsendfile(self.ui, filename, b"rb") as fp_:
515 524 headers = {'Content-Type': 'application/mercurial-0.1'}
516 525 return self._callstream(cmd, data=fp_, headers=headers, **args)
517 526 finally:
518 527 if filename is not None:
519 528 os.unlink(filename)
520 529
521 530 def _callcompressable(self, cmd, **args):
522 531 return self._callstream(cmd, _compressible=True, **args)
523 532
524 533 def _abort(self, exception):
525 534 raise exception
526 535
527 536
528 537 class queuedcommandfuture(futures.Future):
529 538 """Wraps result() on command futures to trigger submission on call."""
530 539
531 540 def result(self, timeout=None):
532 541 if self.done():
533 542 return futures.Future.result(self, timeout)
534 543
535 544 self._peerexecutor.sendcommands()
536 545
537 546 # sendcommands() will restore the original __class__ and self.result
538 547 # will resolve to Future.result.
539 548 return self.result(timeout)
540 549
541 550
542 551 def performhandshake(ui, url, opener, requestbuilder):
543 552 # The handshake is a request to the capabilities command.
544 553
545 554 caps = None
546 555
547 556 def capable(x):
548 557 raise error.ProgrammingError(b'should not be called')
549 558
550 559 args = {}
551 560
552 561 req, requrl, qs = makev1commandrequest(
553 562 ui, requestbuilder, caps, capable, url, b'capabilities', args
554 563 )
555 564 resp = sendrequest(ui, opener, req)
556 565
557 566 # The server may redirect us to the repo root, stripping the
558 567 # ?cmd=capabilities query string from the URL. The server would likely
559 568 # return HTML in this case and ``parsev1commandresponse()`` would raise.
560 569 # We catch this special case and re-issue the capabilities request against
561 570 # the new URL.
562 571 #
563 572 # We should ideally not do this, as a redirect that drops the query
564 573 # string from the URL is arguably a server bug. (Garbage in, garbage out).
565 574 # However, Mercurial clients for several years appeared to handle this
566 575 # issue without behavior degradation. And according to issue 5860, it may
567 576 # be a longstanding bug in some server implementations. So we allow a
568 577 # redirect that drops the query string to "just work."
569 578 try:
570 579 respurl, ct, resp = parsev1commandresponse(
571 580 ui, url, requrl, qs, resp, compressible=False
572 581 )
573 582 except RedirectedRepoError as e:
574 583 req, requrl, qs = makev1commandrequest(
575 584 ui, requestbuilder, caps, capable, e.respurl, b'capabilities', args
576 585 )
577 586 resp = sendrequest(ui, opener, req)
578 587 respurl, ct, resp = parsev1commandresponse(
579 588 ui, url, requrl, qs, resp, compressible=False
580 589 )
581 590
582 591 try:
583 592 rawdata = resp.read()
584 593 finally:
585 594 resp.close()
586 595
587 596 if not ct.startswith(b'application/mercurial-'):
588 597 raise error.ProgrammingError(b'unexpected content-type: %s' % ct)
589 598
590 599 info = {b'v1capabilities': set(rawdata.split())}
591 600
592 601 return respurl, info
593 602
594 603
595 def _make_peer(ui, path, opener=None, requestbuilder=urlreq.request):
604 def _make_peer(
605 ui, path, opener=None, requestbuilder=urlreq.request, remotehidden=False
606 ):
596 607 """Construct an appropriate HTTP peer instance.
597 608
598 609 ``opener`` is an ``url.opener`` that should be used to establish
599 610 connections, perform HTTP requests.
600 611
601 612 ``requestbuilder`` is the type used for constructing HTTP requests.
602 613 It exists as an argument so extensions can override the default.
603 614 """
604 615 if path.url.query or path.url.fragment:
605 616 msg = _(b'unsupported URL component: "%s"')
606 617 msg %= path.url.query or path.url.fragment
607 618 raise error.Abort(msg)
608 619
609 620 # urllib cannot handle URLs with embedded user or passwd.
610 621 url, authinfo = path.url.authinfo()
611 622 ui.debug(b'using %s\n' % url)
612 623
613 624 opener = opener or urlmod.opener(ui, authinfo)
614 625
615 626 respurl, info = performhandshake(ui, url, opener, requestbuilder)
616 627
617 628 return httppeer(
618 ui, path, respurl, opener, requestbuilder, info[b'v1capabilities']
629 ui,
630 path,
631 respurl,
632 opener,
633 requestbuilder,
634 info[b'v1capabilities'],
635 remotehidden=remotehidden,
619 636 )
620 637
621 638
622 def make_peer(ui, path, create, intents=None, createopts=None):
639 def make_peer(
640 ui, path, create, intents=None, createopts=None, remotehidden=False
641 ):
623 642 if create:
624 643 raise error.Abort(_(b'cannot create new http repository'))
625 644 try:
626 645 if path.url.scheme == b'https' and not urlmod.has_https:
627 646 raise error.Abort(
628 647 _(b'Python support for SSL and HTTPS is not installed')
629 648 )
630 649
631 inst = _make_peer(ui, path)
650 inst = _make_peer(ui, path, remotehidden=remotehidden)
632 651
633 652 return inst
634 653 except error.RepoError as httpexception:
635 654 try:
636 655 r = statichttprepo.make_peer(ui, b"static-" + path.loc, create)
637 656 ui.note(_(b'(falling back to static-http)\n'))
638 657 return r
639 658 except error.RepoError:
640 659 raise httpexception # use the original http RepoError instead
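This is the point of the changeset: make_peer() and _make_peer() now accept a remotehidden flag so that `hg pull --remote-hidden` can be threaded down to peer construction; for HTTP peers the flag is currently only acknowledged with a warning. A hedged sketch of a caller passing it; ui and the urlutil path object are assumed to exist already:

    from mercurial import httppeer

    # `ui` and `path` (a urlutil.path instance) are assumed to come from the usual
    # helpers; remotehidden=True is what --remote-hidden ultimately requests
    peer = httppeer.make_peer(ui, path, create=False, remotehidden=True)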
@@ -1,2065 +1,2065 b''
1 1 # repository.py - Interfaces and base classes for repositories and peers.
2 2 # coding: utf-8
3 3 #
4 4 # Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9
10 10 from ..i18n import _
11 11 from .. import error
12 12 from . import util as interfaceutil
13 13
14 14 # Local repository feature string.
15 15
16 16 # Revlogs are being used for file storage.
17 17 REPO_FEATURE_REVLOG_FILE_STORAGE = b'revlogfilestorage'
18 18 # The storage part of the repository is shared from an external source.
19 19 REPO_FEATURE_SHARED_STORAGE = b'sharedstore'
20 20 # LFS supported for backing file storage.
21 21 REPO_FEATURE_LFS = b'lfs'
22 22 # Repository supports being stream cloned.
23 23 REPO_FEATURE_STREAM_CLONE = b'streamclone'
24 24 # Repository supports (at least) some sidedata to be stored
25 25 REPO_FEATURE_SIDE_DATA = b'side-data'
26 26 # Files storage may lack data for all ancestors.
27 27 REPO_FEATURE_SHALLOW_FILE_STORAGE = b'shallowfilestorage'
28 28
29 29 REVISION_FLAG_CENSORED = 1 << 15
30 30 REVISION_FLAG_ELLIPSIS = 1 << 14
31 31 REVISION_FLAG_EXTSTORED = 1 << 13
32 32 REVISION_FLAG_HASCOPIESINFO = 1 << 12
33 33
34 34 REVISION_FLAGS_KNOWN = (
35 35 REVISION_FLAG_CENSORED
36 36 | REVISION_FLAG_ELLIPSIS
37 37 | REVISION_FLAG_EXTSTORED
38 38 | REVISION_FLAG_HASCOPIESINFO
39 39 )
40 40
41 41 CG_DELTAMODE_STD = b'default'
42 42 CG_DELTAMODE_PREV = b'previous'
43 43 CG_DELTAMODE_FULL = b'fulltext'
44 44 CG_DELTAMODE_P1 = b'p1'
45 45
46 46
47 47 ## Cache related constants:
48 48 #
49 49 # Used to control which cache should be warmed in a repo.updatecaches(…) call.
50 50
51 51 # Warm branchmaps of all known repoview filter-levels
52 52 CACHE_BRANCHMAP_ALL = b"branchmap-all"
53 53 # Warm branchmaps of the repoview filter-level used by the server
54 54 CACHE_BRANCHMAP_SERVED = b"branchmap-served"
55 55 # Warm internal changelog cache (eg: persistent nodemap)
56 56 CACHE_CHANGELOG_CACHE = b"changelog-cache"
57 57 # Warm full manifest cache
58 58 CACHE_FULL_MANIFEST = b"full-manifest"
59 59 # Warm file-node-tags cache
60 60 CACHE_FILE_NODE_TAGS = b"file-node-tags"
61 61 # Warm internal manifestlog cache (eg: persistent nodemap)
62 62 CACHE_MANIFESTLOG_CACHE = b"manifestlog-cache"
64 64 # Warm rev branch cache
64 64 CACHE_REV_BRANCH = b"rev-branch-cache"
66 66 # Warm tags' cache for the default repoview
66 66 CACHE_TAGS_DEFAULT = b"tags-default"
67 67 # Warm tags' cache for repoview's filter-level used by server
68 68 CACHE_TAGS_SERVED = b"tags-served"
69 69
70 70 # the cache to warm by default after a simple transaction
71 71 # (this is a mutable set to let extension update it)
72 72 CACHES_DEFAULT = {
73 73 CACHE_BRANCHMAP_SERVED,
74 74 }
75 75
76 76 # the caches to warm when warming all of them
77 77 # (this is a mutable set to let extension update it)
78 78 CACHES_ALL = {
79 79 CACHE_BRANCHMAP_SERVED,
80 80 CACHE_BRANCHMAP_ALL,
81 81 CACHE_CHANGELOG_CACHE,
82 82 CACHE_FILE_NODE_TAGS,
83 83 CACHE_FULL_MANIFEST,
84 84 CACHE_MANIFESTLOG_CACHE,
85 85 CACHE_TAGS_DEFAULT,
86 86 CACHE_TAGS_SERVED,
87 87 }
88 88
89 89 # the cache to warm by default on simple call
90 90 # (this is a mutable set to let extension update it)
91 91 CACHES_POST_CLONE = CACHES_ALL.copy()
92 92 CACHES_POST_CLONE.discard(CACHE_FILE_NODE_TAGS)
93 93
94 94
95 95 class ipeerconnection(interfaceutil.Interface):
96 96 """Represents a "connection" to a repository.
97 97
98 98 This is the base interface for representing a connection to a repository.
99 99 It holds basic properties and methods applicable to all peer types.
100 100
101 101 This is not a complete interface definition and should not be used
102 102 outside of this module.
103 103 """
104 104
105 105 ui = interfaceutil.Attribute("""ui.ui instance""")
106 106 path = interfaceutil.Attribute("""a urlutil.path instance or None""")
107 107
108 108 def url():
109 109 """Returns a URL string representing this peer.
110 110
111 111 Currently, implementations expose the raw URL used to construct the
112 112 instance. It may contain credentials as part of the URL. The
113 113 expectations of the value aren't well-defined and this could lead to
114 114 data leakage.
115 115
116 116 TODO audit/clean consumers and more clearly define the contents of this
117 117 value.
118 118 """
119 119
120 120 def local():
121 121 """Returns a local repository instance.
122 122
123 123 If the peer represents a local repository, returns an object that
124 124 can be used to interface with it. Otherwise returns ``None``.
125 125 """
126 126
127 127 def canpush():
128 128 """Returns a boolean indicating if this peer can be pushed to."""
129 129
130 130 def close():
131 131 """Close the connection to this peer.
132 132
133 133 This is called when the peer will no longer be used. Resources
134 134 associated with the peer should be cleaned up.
135 135 """
136 136
137 137
138 138 class ipeercapabilities(interfaceutil.Interface):
139 139 """Peer sub-interface related to capabilities."""
140 140
141 141 def capable(name):
142 142 """Determine support for a named capability.
143 143
144 144 Returns ``False`` if capability not supported.
145 145
146 146 Returns ``True`` if boolean capability is supported. Returns a string
147 147 if capability support is non-boolean.
148 148
149 149 Capability strings may or may not map to wire protocol capabilities.
150 150 """
151 151
152 152 def requirecap(name, purpose):
153 153 """Require a capability to be present.
154 154
155 155 Raises a ``CapabilityError`` if the capability isn't present.
156 156 """
157 157
158 158
159 159 class ipeercommands(interfaceutil.Interface):
160 160 """Client-side interface for communicating over the wire protocol.
161 161
162 162 This interface is used as a gateway to the Mercurial wire protocol.
163 163 Methods commonly call wire protocol commands of the same name.
164 164 """
165 165
166 166 def branchmap():
167 167 """Obtain heads in named branches.
168 168
169 169 Returns a dict mapping branch name to an iterable of nodes that are
170 170 heads on that branch.
171 171 """
172 172
173 173 def capabilities():
174 174 """Obtain capabilities of the peer.
175 175
176 176 Returns a set of string capabilities.
177 177 """
178 178
179 179 def clonebundles():
180 180 """Obtains the clone bundles manifest for the repo.
181 181
182 182 Returns the manifest as unparsed bytes.
183 183 """
184 184
185 185 def debugwireargs(one, two, three=None, four=None, five=None):
186 186 """Used to facilitate debugging of arguments passed over the wire."""
187 187
188 188 def getbundle(source, **kwargs):
189 189 """Obtain remote repository data as a bundle.
190 190
191 191 This command is how the bulk of repository data is transferred from
192 192 the peer to the local repository.
193 193
194 194 Returns a generator of bundle data.
195 195 """
196 196
197 197 def heads():
198 198 """Determine all known head revisions in the peer.
199 199
200 200 Returns an iterable of binary nodes.
201 201 """
202 202
203 203 def known(nodes):
204 204 """Determine whether multiple nodes are known.
205 205
206 206 Accepts an iterable of nodes whose presence to check for.
207 207
208 208 Returns an iterable of booleans indicating whether the corresponding node
209 209 at that index is known to the peer.
210 210 """
211 211
212 212 def listkeys(namespace):
213 213 """Obtain all keys in a pushkey namespace.
214 214
215 215 Returns an iterable of key names.
216 216 """
217 217
218 218 def lookup(key):
219 219 """Resolve a value to a known revision.
220 220
221 221 Returns a binary node of the resolved revision on success.
222 222 """
223 223
224 224 def pushkey(namespace, key, old, new):
225 225 """Set a value using the ``pushkey`` protocol.
226 226
227 227 Arguments correspond to the pushkey namespace and key to operate on and
228 228 the old and new values for that key.
229 229
230 230 Returns a string with the peer result. The value inside varies by the
231 231 namespace.
232 232 """
233 233
234 234 def stream_out():
235 235 """Obtain streaming clone data.
236 236
237 237 Successful result should be a generator of data chunks.
238 238 """
239 239
240 240 def unbundle(bundle, heads, url):
241 241 """Transfer repository data to the peer.
242 242
243 243 This is how the bulk of data during a push is transferred.
244 244
245 245 Returns the integer number of heads added to the peer.
246 246 """
247 247
248 248
249 249 class ipeerlegacycommands(interfaceutil.Interface):
250 250 """Interface for implementing support for legacy wire protocol commands.
251 251
252 252 Wire protocol commands transition to legacy status when they are no longer
253 253 used by modern clients. To facilitate identifying which commands are
254 254 legacy, the interfaces are split.
255 255 """
256 256
257 257 def between(pairs):
258 258 """Obtain nodes between pairs of nodes.
259 259
260 260 ``pairs`` is an iterable of node pairs.
261 261
262 262 Returns an iterable of iterables of nodes corresponding to each
263 263 requested pair.
264 264 """
265 265
266 266 def branches(nodes):
267 267 """Obtain ancestor changesets of specific nodes back to a branch point.
268 268
269 269 For each requested node, the peer finds the first ancestor node that is
270 270 a DAG root or is a merge.
271 271
272 272 Returns an iterable of iterables with the resolved values for each node.
273 273 """
274 274
275 275 def changegroup(nodes, source):
276 276 """Obtain a changegroup with data for descendants of specified nodes."""
277 277
278 278 def changegroupsubset(bases, heads, source):
279 279 pass
280 280
281 281
282 282 class ipeercommandexecutor(interfaceutil.Interface):
283 283 """Represents a mechanism to execute remote commands.
284 284
285 285 This is the primary interface for requesting that wire protocol commands
286 286 be executed. Instances of this interface are active in a context manager
287 287 and have a well-defined lifetime. When the context manager exits, all
288 288 outstanding requests are waited on.
289 289 """
290 290
291 291 def callcommand(name, args):
292 292 """Request that a named command be executed.
293 293
294 294 Receives the command name and a dictionary of command arguments.
295 295
296 296 Returns a ``concurrent.futures.Future`` that will resolve to the
297 297 result of that command request. That exact value is left up to
298 298 the implementation and possibly varies by command.
299 299
300 300 Not all commands can coexist with other commands in an executor
301 301 instance: it depends on the underlying wire protocol transport being
302 302 used and the command itself.
303 303
304 304 Implementations MAY call ``sendcommands()`` automatically if the
305 305 requested command can not coexist with other commands in this executor.
306 306
307 307 Implementations MAY call ``sendcommands()`` automatically when the
308 308 future's ``result()`` is called. So, consumers using multiple
309 309 commands with an executor MUST ensure that ``result()`` is not called
310 310 until all command requests have been issued.
311 311 """
312 312
313 313 def sendcommands():
314 314 """Trigger submission of queued command requests.
315 315
316 316 Not all transports submit commands as soon as they are requested to
317 317 run. When called, this method forces queued command requests to be
318 318 issued. It will no-op if all commands have already been sent.
319 319
320 320 When called, no more new commands may be issued with this executor.
321 321 """
322 322
323 323 def close():
324 324 """Signal that this command request is finished.
325 325
326 326 When called, no more new commands may be issued. All outstanding
327 327 commands that have previously been issued are waited on before
328 328 returning. This not only includes waiting for the futures to resolve,
329 329 but also waiting for all response data to arrive. In other words,
330 330 calling this waits for all on-wire state for issued command requests
331 331 to finish.
332 332
333 333 When used as a context manager, this method is called when exiting the
334 334 context manager.
335 335
336 336 This method may call ``sendcommands()`` if there are buffered commands.
337 337 """
338 338
339 339
340 340 class ipeerrequests(interfaceutil.Interface):
341 341 """Interface for executing commands on a peer."""
342 342
343 343 limitedarguments = interfaceutil.Attribute(
344 344 """True if the peer cannot receive large argument value for commands."""
345 345 )
346 346
347 347 def commandexecutor():
348 348 """A context manager that resolves to an ipeercommandexecutor.
349 349
350 350 The object this resolves to can be used to issue command requests
351 351 to the peer.
352 352
353 353 Callers should call its ``callcommand`` method to issue command
354 354 requests.
355 355
356 356 A new executor should be obtained for each distinct set of commands
357 357 (possibly just a single command) that the consumer wants to execute
358 358 as part of a single operation or round trip. This is because some
359 359 peers are half-duplex and/or don't support persistent connections.
360 360 e.g. in the case of HTTP peers, commands sent to an executor represent
361 361 a single HTTP request. While some peers may support multiple command
362 362 sends over the wire per executor, consumers need to code to the least
363 363 capable peer. So it should be assumed that command executors buffer
364 364 called commands until they are told to send them and that each
365 365 command executor could result in a new connection or wire-level request
366 366 being issued.
367 367 """
368 368
369 369
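# --- illustrative example (not part of the original file) -------------------
# A minimal sketch of how a consumer typically drives a command executor,
# assuming ``remote`` conforms to ipeerrequests and ``nodes`` is an iterable
# of binary nodes. The command names used (b'heads', b'known') are standard
# wire protocol commands; the helper name itself is hypothetical.
def _example_heads_and_known(remote, nodes):
    with remote.commandexecutor() as executor:
        f_heads = executor.callcommand(b'heads', {})
        f_known = executor.callcommand(b'known', {b'nodes': nodes})
    # results are only read after the context manager exits, once every
    # queued request has been sent and waited on
    return f_heads.result(), f_known.result()
# -----------------------------------------------------------------------------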
370 370 class ipeerbase(ipeerconnection, ipeercapabilities, ipeerrequests):
371 371 """Unified interface for peer repositories.
372 372
373 373 All peer instances must conform to this interface.
374 374 """
375 375
376 376
377 377 class ipeerv2(ipeerconnection, ipeercapabilities, ipeerrequests):
378 378 """Unified peer interface for wire protocol version 2 peers."""
379 379
380 380 apidescriptor = interfaceutil.Attribute(
381 381 """Data structure holding description of server API."""
382 382 )
383 383
384 384
385 385 @interfaceutil.implementer(ipeerbase)
386 386 class peer:
387 387 """Base class for peer repositories."""
388 388
389 389 limitedarguments = False
390 390
391 def __init__(self, ui, path=None):
391 def __init__(self, ui, path=None, remotehidden=False):
392 392 self.ui = ui
393 393 self.path = path
394 394
395 395 def capable(self, name):
396 396 caps = self.capabilities()
397 397 if name in caps:
398 398 return True
399 399
400 400 name = b'%s=' % name
401 401 for cap in caps:
402 402 if cap.startswith(name):
403 403 return cap[len(name) :]
404 404
405 405 return False
406 406
407 407 def requirecap(self, name, purpose):
408 408 if self.capable(name):
409 409 return
410 410
411 411 raise error.CapabilityError(
412 412 _(
413 413 b'cannot %s; remote repository does not support the '
414 414 b'\'%s\' capability'
415 415 )
416 416 % (purpose, name)
417 417 )
418 418
419 419
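# --- illustrative example (not part of the original file) -------------------
# Sketch of interpreting capable() results on an object deriving from the
# peer base class above: a boolean capability yields True, a valued
# capability advertised as ``name=value`` yields the value part, and an
# unknown capability yields False. The capability name is illustrative.
def _example_bundle2_support(remote):
    support = remote.capable(b'bundle2')
    if support is False:
        return None      # capability not advertised
    if support is True:
        return b''       # advertised as a plain boolean capability
    return support       # advertised as name=value; this is the value part
# -----------------------------------------------------------------------------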
420 420 class iverifyproblem(interfaceutil.Interface):
421 421 """Represents a problem with the integrity of the repository.
422 422
423 423 Instances of this interface are emitted to describe an integrity issue
424 424 with a repository (e.g. corrupt storage, missing data, etc).
425 425
426 426 Instances are essentially messages associated with severity.
427 427 """
428 428
429 429 warning = interfaceutil.Attribute(
430 430 """Message indicating a non-fatal problem."""
431 431 )
432 432
433 433 error = interfaceutil.Attribute("""Message indicating a fatal problem.""")
434 434
435 435 node = interfaceutil.Attribute(
436 436 """Revision encountering the problem.
437 437
438 438 ``None`` means the problem doesn't apply to a single revision.
439 439 """
440 440 )
441 441
442 442
443 443 class irevisiondelta(interfaceutil.Interface):
444 444 """Represents a delta between one revision and another.
445 445
446 446 Instances convey enough information to allow a revision to be exchanged
447 447 with another repository.
448 448
449 449 Instances represent the fulltext revision data or a delta against
450 450 another revision. Therefore the ``revision`` and ``delta`` attributes
451 451 are mutually exclusive.
452 452
453 453 Typically used for changegroup generation.
454 454 """
455 455
456 456 node = interfaceutil.Attribute("""20 byte node of this revision.""")
457 457
458 458 p1node = interfaceutil.Attribute(
459 459 """20 byte node of 1st parent of this revision."""
460 460 )
461 461
462 462 p2node = interfaceutil.Attribute(
463 463 """20 byte node of 2nd parent of this revision."""
464 464 )
465 465
466 466 linknode = interfaceutil.Attribute(
467 467 """20 byte node of the changelog revision this node is linked to."""
468 468 )
469 469
470 470 flags = interfaceutil.Attribute(
471 471 """2 bytes of integer flags that apply to this revision.
472 472
473 473 This is a bitwise composition of the ``REVISION_FLAG_*`` constants.
474 474 """
475 475 )
476 476
477 477 basenode = interfaceutil.Attribute(
478 478 """20 byte node of the revision this data is a delta against.
479 479
480 480 ``nullid`` indicates that the revision is a full revision and not
481 481 a delta.
482 482 """
483 483 )
484 484
485 485 baserevisionsize = interfaceutil.Attribute(
486 486 """Size of base revision this delta is against.
487 487
488 488 May be ``None`` if ``basenode`` is ``nullid``.
489 489 """
490 490 )
491 491
492 492 revision = interfaceutil.Attribute(
493 493 """Raw fulltext of revision data for this node."""
494 494 )
495 495
496 496 delta = interfaceutil.Attribute(
497 497 """Delta between ``basenode`` and ``node``.
498 498
499 499 Stored in the bdiff delta format.
500 500 """
501 501 )
502 502
503 503 sidedata = interfaceutil.Attribute(
504 504 """Raw sidedata bytes for the given revision."""
505 505 )
506 506
507 507 protocol_flags = interfaceutil.Attribute(
508 508 """Single byte of integer flags that can influence the protocol.
509 509
510 510 This is a bitwise composition of the ``storageutil.CG_FLAG*`` constants.
511 511 """
512 512 )
513 513
514 514
515 515 class ifilerevisionssequence(interfaceutil.Interface):
516 516 """Contains index data for all revisions of a file.
517 517
518 518 Types implementing this behave like lists of tuples. The index
519 519 in the list corresponds to the revision number. The values contain
520 520 index metadata.
521 521
522 522 The *null* revision (revision number -1) is always the last item
523 523 in the index.
524 524 """
525 525
526 526 def __len__():
527 527 """The total number of revisions."""
528 528
529 529 def __getitem__(rev):
530 530 """Returns the object having a specific revision number.
531 531
532 532 Returns an 8-tuple with the following fields:
533 533
534 534 offset+flags
535 535 Contains the offset and flags for the revision. 64-bit unsigned
536 536 integer where first 6 bytes are the offset and the next 2 bytes
537 537 are flags. The offset can be 0 if it is not used by the store.
538 538 compressed size
539 539 Size of the revision data in the store. It can be 0 if it isn't
540 540 needed by the store.
541 541 uncompressed size
542 542 Fulltext size. It can be 0 if it isn't needed by the store.
543 543 base revision
544 544 Revision number of revision the delta for storage is encoded
545 545 against. -1 indicates not encoded against a base revision.
546 546 link revision
547 547 Revision number of changelog revision this entry is related to.
548 548 p1 revision
549 549 Revision number of 1st parent. -1 if no 1st parent.
550 550 p2 revision
551 551 Revision number of 2nd parent. -1 if no 2nd parent.
552 552 node
553 553 Binary node value for this revision number.
554 554
555 555 Negative values should index off the end of the sequence. ``-1``
556 556 should return the null revision. ``-2`` should return the most
557 557 recent revision.
558 558 """
559 559
560 560 def __contains__(rev):
561 561 """Whether a revision number exists."""
562 562
563 563 def insert(self, i, entry):
564 564 """Add an item to the index at specific revision."""
565 565
566 566
567 567 class ifileindex(interfaceutil.Interface):
568 568 """Storage interface for index data of a single file.
569 569
570 570 File storage data is divided into index metadata and data storage.
571 571 This interface defines the index portion of the interface.
572 572
573 573 The index logically consists of:
574 574
575 575 * A mapping between revision numbers and nodes.
576 576 * DAG data (storing and querying the relationship between nodes).
577 577 * Metadata to facilitate storage.
578 578 """
579 579
580 580 nullid = interfaceutil.Attribute(
581 581 """node for the null revision for use as delta base."""
582 582 )
583 583
584 584 def __len__():
585 585 """Obtain the number of revisions stored for this file."""
586 586
587 587 def __iter__():
588 588 """Iterate over revision numbers for this file."""
589 589
590 590 def hasnode(node):
591 591 """Returns a bool indicating if a node is known to this store.
592 592
593 593 Implementations must only return True for full, binary node values:
594 594 hex nodes, revision numbers, and partial node matches must be
595 595 rejected.
596 596
597 597 The null node is never present.
598 598 """
599 599
600 600 def revs(start=0, stop=None):
601 601 """Iterate over revision numbers for this file, with control."""
602 602
603 603 def parents(node):
604 604 """Returns a 2-tuple of parent nodes for a revision.
605 605
606 606 Values will be ``nullid`` if the parent is empty.
607 607 """
608 608
609 609 def parentrevs(rev):
610 610 """Like parents() but operates on revision numbers."""
611 611
612 612 def rev(node):
613 613 """Obtain the revision number given a node.
614 614
615 615 Raises ``error.LookupError`` if the node is not known.
616 616 """
617 617
618 618 def node(rev):
619 619 """Obtain the node value given a revision number.
620 620
621 621 Raises ``IndexError`` if the node is not known.
622 622 """
623 623
624 624 def lookup(node):
625 625 """Attempt to resolve a value to a node.
626 626
627 627 Value can be a binary node, hex node, revision number, or a string
628 628 that can be converted to an integer.
629 629
630 630 Raises ``error.LookupError`` if a node could not be resolved.
631 631 """
632 632
633 633 def linkrev(rev):
634 634 """Obtain the changeset revision number a revision is linked to."""
635 635
636 636 def iscensored(rev):
637 637 """Return whether a revision's content has been censored."""
638 638
639 639 def commonancestorsheads(node1, node2):
640 640 """Obtain an iterable of nodes containing heads of common ancestors.
641 641
642 642 See ``ancestor.commonancestorsheads()``.
643 643 """
644 644
645 645 def descendants(revs):
646 646 """Obtain descendant revision numbers for a set of revision numbers.
647 647
648 648 If ``nullrev`` is in the set, this is equivalent to ``revs()``.
649 649 """
650 650
651 651 def heads(start=None, stop=None):
652 652 """Obtain a list of nodes that are DAG heads, with control.
653 653
654 654 The set of revisions examined can be limited by specifying
655 655 ``start`` and ``stop``. ``start`` is a node. ``stop`` is an
656 656 iterable of nodes. DAG traversal starts at earlier revision
657 657 ``start`` and iterates forward until any node in ``stop`` is
658 658 encountered.
659 659 """
660 660
661 661 def children(node):
662 662 """Obtain nodes that are children of a node.
663 663
664 664 Returns a list of nodes.
665 665 """
666 666
667 667
668 668 class ifiledata(interfaceutil.Interface):
669 669 """Storage interface for data storage of a specific file.
670 670
671 671 This complements ``ifileindex`` and provides an interface for accessing
672 672 data for a tracked file.
673 673 """
674 674
675 675 def size(rev):
676 676 """Obtain the fulltext size of file data.
677 677
678 678 Any metadata is excluded from size measurements.
679 679 """
680 680
681 681 def revision(node, raw=False):
682 682 """Obtain fulltext data for a node.
683 683
684 684 By default, any storage transformations are applied before the data
685 685 is returned. If ``raw`` is True, non-raw storage transformations
686 686 are not applied.
687 687
688 688 The fulltext data may contain a header containing metadata. Most
689 689 consumers should use ``read()`` to obtain the actual file data.
690 690 """
691 691
692 692 def rawdata(node):
693 693 """Obtain raw data for a node."""
694 694
695 695 def read(node):
696 696 """Resolve file fulltext data.
697 697
698 698 This is similar to ``revision()`` except any metadata in the data
699 699 headers is stripped.
700 700 """
701 701
702 702 def renamed(node):
703 703 """Obtain copy metadata for a node.
704 704
705 705 Returns ``False`` if no copy metadata is stored or a 2-tuple of
706 706 (path, node) from which this revision was copied.
707 707 """
708 708
709 709 def cmp(node, fulltext):
710 710 """Compare fulltext to another revision.
711 711
712 712 Returns True if the fulltext is different from what is stored.
713 713
714 714 This takes copy metadata into account.
715 715
716 716 TODO better document the copy metadata and censoring logic.
717 717 """
718 718
719 719 def emitrevisions(
720 720 nodes,
721 721 nodesorder=None,
722 722 revisiondata=False,
723 723 assumehaveparentrevisions=False,
724 724 deltamode=CG_DELTAMODE_STD,
725 725 ):
726 726 """Produce ``irevisiondelta`` for revisions.
727 727
728 728 Given an iterable of nodes, emits objects conforming to the
729 729 ``irevisiondelta`` interface that describe revisions in storage.
730 730
731 731 This method is a generator.
732 732
733 733 The input nodes may be unordered. Implementations must ensure that a
734 734 node's parents are emitted before the node itself. Transitively, this
735 735 means that a node may only be emitted once all its ancestors in
736 736 ``nodes`` have also been emitted.
737 737
738 738 By default, emits "index" data (the ``node``, ``p1node``, and
739 739 ``p2node`` attributes). If ``revisiondata`` is set, revision data
740 740 will also be present on the emitted objects.
741 741
742 742 With default argument values, implementations can choose to emit
743 743 either fulltext revision data or a delta. When emitting deltas,
744 744 implementations must consider whether the delta's base revision
745 745 fulltext is available to the receiver.
746 746
747 747 The base revision fulltext is guaranteed to be available if any of
748 748 the following are met:
749 749
750 750 * Its fulltext revision was emitted by this method call.
751 751 * A delta for that revision was emitted by this method call.
752 752 * ``assumehaveparentrevisions`` is True and the base revision is a
753 753 parent of the node.
754 754
755 755 ``nodesorder`` can be used to control the order that revisions are
756 756 emitted. By default, revisions can be reordered as long as they are
757 757 in DAG topological order (see above). If the value is ``nodes``,
758 758 the iteration order from ``nodes`` should be used. If the value is
759 759 ``storage``, then the native order from the backing storage layer
760 760 is used. (Not all storage layers will have strong ordering and behavior
761 761 of this mode is storage-dependent.) ``nodes`` ordering can force
762 762 revisions to be emitted before their ancestors, so consumers should
763 763 use it with care.
764 764
765 765 The ``linknode`` attribute on the returned ``irevisiondelta`` may not
766 766 be set and it is the caller's responsibility to resolve it, if needed.
767 767
768 768 If ``deltamode`` is CG_DELTAMODE_PREV and revision data is requested,
769 769 all revision data should be emitted as deltas against the revision
770 770 emitted just prior. The initial revision should be a delta against its
771 771 1st parent.
772 772 """
773 773
774 774
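# --- illustrative example (not part of the original file) -------------------
# Sketch of consuming emitrevisions(), assuming ``store`` conforms to
# ifiledata and ``nodes`` only contains nodes known to it. Each emitted
# object follows the irevisiondelta interface documented above.
def _example_collect_payloads(store, nodes):
    out = []
    for rev in store.emitrevisions(nodes, revisiondata=True):
        if rev.delta is not None:
            # delta in bdiff format against rev.basenode
            out.append((rev.node, rev.basenode, rev.delta))
        else:
            # a fulltext was emitted instead of a delta
            out.append((rev.node, None, rev.revision))
    return out
# -----------------------------------------------------------------------------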
775 775 class ifilemutation(interfaceutil.Interface):
776 776 """Storage interface for mutation events of a tracked file."""
777 777
778 778 def add(filedata, meta, transaction, linkrev, p1, p2):
779 779 """Add a new revision to the store.
780 780
781 781 Takes file data, dictionary of metadata, a transaction, linkrev,
782 782 and parent nodes.
783 783
784 784 Returns the node that was added.
785 785
786 786 May no-op if a revision matching the supplied data is already stored.
787 787 """
788 788
789 789 def addrevision(
790 790 revisiondata,
791 791 transaction,
792 792 linkrev,
793 793 p1,
794 794 p2,
795 795 node=None,
796 796 flags=0,
797 797 cachedelta=None,
798 798 ):
799 799 """Add a new revision to the store and return its number.
800 800
801 801 This is similar to ``add()`` except it operates at a lower level.
802 802
803 803 The data passed in already contains a metadata header, if any.
804 804
805 805 ``node`` and ``flags`` can be used to define the expected node and
806 806 the flags to use with storage. ``flags`` is a bitwise value composed
807 807 of the various ``REVISION_FLAG_*`` constants.
808 808
809 809 ``add()`` is usually called when adding files from e.g. the working
810 810 directory. ``addrevision()`` is often called by ``add()`` and for
811 811 scenarios where revision data has already been computed, such as when
812 812 applying raw data from a peer repo.
813 813 """
814 814
815 815 def addgroup(
816 816 deltas,
817 817 linkmapper,
818 818 transaction,
819 819 addrevisioncb=None,
820 820 duplicaterevisioncb=None,
821 821 maybemissingparents=False,
822 822 ):
823 823 """Process a series of deltas for storage.
824 824
825 825 ``deltas`` is an iterable of 7-tuples of
826 826 (node, p1, p2, linknode, deltabase, delta, flags) defining revisions
827 827 to add.
828 828
829 829 The ``delta`` field contains ``mpatch`` data to apply to a base
830 830 revision, identified by ``deltabase``. The base node can be
831 831 ``nullid``, in which case the header from the delta can be ignored
832 832 and the delta used as the fulltext.
833 833
834 834 ``alwayscache`` instructs the lower layers to cache the content of the
835 835 newly added revision, even if it needs to be explicitly computed.
836 836 This used to be the default when ``addrevisioncb`` was provided up to
837 837 Mercurial 5.8.
838 838
839 839 ``addrevisioncb`` should be called for each new rev as it is committed.
840 840 ``duplicaterevisioncb`` should be called for all revs with a
841 841 pre-existing node.
842 842
843 843 ``maybemissingparents`` is a bool indicating whether the incoming
844 844 data may reference parents/ancestor revisions that aren't present.
845 845 This flag is set when receiving data into a "shallow" store that
846 846 doesn't hold all history.
847 847
848 848 Returns a list of nodes that were processed. A node will be in the list
849 849 even if it existed in the store previously.
850 850 """
851 851
852 852 def censorrevision(tr, node, tombstone=b''):
853 853 """Remove the content of a single revision.
854 854
855 855 The specified ``node`` will have its content purged from storage.
856 856 Future attempts to access the revision data for this node will
857 857 result in failure.
858 858
859 859 A ``tombstone`` message can optionally be stored. This message may be
860 860 displayed to users when they attempt to access the missing revision
861 861 data.
862 862
863 863 Storage backends may have stored deltas against the previous content
864 864 in this revision. As part of censoring a revision, these storage
865 865 backends are expected to rewrite any internally stored deltas such
866 866 that they no longer reference the deleted content.
867 867 """
868 868
869 869 def getstrippoint(minlink):
870 870 """Find the minimum revision that must be stripped to strip a linkrev.
871 871
872 872 Returns a 2-tuple containing the minimum revision number and a set
873 873 of all revision numbers that would be broken by this strip.
874 874
875 875 TODO this is highly revlog centric and should be abstracted into
876 876 a higher-level deletion API. ``repair.strip()`` relies on this.
877 877 """
878 878
879 879 def strip(minlink, transaction):
880 880 """Remove storage of items starting at a linkrev.
881 881
882 882 This uses ``getstrippoint()`` to determine the first node to remove.
883 883 Then it effectively truncates storage for all revisions after that.
884 884
885 885 TODO this is highly revlog centric and should be abstracted into a
886 886 higher-level deletion API.
887 887 """
888 888
889 889
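# --- illustrative example (not part of the original file) -------------------
# Sketch of the strip contract described above, assuming ``fl`` implements
# ifilemutation, ``minlink`` is a changelog revision number, and ``tr`` is an
# open transaction.
def _example_strip(fl, minlink, tr):
    striprev, brokenrevs = fl.getstrippoint(minlink)
    fl.strip(minlink, tr)  # truncates storage from the computed strip point on
    return striprev, brokenrevs
# -----------------------------------------------------------------------------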
890 890 class ifilestorage(ifileindex, ifiledata, ifilemutation):
891 891 """Complete storage interface for a single tracked file."""
892 892
893 893 def files():
894 894 """Obtain paths that are backing storage for this file.
895 895
896 896 TODO this is used heavily by verify code and there should probably
897 897 be a better API for that.
898 898 """
899 899
900 900 def storageinfo(
901 901 exclusivefiles=False,
902 902 sharedfiles=False,
903 903 revisionscount=False,
904 904 trackedsize=False,
905 905 storedsize=False,
906 906 ):
907 907 """Obtain information about storage for this file's data.
908 908
909 909 Returns a dict describing storage for this tracked path. The keys
910 910 in the dict map to arguments of the same. The arguments are bools
911 911 indicating whether to calculate and obtain that data.
912 912
913 913 exclusivefiles
914 914 Iterable of (vfs, path) describing files that are exclusively
915 915 used to back storage for this tracked path.
916 916
917 917 sharedfiles
918 918 Iterable of (vfs, path) describing files that are used to back
919 919 storage for this tracked path. Those files may also provide storage
920 920 for other stored entities.
921 921
922 922 revisionscount
923 923 Number of revisions available for retrieval.
924 924
925 925 trackedsize
926 926 Total size in bytes of all tracked revisions. This is a sum of the
927 927 length of the fulltext of all revisions.
928 928
929 929 storedsize
930 930 Total size in bytes used to store data for all tracked revisions.
931 931 This is commonly less than ``trackedsize`` due to internal usage
932 932 of deltas rather than fulltext revisions.
933 933
934 934 Not all storage backends may support all queries or have a reasonable
935 935 value to use. In that case, the value should be set to ``None`` and
936 936 callers are expected to handle this special value.
937 937 """
938 938
939 939 def verifyintegrity(state):
940 940 """Verifies the integrity of file storage.
941 941
942 942 ``state`` is a dict holding state of the verifier process. It can be
943 943 used to communicate data between invocations of multiple storage
944 944 primitives.
945 945
946 946 If individual revisions cannot have their revision content resolved,
947 947 the method is expected to set the ``skipread`` key to a set of nodes
948 948 that encountered problems. If set, the method can also add the node(s)
949 949 to ``safe_renamed`` in order to indicate nodes whose rename checks can
950 950 still be performed with the currently accessible data.
951 951
952 952 The method yields objects conforming to the ``iverifyproblem``
953 953 interface.
954 954 """
955 955
956 956
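# --- illustrative example (not part of the original file) -------------------
# Sketch of querying storageinfo(), assuming ``fl`` conforms to ifilestorage.
# The bytes keys mirroring the argument names follow the revlog-backed
# implementation and are an assumption here; backends may report ``None``
# for data they cannot compute.
def _example_tracked_size(fl):
    info = fl.storageinfo(revisionscount=True, trackedsize=True)
    revcount = info.get(b'revisionscount')  # may be None
    tracked = info.get(b'trackedsize')      # may be None
    return revcount, tracked
# -----------------------------------------------------------------------------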
957 957 class idirs(interfaceutil.Interface):
958 958 """Interface representing a collection of directories from paths.
959 959
960 960 This interface is essentially a derived data structure representing
961 961 directories from a collection of paths.
962 962 """
963 963
964 964 def addpath(path):
965 965 """Add a path to the collection.
966 966
967 967 All directories in the path will be added to the collection.
968 968 """
969 969
970 970 def delpath(path):
971 971 """Remove a path from the collection.
972 972
973 973 If the removal was the last path in a particular directory, the
974 974 directory is removed from the collection.
975 975 """
976 976
977 977 def __iter__():
978 978 """Iterate over the directories in this collection of paths."""
979 979
980 980 def __contains__(path):
981 981 """Whether a specific directory is in this collection."""
982 982
983 983
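# --- illustrative example (not part of the original file) -------------------
# Sketch of exercising the idirs contract, assuming ``d`` is some object
# implementing this interface (the concrete type is left unspecified).
def _example_dirs_usage(d):
    d.addpath(b'a/b/c.txt')  # every directory of the path is registered
    found = b'a/b' in d      # membership via __contains__
    alldirs = list(d)        # iteration over the registered directories
    d.delpath(b'a/b/c.txt')  # directories left without paths are dropped
    return found, alldirs
# -----------------------------------------------------------------------------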
984 984 class imanifestdict(interfaceutil.Interface):
985 985 """Interface representing a manifest data structure.
986 986
987 987 A manifest is effectively a dict mapping paths to entries. Each entry
988 988 consists of a binary node and extra flags affecting that entry.
989 989 """
990 990
991 991 def __getitem__(path):
992 992 """Returns the binary node value for a path in the manifest.
993 993
994 994 Raises ``KeyError`` if the path does not exist in the manifest.
995 995
996 996 Equivalent to ``self.find(path)[0]``.
997 997 """
998 998
999 999 def find(path):
1000 1000 """Returns the entry for a path in the manifest.
1001 1001
1002 1002 Returns a 2-tuple of (node, flags).
1003 1003
1004 1004 Raises ``KeyError`` if the path does not exist in the manifest.
1005 1005 """
1006 1006
1007 1007 def __len__():
1008 1008 """Return the number of entries in the manifest."""
1009 1009
1010 1010 def __nonzero__():
1011 1011 """Returns True if the manifest has entries, False otherwise."""
1012 1012
1013 1013 __bool__ = __nonzero__
1014 1014
1015 1015 def __setitem__(path, node):
1016 1016 """Define the node value for a path in the manifest.
1017 1017
1018 1018 If the path is already in the manifest, its flags will be copied to
1019 1019 the new entry.
1020 1020 """
1021 1021
1022 1022 def __contains__(path):
1023 1023 """Whether a path exists in the manifest."""
1024 1024
1025 1025 def __delitem__(path):
1026 1026 """Remove a path from the manifest.
1027 1027
1028 1028 Raises ``KeyError`` if the path is not in the manifest.
1029 1029 """
1030 1030
1031 1031 def __iter__():
1032 1032 """Iterate over paths in the manifest."""
1033 1033
1034 1034 def iterkeys():
1035 1035 """Iterate over paths in the manifest."""
1036 1036
1037 1037 def keys():
1038 1038 """Obtain a list of paths in the manifest."""
1039 1039
1040 1040 def filesnotin(other, match=None):
1041 1041 """Obtain the set of paths in this manifest but not in another.
1042 1042
1043 1043 ``match`` is an optional matcher function to be applied to both
1044 1044 manifests.
1045 1045
1046 1046 Returns a set of paths.
1047 1047 """
1048 1048
1049 1049 def dirs():
1050 1050 """Returns an object implementing the ``idirs`` interface."""
1051 1051
1052 1052 def hasdir(dir):
1053 1053 """Returns a bool indicating if a directory is in this manifest."""
1054 1054
1055 1055 def walk(match):
1056 1056 """Generator of paths in manifest satisfying a matcher.
1057 1057
1058 1058 If the matcher has explicit files listed and they don't exist in
1059 1059 the manifest, ``match.bad()`` is called for each missing file.
1060 1060 """
1061 1061
1062 1062 def diff(other, match=None, clean=False):
1063 1063 """Find differences between this manifest and another.
1064 1064
1065 1065 This manifest is compared to ``other``.
1066 1066
1067 1067 If ``match`` is provided, the two manifests are filtered against this
1068 1068 matcher and only entries satisfying the matcher are compared.
1069 1069
1070 1070 If ``clean`` is True, unchanged files are included in the returned
1071 1071 object.
1072 1072
1073 1073 Returns a dict with paths as keys and values of 2-tuples of 2-tuples of
1074 1074 the form ``((node1, flag1), (node2, flag2))`` where ``(node1, flag1)``
1075 1075 represents the node and flags for this manifest and ``(node2, flag2)``
1076 1076 are the same for the other manifest.
1077 1077 """
1078 1078
1079 1079 def setflag(path, flag):
1080 1080 """Set the flag value for a given path.
1081 1081
1082 1082 Raises ``KeyError`` if the path is not already in the manifest.
1083 1083 """
1084 1084
1085 1085 def get(path, default=None):
1086 1086 """Obtain the node value for a path or a default value if missing."""
1087 1087
1088 1088 def flags(path):
1089 1089 """Return the flags value for a path (default: empty bytestring)."""
1090 1090
1091 1091 def copy():
1092 1092 """Return a copy of this manifest."""
1093 1093
1094 1094 def items():
1095 1095 """Returns an iterable of (path, node) for items in this manifest."""
1096 1096
1097 1097 def iteritems():
1098 1098 """Identical to items()."""
1099 1099
1100 1100 def iterentries():
1101 1101 """Returns an iterable of (path, node, flags) for this manifest.
1102 1102
1103 1103 Similar to ``iteritems()`` except items are a 3-tuple and include
1104 1104 flags.
1105 1105 """
1106 1106
1107 1107 def text():
1108 1108 """Obtain the raw data representation for this manifest.
1109 1109
1110 1110 Result is used to create a manifest revision.
1111 1111 """
1112 1112
1113 1113 def fastdelta(base, changes):
1114 1114 """Obtain a delta between this manifest and another given changes.
1115 1115
1116 1116 ``base`` is the raw data representation of another manifest.
1117 1117
1118 1118 ``changes`` is an iterable of ``(path, to_delete)``.
1119 1119
1120 1120 Returns a 2-tuple containing ``bytearray(self.text())`` and the
1121 1121 delta between ``base`` and this manifest.
1122 1122
1123 1123 If this manifest implementation can't support ``fastdelta()``,
1124 1124 raise ``mercurial.manifest.FastdeltaUnavailable``.
1125 1125 """
1126 1126
1127 1127
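# --- illustrative example (not part of the original file) -------------------
# Sketch of interpreting diff() output between two imanifestdict instances,
# following the ((node1, flag1), (node2, flag2)) shape documented above.
# Entries present on only one side are assumed to carry a ``None`` node for
# the missing side, as in the stock manifestdict implementation.
def _example_classify_changes(m1, m2):
    changes = {}
    for path, ((node1, flag1), (node2, flag2)) in m1.diff(m2).items():
        if node1 is None:
            changes[path] = b'only in other'
        elif node2 is None:
            changes[path] = b'only in this'
        else:
            changes[path] = b'modified'
    return changes
# -----------------------------------------------------------------------------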
1128 1128 class imanifestrevisionbase(interfaceutil.Interface):
1129 1129 """Base interface representing a single revision of a manifest.
1130 1130
1131 1131 Should not be used as a primary interface: should always be inherited
1132 1132 as part of a larger interface.
1133 1133 """
1134 1134
1135 1135 def copy():
1136 1136 """Obtain a copy of this manifest instance.
1137 1137
1138 1138 Returns an object conforming to the ``imanifestrevisionwritable``
1139 1139 interface. The instance will be associated with the same
1140 1140 ``imanifestlog`` collection as this instance.
1141 1141 """
1142 1142
1143 1143 def read():
1144 1144 """Obtain the parsed manifest data structure.
1145 1145
1146 1146 The returned object conforms to the ``imanifestdict`` interface.
1147 1147 """
1148 1148
1149 1149
1150 1150 class imanifestrevisionstored(imanifestrevisionbase):
1151 1151 """Interface representing a manifest revision committed to storage."""
1152 1152
1153 1153 def node():
1154 1154 """The binary node for this manifest."""
1155 1155
1156 1156 parents = interfaceutil.Attribute(
1157 1157 """List of binary nodes that are parents for this manifest revision."""
1158 1158 )
1159 1159
1160 1160 def readdelta(shallow=False):
1161 1161 """Obtain the manifest data structure representing changes from parent.
1162 1162
1163 1163 This manifest is compared to its 1st parent. A new manifest representing
1164 1164 those differences is constructed.
1165 1165
1166 1166 The returned object conforms to the ``imanifestdict`` interface.
1167 1167 """
1168 1168
1169 1169 def readfast(shallow=False):
1170 1170 """Calls either ``read()`` or ``readdelta()``.
1171 1171
1172 1172 The faster of the two options is called.
1173 1173 """
1174 1174
1175 1175 def find(key):
1176 1176 """Calls self.read().find(key)``.
1177 1177
1178 1178 Returns a 2-tuple of ``(node, flags)`` or raises ``KeyError``.
1179 1179 """
1180 1180
1181 1181
1182 1182 class imanifestrevisionwritable(imanifestrevisionbase):
1183 1183 """Interface representing a manifest revision that can be committed."""
1184 1184
1185 1185 def write(transaction, linkrev, p1node, p2node, added, removed, match=None):
1186 1186 """Add this revision to storage.
1187 1187
1188 1188 Takes a transaction object, the changeset revision number it will
1189 1189 be associated with, its parent nodes, and lists of added and
1190 1190 removed paths.
1191 1191
1192 1192 If match is provided, storage can choose not to inspect or write out
1193 1193 items that do not match. Storage is still required to be able to provide
1194 1194 the full manifest in the future for any directories written (these
1195 1195 manifests should not be "narrowed on disk").
1196 1196
1197 1197 Returns the binary node of the created revision.
1198 1198 """
1199 1199
1200 1200
1201 1201 class imanifeststorage(interfaceutil.Interface):
1202 1202 """Storage interface for manifest data."""
1203 1203
1204 1204 nodeconstants = interfaceutil.Attribute(
1205 1205 """nodeconstants used by the current repository."""
1206 1206 )
1207 1207
1208 1208 tree = interfaceutil.Attribute(
1209 1209 """The path to the directory this manifest tracks.
1210 1210
1211 1211 The empty bytestring represents the root manifest.
1212 1212 """
1213 1213 )
1214 1214
1215 1215 index = interfaceutil.Attribute(
1216 1216 """An ``ifilerevisionssequence`` instance."""
1217 1217 )
1218 1218
1219 1219 opener = interfaceutil.Attribute(
1220 1220 """VFS opener to use to access underlying files used for storage.
1221 1221
1222 1222 TODO this is revlog specific and should not be exposed.
1223 1223 """
1224 1224 )
1225 1225
1226 1226 _generaldelta = interfaceutil.Attribute(
1227 1227 """Whether generaldelta storage is being used.
1228 1228
1229 1229 TODO this is revlog specific and should not be exposed.
1230 1230 """
1231 1231 )
1232 1232
1233 1233 fulltextcache = interfaceutil.Attribute(
1234 1234 """Dict with cache of fulltexts.
1235 1235
1236 1236 TODO this doesn't feel appropriate for the storage interface.
1237 1237 """
1238 1238 )
1239 1239
1240 1240 def __len__():
1241 1241 """Obtain the number of revisions stored for this manifest."""
1242 1242
1243 1243 def __iter__():
1244 1244 """Iterate over revision numbers for this manifest."""
1245 1245
1246 1246 def rev(node):
1247 1247 """Obtain the revision number given a binary node.
1248 1248
1249 1249 Raises ``error.LookupError`` if the node is not known.
1250 1250 """
1251 1251
1252 1252 def node(rev):
1253 1253 """Obtain the node value given a revision number.
1254 1254
1255 1255 Raises ``error.LookupError`` if the revision is not known.
1256 1256 """
1257 1257
1258 1258 def lookup(value):
1259 1259 """Attempt to resolve a value to a node.
1260 1260
1261 1261 Value can be a binary node, hex node, revision number, or a bytes
1262 1262 that can be converted to an integer.
1263 1263
1264 1264 Raises ``error.LookupError`` if a node could not be resolved.
1265 1265 """
1266 1266
1267 1267 def parents(node):
1268 1268 """Returns a 2-tuple of parent nodes for a node.
1269 1269
1270 1270 Values will be ``nullid`` if the parent is empty.
1271 1271 """
1272 1272
1273 1273 def parentrevs(rev):
1274 1274 """Like parents() but operates on revision numbers."""
1275 1275
1276 1276 def linkrev(rev):
1277 1277 """Obtain the changeset revision number a revision is linked to."""
1278 1278
1279 1279 def revision(node, _df=None):
1280 1280 """Obtain fulltext data for a node."""
1281 1281
1282 1282 def rawdata(node, _df=None):
1283 1283 """Obtain raw data for a node."""
1284 1284
1285 1285 def revdiff(rev1, rev2):
1286 1286 """Obtain a delta between two revision numbers.
1287 1287
1288 1288 The returned data is the result of ``bdiff.bdiff()`` on the raw
1289 1289 revision data.
1290 1290 """
1291 1291
1292 1292 def cmp(node, fulltext):
1293 1293 """Compare fulltext to another revision.
1294 1294
1295 1295 Returns True if the fulltext is different from what is stored.
1296 1296 """
1297 1297
1298 1298 def emitrevisions(
1299 1299 nodes,
1300 1300 nodesorder=None,
1301 1301 revisiondata=False,
1302 1302 assumehaveparentrevisions=False,
1303 1303 ):
1304 1304 """Produce ``irevisiondelta`` describing revisions.
1305 1305
1306 1306 See the documentation for ``ifiledata`` for more.
1307 1307 """
1308 1308
1309 1309 def addgroup(
1310 1310 deltas,
1311 1311 linkmapper,
1312 1312 transaction,
1313 1313 addrevisioncb=None,
1314 1314 duplicaterevisioncb=None,
1315 1315 ):
1316 1316 """Process a series of deltas for storage.
1317 1317
1318 1318 See the documentation in ``ifilemutation`` for more.
1319 1319 """
1320 1320
1321 1321 def rawsize(rev):
1322 1322 """Obtain the size of tracked data.
1323 1323
1324 1324 Is equivalent to ``len(m.rawdata(node))``.
1325 1325
1326 1326 TODO this method is only used by upgrade code and may be removed.
1327 1327 """
1328 1328
1329 1329 def getstrippoint(minlink):
1330 1330 """Find minimum revision that must be stripped to strip a linkrev.
1331 1331
1332 1332 See the documentation in ``ifilemutation`` for more.
1333 1333 """
1334 1334
1335 1335 def strip(minlink, transaction):
1336 1336 """Remove storage of items starting at a linkrev.
1337 1337
1338 1338 See the documentation in ``ifilemutation`` for more.
1339 1339 """
1340 1340
1341 1341 def checksize():
1342 1342 """Obtain the expected sizes of backing files.
1343 1343
1344 1344 TODO this is used by verify and it should not be part of the interface.
1345 1345 """
1346 1346
1347 1347 def files():
1348 1348 """Obtain paths that are backing storage for this manifest.
1349 1349
1350 1350 TODO this is used by verify and there should probably be a better API
1351 1351 for this functionality.
1352 1352 """
1353 1353
1354 1354 def deltaparent(rev):
1355 1355 """Obtain the revision that a revision is delta'd against.
1356 1356
1357 1357 TODO delta encoding is an implementation detail of storage and should
1358 1358 not be exposed to the storage interface.
1359 1359 """
1360 1360
1361 1361 def clone(tr, dest, **kwargs):
1362 1362 """Clone this instance to another."""
1363 1363
1364 1364 def clearcaches(clear_persisted_data=False):
1365 1365 """Clear any caches associated with this instance."""
1366 1366
1367 1367 def dirlog(d):
1368 1368 """Obtain a manifest storage instance for a tree."""
1369 1369
1370 1370 def add(
1371 1371 m, transaction, link, p1, p2, added, removed, readtree=None, match=None
1372 1372 ):
1373 1373 """Add a revision to storage.
1374 1374
1375 1375 ``m`` is an object conforming to ``imanifestdict``.
1376 1376
1377 1377 ``link`` is the linkrev revision number.
1378 1378
1379 1379 ``p1`` and ``p2`` are the parent revision numbers.
1380 1380
1381 1381 ``added`` and ``removed`` are iterables of added and removed paths,
1382 1382 respectively.
1383 1383
1384 1384 ``readtree`` is a function that can be used to read the child tree(s)
1385 1385 when recursively writing the full tree structure when using
1386 1386 treemanifests.
1387 1387
1388 1388 ``match`` is a matcher that can be used to hint to storage that not all
1389 1389 paths must be inspected; this is an optimization and can be safely
1390 1390 ignored. Note that the storage must still be able to reproduce a full
1391 1391 manifest including files that did not match.
1392 1392 """
1393 1393
1394 1394 def storageinfo(
1395 1395 exclusivefiles=False,
1396 1396 sharedfiles=False,
1397 1397 revisionscount=False,
1398 1398 trackedsize=False,
1399 1399 storedsize=False,
1400 1400 ):
1401 1401 """Obtain information about storage for this manifest's data.
1402 1402
1403 1403 See ``ifilestorage.storageinfo()`` for a description of this method.
1404 1404 This one behaves the same way, except for manifest data.
1405 1405 """
1406 1406
1407 1407
1408 1408 class imanifestlog(interfaceutil.Interface):
1409 1409 """Interface representing a collection of manifest snapshots.
1410 1410
1411 1411 Represents the root manifest in a repository.
1412 1412
1413 1413 Also serves as a means to access nested tree manifests and to cache
1414 1414 tree manifests.
1415 1415 """
1416 1416
1417 1417 nodeconstants = interfaceutil.Attribute(
1418 1418 """nodeconstants used by the current repository."""
1419 1419 )
1420 1420
1421 1421 def __getitem__(node):
1422 1422 """Obtain a manifest instance for a given binary node.
1423 1423
1424 1424 Equivalent to calling ``self.get('', node)``.
1425 1425
1426 1426 The returned object conforms to the ``imanifestrevisionstored``
1427 1427 interface.
1428 1428 """
1429 1429
1430 1430 def get(tree, node, verify=True):
1431 1431 """Retrieve the manifest instance for a given directory and binary node.
1432 1432
1433 1433 ``node`` always refers to the node of the root manifest (which will be
1434 1434 the only manifest if flat manifests are being used).
1435 1435
1436 1436 If ``tree`` is the empty string, the root manifest is returned.
1437 1437 Otherwise the manifest for the specified directory will be returned
1438 1438 (requires tree manifests).
1439 1439
1440 1440 If ``verify`` is True, ``LookupError`` is raised if the node is not
1441 1441 known.
1442 1442
1443 1443 The returned object conforms to the ``imanifestrevisionstored``
1444 1444 interface.
1445 1445 """
1446 1446
1447 1447 def getstorage(tree):
1448 1448 """Retrieve an interface to storage for a particular tree.
1449 1449
1450 1450 If ``tree`` is the empty bytestring, storage for the root manifest will
1451 1451 be returned. Otherwise storage for a tree manifest is returned.
1452 1452
1453 1453 TODO formalize interface for returned object.
1454 1454 """
1455 1455
1456 1456 def clearcaches():
1457 1457 """Clear caches associated with this collection."""
1458 1458
1459 1459 def rev(node):
1460 1460 """Obtain the revision number for a binary node.
1461 1461
1462 1462 Raises ``error.LookupError`` if the node is not known.
1463 1463 """
1464 1464
1465 1465 def update_caches(transaction):
1466 1466 """update whatever cache are relevant for the used storage."""
1467 1467
1468 1468
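# --- illustrative example (not part of the original file) -------------------
# Sketch of resolving manifests through an imanifestlog instance, assuming
# ``mfl`` implements this interface and ``node`` is the binary node of a
# root manifest. The directory name is illustrative and tree access requires
# treemanifest storage.
def _example_read_manifests(mfl, node):
    root = mfl[node]       # imanifestrevisionstored for the root manifest
    mdict = root.read()    # imanifestdict mapping path -> node
    subtree = mfl.get(b'foo/bar', node, verify=False)
    return mdict, subtree
# -----------------------------------------------------------------------------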
1469 1469 class ilocalrepositoryfilestorage(interfaceutil.Interface):
1470 1470 """Local repository sub-interface providing access to tracked file storage.
1471 1471
1472 1472 This interface defines how a repository accesses storage for a single
1473 1473 tracked file path.
1474 1474 """
1475 1475
1476 1476 def file(f):
1477 1477 """Obtain a filelog for a tracked path.
1478 1478
1479 1479 The returned type conforms to the ``ifilestorage`` interface.
1480 1480 """
1481 1481
1482 1482
1483 1483 class ilocalrepositorymain(interfaceutil.Interface):
1484 1484 """Main interface for local repositories.
1485 1485
1486 1486 This currently captures the reality of things - not how things should be.
1487 1487 """
1488 1488
1489 1489 nodeconstants = interfaceutil.Attribute(
1490 1490 """Constant nodes matching the hash function used by the repository."""
1491 1491 )
1492 1492 nullid = interfaceutil.Attribute(
1493 1493 """null revision for the hash function used by the repository."""
1494 1494 )
1495 1495
1496 1496 supported = interfaceutil.Attribute(
1497 1497 """Set of requirements that this repo is capable of opening."""
1498 1498 )
1499 1499
1500 1500 requirements = interfaceutil.Attribute(
1501 1501 """Set of requirements this repo uses."""
1502 1502 )
1503 1503
1504 1504 features = interfaceutil.Attribute(
1505 1505 """Set of "features" this repository supports.
1506 1506
1507 1507 A "feature" is a loosely-defined term. It can refer to a feature
1508 1508 in the classical sense or can describe an implementation detail
1509 1509 of the repository. For example, a ``readonly`` feature may denote
1510 1510 the repository as read-only. Or a ``revlogfilestore`` feature may
1511 1511 denote that the repository is using revlogs for file storage.
1512 1512
1513 1513 The intent of features is to provide a machine-queryable mechanism
1514 1514 for repo consumers to test for various repository characteristics.
1515 1515
1516 1516 Features are similar to ``requirements``. The main difference is that
1517 1517 requirements are stored on-disk and represent requirements to open the
1518 1518 repository. Features are more granular, run-time capabilities of the
1519 1519 repository (which may be derived from requirements).
1520 1520 """
1521 1521 )
1522 1522
1523 1523 filtername = interfaceutil.Attribute(
1524 1524 """Name of the repoview that is active on this repo."""
1525 1525 )
1526 1526
1527 1527 vfs_map = interfaceutil.Attribute(
1528 1528 """a bytes-key → vfs mapping used by transaction and others"""
1529 1529 )
1530 1530
1531 1531 wvfs = interfaceutil.Attribute(
1532 1532 """VFS used to access the working directory."""
1533 1533 )
1534 1534
1535 1535 vfs = interfaceutil.Attribute(
1536 1536 """VFS rooted at the .hg directory.
1537 1537
1538 1538 Used to access repository data not in the store.
1539 1539 """
1540 1540 )
1541 1541
1542 1542 svfs = interfaceutil.Attribute(
1543 1543 """VFS rooted at the store.
1544 1544
1545 1545 Used to access repository data in the store. Typically .hg/store.
1546 1546 But can point elsewhere if the store is shared.
1547 1547 """
1548 1548 )
1549 1549
1550 1550 root = interfaceutil.Attribute(
1551 1551 """Path to the root of the working directory."""
1552 1552 )
1553 1553
1554 1554 path = interfaceutil.Attribute("""Path to the .hg directory.""")
1555 1555
1556 1556 origroot = interfaceutil.Attribute(
1557 1557 """The filesystem path that was used to construct the repo."""
1558 1558 )
1559 1559
1560 1560 auditor = interfaceutil.Attribute(
1561 1561 """A pathauditor for the working directory.
1562 1562
1563 1563 This checks if a path refers to a nested repository.
1564 1564
1565 1565 Operates on the filesystem.
1566 1566 """
1567 1567 )
1568 1568
1569 1569 nofsauditor = interfaceutil.Attribute(
1570 1570 """A pathauditor for the working directory.
1571 1571
1572 1572 This is like ``auditor`` except it doesn't do filesystem checks.
1573 1573 """
1574 1574 )
1575 1575
1576 1576 baseui = interfaceutil.Attribute(
1577 1577 """Original ui instance passed into constructor."""
1578 1578 )
1579 1579
1580 1580 ui = interfaceutil.Attribute("""Main ui instance for this instance.""")
1581 1581
1582 1582 sharedpath = interfaceutil.Attribute(
1583 1583 """Path to the .hg directory of the repo this repo was shared from."""
1584 1584 )
1585 1585
1586 1586 store = interfaceutil.Attribute("""A store instance.""")
1587 1587
1588 1588 spath = interfaceutil.Attribute("""Path to the store.""")
1589 1589
1590 1590 sjoin = interfaceutil.Attribute("""Alias to self.store.join.""")
1591 1591
1592 1592 cachevfs = interfaceutil.Attribute(
1593 1593 """A VFS used to access the cache directory.
1594 1594
1595 1595 Typically .hg/cache.
1596 1596 """
1597 1597 )
1598 1598
1599 1599 wcachevfs = interfaceutil.Attribute(
1600 1600 """A VFS used to access the cache directory dedicated to working copy
1601 1601
1602 1602 Typically .hg/wcache.
1603 1603 """
1604 1604 )
1605 1605
1606 1606 filteredrevcache = interfaceutil.Attribute(
1607 1607 """Holds sets of revisions to be filtered."""
1608 1608 )
1609 1609
1610 1610 names = interfaceutil.Attribute("""A ``namespaces`` instance.""")
1611 1611
1612 1612 filecopiesmode = interfaceutil.Attribute(
1613 1613 """The way files copies should be dealt with in this repo."""
1614 1614 )
1615 1615
1616 1616 def close():
1617 1617 """Close the handle on this repository."""
1618 1618
1619 1619 def peer(path=None):
1620 1620 """Obtain an object conforming to the ``peer`` interface."""
1621 1621
1622 1622 def unfiltered():
1623 1623 """Obtain an unfiltered/raw view of this repo."""
1624 1624
1625 1625 def filtered(name, visibilityexceptions=None):
1626 1626 """Obtain a named view of this repository."""
1627 1627
1628 1628 obsstore = interfaceutil.Attribute("""A store of obsolescence data.""")
1629 1629
1630 1630 changelog = interfaceutil.Attribute("""A handle on the changelog revlog.""")
1631 1631
1632 1632 manifestlog = interfaceutil.Attribute(
1633 1633 """An instance conforming to the ``imanifestlog`` interface.
1634 1634
1635 1635 Provides access to manifests for the repository.
1636 1636 """
1637 1637 )
1638 1638
1639 1639 dirstate = interfaceutil.Attribute("""Working directory state.""")
1640 1640
1641 1641 narrowpats = interfaceutil.Attribute(
1642 1642 """Matcher patterns for this repository's narrowspec."""
1643 1643 )
1644 1644
1645 1645 def narrowmatch(match=None, includeexact=False):
1646 1646 """Obtain a matcher for the narrowspec."""
1647 1647
1648 1648 def setnarrowpats(newincludes, newexcludes):
1649 1649 """Define the narrowspec for this repository."""
1650 1650
1651 1651 def __getitem__(changeid):
1652 1652 """Try to resolve a changectx."""
1653 1653
1654 1654 def __contains__(changeid):
1655 1655 """Whether a changeset exists."""
1656 1656
1657 1657 def __nonzero__():
1658 1658 """Always returns True."""
1659 1659 return True
1660 1660
1661 1661 __bool__ = __nonzero__
1662 1662
1663 1663 def __len__():
1664 1664 """Returns the number of changesets in the repo."""
1665 1665
1666 1666 def __iter__():
1667 1667 """Iterate over revisions in the changelog."""
1668 1668
1669 1669 def revs(expr, *args):
1670 1670 """Evaluate a revset.
1671 1671
1672 1672 Emits revisions.
1673 1673 """
1674 1674
1675 1675 def set(expr, *args):
1676 1676 """Evaluate a revset.
1677 1677
1678 1678 Emits changectx instances.
1679 1679 """
1680 1680
1681 1681 def anyrevs(specs, user=False, localalias=None):
1682 1682 """Find revisions matching one of the given revsets."""
1683 1683
1684 1684 def url():
1685 1685 """Returns a string representing the location of this repo."""
1686 1686
1687 1687 def hook(name, throw=False, **args):
1688 1688 """Call a hook."""
1689 1689
1690 1690 def tags():
1691 1691 """Return a mapping of tag to node."""
1692 1692
1693 1693 def tagtype(tagname):
1694 1694 """Return the type of a given tag."""
1695 1695
1696 1696 def tagslist():
1697 1697 """Return a list of tags ordered by revision."""
1698 1698
1699 1699 def nodetags(node):
1700 1700 """Return the tags associated with a node."""
1701 1701
1702 1702 def nodebookmarks(node):
1703 1703 """Return the list of bookmarks pointing to the specified node."""
1704 1704
1705 1705 def branchmap():
1706 1706 """Return a mapping of branch to heads in that branch."""
1707 1707
1708 1708 def revbranchcache():
1709 1709 pass
1710 1710
1711 1711 def register_changeset(rev, changelogrevision):
1712 1712 """Extension point for caches for new nodes.
1713 1713
1714 1714 Multiple consumers are expected to need parts of the changelogrevision,
1715 1715 so it is provided as an optimization to avoid duplicate lookups. A simple
1716 1716 cache would be fragile when other revisions are accessed, too."""
1717 1717 pass
1718 1718
1719 1719 def branchtip(branchtip, ignoremissing=False):
1720 1720 """Return the tip node for a given branch."""
1721 1721
1722 1722 def lookup(key):
1723 1723 """Resolve the node for a revision."""
1724 1724
1725 1725 def lookupbranch(key):
1726 1726 """Look up the branch name of the given revision or branch name."""
1727 1727
1728 1728 def known(nodes):
1729 1729 """Determine whether a series of nodes is known.
1730 1730
1731 1731 Returns a list of bools.
1732 1732 """
1733 1733
1734 1734 def local():
1735 1735 """Whether the repository is local."""
1736 1736 return True
1737 1737
1738 1738 def publishing():
1739 1739 """Whether the repository is a publishing repository."""
1740 1740
1741 1741 def cancopy():
1742 1742 pass
1743 1743
1744 1744 def shared():
1745 1745 """The type of shared repository or None."""
1746 1746
1747 1747 def wjoin(f, *insidef):
1748 1748 """Calls self.vfs.reljoin(self.root, f, *insidef)"""
1749 1749
1750 1750 def setparents(p1, p2):
1751 1751 """Set the parent nodes of the working directory."""
1752 1752
1753 1753 def filectx(path, changeid=None, fileid=None):
1754 1754 """Obtain a filectx for the given file revision."""
1755 1755
1756 1756 def getcwd():
1757 1757 """Obtain the current working directory from the dirstate."""
1758 1758
1759 1759 def pathto(f, cwd=None):
1760 1760 """Obtain the relative path to a file."""
1761 1761
1762 1762 def adddatafilter(name, fltr):
1763 1763 pass
1764 1764
1765 1765 def wread(filename):
1766 1766 """Read a file from wvfs, using data filters."""
1767 1767
1768 1768 def wwrite(filename, data, flags, backgroundclose=False, **kwargs):
1769 1769 """Write data to a file in the wvfs, using data filters."""
1770 1770
1771 1771 def wwritedata(filename, data):
1772 1772 """Resolve data for writing to the wvfs, using data filters."""
1773 1773
1774 1774 def currenttransaction():
1775 1775 """Obtain the current transaction instance or None."""
1776 1776
1777 1777 def transaction(desc, report=None):
1778 1778 """Open a new transaction to write to the repository."""
1779 1779
1780 1780 def undofiles():
1781 1781 """Returns a list of (vfs, path) for files to undo transactions."""
1782 1782
1783 1783 def recover():
1784 1784 """Roll back an interrupted transaction."""
1785 1785
1786 1786 def rollback(dryrun=False, force=False):
1787 1787 """Undo the last transaction.
1788 1788
1789 1789 DANGEROUS.
1790 1790 """
1791 1791
1792 1792 def updatecaches(tr=None, full=False, caches=None):
1793 1793 """Warm repo caches."""
1794 1794
1795 1795 def invalidatecaches():
1796 1796 """Invalidate cached data due to the repository mutating."""
1797 1797
1798 1798 def invalidatevolatilesets():
1799 1799 pass
1800 1800
1801 1801 def invalidatedirstate():
1802 1802 """Invalidate the dirstate."""
1803 1803
1804 1804 def invalidate(clearfilecache=False):
1805 1805 pass
1806 1806
1807 1807 def invalidateall():
1808 1808 pass
1809 1809
1810 1810 def lock(wait=True):
1811 1811 """Lock the repository store and return a lock instance."""
1812 1812
1813 1813 def currentlock():
1814 1814 """Return the lock if it's held or None."""
1815 1815
1816 1816 def wlock(wait=True):
1817 1817 """Lock the non-store parts of the repository."""
1818 1818
1819 1819 def currentwlock():
1820 1820 """Return the wlock if it's held or None."""
1821 1821
1822 1822 def checkcommitpatterns(wctx, match, status, fail):
1823 1823 pass
1824 1824
1825 1825 def commit(
1826 1826 text=b'',
1827 1827 user=None,
1828 1828 date=None,
1829 1829 match=None,
1830 1830 force=False,
1831 1831 editor=False,
1832 1832 extra=None,
1833 1833 ):
1834 1834 """Add a new revision to the repository."""
1835 1835
1836 1836 def commitctx(ctx, error=False, origctx=None):
1837 1837 """Commit a commitctx instance to the repository."""
1838 1838
1839 1839 def destroying():
1840 1840 """Inform the repository that nodes are about to be destroyed."""
1841 1841
1842 1842 def destroyed():
1843 1843 """Inform the repository that nodes have been destroyed."""
1844 1844
1845 1845 def status(
1846 1846 node1=b'.',
1847 1847 node2=None,
1848 1848 match=None,
1849 1849 ignored=False,
1850 1850 clean=False,
1851 1851 unknown=False,
1852 1852 listsubrepos=False,
1853 1853 ):
1854 1854 """Convenience method to call repo[x].status()."""
1855 1855
1856 1856 def addpostdsstatus(ps):
1857 1857 pass
1858 1858
1859 1859 def postdsstatus():
1860 1860 pass
1861 1861
1862 1862 def clearpostdsstatus():
1863 1863 pass
1864 1864
1865 1865 def heads(start=None):
1866 1866 """Obtain list of nodes that are DAG heads."""
1867 1867
1868 1868 def branchheads(branch=None, start=None, closed=False):
1869 1869 pass
1870 1870
1871 1871 def branches(nodes):
1872 1872 pass
1873 1873
1874 1874 def between(pairs):
1875 1875 pass
1876 1876
1877 1877 def checkpush(pushop):
1878 1878 pass
1879 1879
1880 1880 prepushoutgoinghooks = interfaceutil.Attribute("""util.hooks instance.""")
1881 1881
1882 1882 def pushkey(namespace, key, old, new):
1883 1883 pass
1884 1884
1885 1885 def listkeys(namespace):
1886 1886 pass
1887 1887
1888 1888 def debugwireargs(one, two, three=None, four=None, five=None):
1889 1889 pass
1890 1890
1891 1891 def savecommitmessage(text):
1892 1892 pass
1893 1893
1894 1894 def register_sidedata_computer(
1895 1895 kind, category, keys, computer, flags, replace=False
1896 1896 ):
1897 1897 pass
1898 1898
1899 1899 def register_wanted_sidedata(category):
1900 1900 pass
1901 1901
1902 1902
1903 1903 class completelocalrepository(
1904 1904 ilocalrepositorymain, ilocalrepositoryfilestorage
1905 1905 ):
1906 1906 """Complete interface for a local repository."""
1907 1907
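# A minimal illustrative sketch (not actual Mercurial code) of how client code
# typically consumes the repository interface declared above. ``hg.repository``
# and ``ui.load`` are the conventional entry points, but exact behaviour can
# differ across Mercurial versions, so treat this as a sketch, not a reference.
def _example_repo_usage(path=b'.'):
    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui.load(), path)  # a completelocalrepository
    print(len(repo))                             # __len__: changeset count
    for rev in repo.revs(b'heads(all())'):       # revs(): evaluate a revset
        ctx = repo[rev]                          # __getitem__: a changectx
        print(ctx.hex(), ctx.description())
    return repo.branchmap()                      # branch name -> head nodes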
1908 1908
1909 1909 class iwireprotocolcommandcacher(interfaceutil.Interface):
1910 1910 """Represents a caching backend for wire protocol commands.
1911 1911
1912 1912 Wire protocol version 2 supports transparent caching of many commands.
1913 1913 To leverage this caching, servers can activate objects that cache
1914 1914 command responses. Objects handle both cache writing and reading.
1915 1915 This interface defines how that response caching mechanism works.
1916 1916
1917 1917 Wire protocol version 2 commands emit a series of objects that are
1918 1918 serialized and sent to the client. The caching layer exists between
1919 1919 the invocation of the command function and the sending of its output
1920 1920 objects to an output layer.
1921 1921
1922 1922 Instances of this interface represent a binding to a cache that
1923 1923 can serve a response (in place of calling a command function) and/or
1924 1924 write responses to a cache for subsequent use.
1925 1925
1926 1926 When a command request arrives, the following happens with regard
1927 1927 to this interface:
1928 1928
1929 1929 1. The server determines whether the command request is cacheable.
1930 1930 2. If it is, an instance of this interface is spawned.
1931 1931 3. The cacher is activated in a context manager (``__enter__`` is called).
1932 1932 4. A cache *key* for that request is derived. This will call the
1933 1933 instance's ``adjustcachekeystate()`` method so the derivation
1934 1934 can be influenced.
1935 1935 5. The cacher is informed of the derived cache key via a call to
1936 1936 ``setcachekey()``.
1937 1937 6. The cacher's ``lookup()`` method is called to test for presence of
1938 1938 the derived key in the cache.
1939 1939 7. If ``lookup()`` returns a hit, that cached result is used in place
1940 1940 of invoking the command function. ``__exit__`` is called and the instance
1941 1941 is discarded.
1942 1942 8. The command function is invoked.
1943 1943 9. ``onobject()`` is called for each object emitted by the command
1944 1944 function.
1945 1945 10. After the final object is seen, ``onfinished()`` is called.
1946 1946 11. ``__exit__`` is called to signal the end of use of the instance.
1947 1947
1948 1948 Cache *key* derivation can be influenced by the instance.
1949 1949
1950 1950 Cache keys are initially derived by a deterministic representation of
1951 1951 the command request. This includes the command name, arguments, protocol
1952 1952 version, etc. This initial key derivation is performed by CBOR-encoding a
1953 1953 data structure and feeding that output into a hasher.
1954 1954
1955 1955 Instances of this interface can influence this initial key derivation
1956 1956 via ``adjustcachekeystate()``.
1957 1957
1958 1958 The instance is informed of the derived cache key via a call to
1959 1959 ``setcachekey()``. The instance must store the key locally so it can
1960 1960 be consulted on subsequent operations that may require it.
1961 1961
1962 1962 When constructed, the instance has access to a callable that can be used
1963 1963 for encoding response objects. This callable receives as its single
1964 1964 argument an object emitted by a command function. It returns an iterable
1965 1965 of bytes chunks representing the encoded object. Unless the cacher is
1966 1966 caching native Python objects in memory or has a way of reconstructing
1967 1967 the original Python objects, implementations typically call this function
1968 1968 to produce bytes from the output objects and then store those bytes in
1969 1969 the cache. When it comes time to re-emit those bytes, they are wrapped
1970 1970 in a ``wireprototypes.encodedresponse`` instance to tell the output
1971 1971 layer that they are pre-encoded.
1972 1972
1973 1973 When receiving the objects emitted by the command function, instances
1974 1974 can choose what to do with those objects. The simplest thing to do is
1975 1975 re-emit the original objects. They will be forwarded to the output
1976 1976 layer and will be processed as if the cacher did not exist.
1977 1977
1978 1978 Implementations could also choose not to emit objects, instead locally
1979 1979 buffering objects or their encoded representation. They could then emit
1980 1980 a single "coalesced" object when ``onfinished()`` is called. In
1981 1981 this way, the implementation would function as a filtering layer of
1982 1982 sorts.
1983 1983
1984 1984 When caching objects, typically the encoded form of the object will
1985 1985 be stored. Keep in mind that if the original object is forwarded to
1986 1986 the output layer, it will need to be encoded there as well. For large
1987 1987 output, this redundant encoding could add overhead. Implementations
1988 1988 could wrap the encoded object data in ``wireprototypes.encodedresponse``
1989 1989 instances to avoid this overhead.
1990 1990 """
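# A rough sketch (not actual Mercurial code) of how a server-side caller is
# expected to drive a cacher through steps 1-11 above. Names such as
# ``cacherfactory``, ``derivekey``, ``commandfn`` and ``emitter`` are
# placeholders for this illustration, not real Mercurial APIs.
#
#   with cacherfactory(encodefn) as cacher:        # steps 2-3
#       cacher.adjustcachekeystate(state)          # step 4
#       cacher.setcachekey(derivekey(state))       # step 5
#       cached = cacher.lookup()                   # step 6
#       if cached is not None:                     # step 7: cache hit
#           for obj in cached['objs']:
#               emitter.send(obj)
#       else:
#           for obj in commandfn(repo, **args):    # steps 8-9
#               for o in cacher.onobject(obj):
#                   emitter.send(o)
#           for o in cacher.onfinished():          # step 10
#               emitter.send(o)
#   # step 11: __exit__ runs when the ``with`` block is left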
1991 1991
1992 1992 def __enter__():
1993 1993 """Marks the instance as active.
1994 1994
1995 1995 Should return self.
1996 1996 """
1997 1997
1998 1998 def __exit__(exctype, excvalue, exctb):
1999 1999 """Called when cacher is no longer used.
2000 2000
2001 2001 This can be used by implementations to perform cleanup actions (e.g.
2002 2002 disconnecting network sockets, aborting a partially cached response).
2003 2003 """
2004 2004
2005 2005 def adjustcachekeystate(state):
2006 2006 """Influences cache key derivation by adjusting state to derive key.
2007 2007
2008 2008 A dict defining the state used to derive the cache key is passed.
2009 2009
2010 2010 Implementations can modify this dict to record additional state that
2011 2011 is wanted to influence key derivation.
2012 2012
2013 2013 Implementations are *highly* encouraged not to modify or delete
2014 2014 existing keys.
2015 2015 """
2016 2016
2017 2017 def setcachekey(key):
2018 2018 """Record the derived cache key for this request.
2019 2019
2020 2020 Instances may mutate the key for internal usage as desired, e.g.
2021 2021 instances may wish to prepend the repo name, introduce path
2022 2022 components for filesystem or URL addressing, etc. Behavior is up to
2023 2023 the cache.
2024 2024
2025 2025 Returns a bool indicating if the request is cacheable by this
2026 2026 instance.
2027 2027 """
2028 2028
2029 2029 def lookup():
2030 2030 """Attempt to resolve an entry in the cache.
2031 2031
2032 2032 The instance is instructed to look for the cache key that it was
2033 2033 informed about via the call to ``setcachekey()``.
2034 2034
2035 2035 If there's no cache hit or the cacher doesn't wish to use the cached
2036 2036 entry, ``None`` should be returned.
2037 2037
2038 2038 Else, a dict defining the cached result should be returned. The
2039 2039 dict may have the following keys:
2040 2040
2041 2041 objs
2042 2042 An iterable of objects that should be sent to the client. That
2043 2043 iterable of objects is expected to be what the command function
2044 2044 would return if invoked or an equivalent representation thereof.
2045 2045 """
2046 2046
2047 2047 def onobject(obj):
2048 2048 """Called when a new object is emitted from the command function.
2049 2049
2050 2050 Receives as its argument the object that was emitted from the
2051 2051 command function.
2052 2052
2053 2053 This method returns an iterator of objects to forward to the output
2054 2054 layer. The easiest implementation is a generator that just
2055 2055 ``yield obj``.
2056 2056 """
2057 2057
2058 2058 def onfinished():
2059 2059 """Called after all objects have been emitted from the command function.
2060 2060
2061 2061 Implementations should return an iterator of objects to forward to
2062 2062 the output layer.
2063 2063
2064 2064 This method can be a generator.
2065 2065 """
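# A minimal in-memory sketch (not actual Mercurial code) of the interface
# above. ``memorycacher`` and its attribute names are invented for this
# illustration; ``wireprototypes.encodedresponse`` is the wrapper mentioned in
# the interface docstring, and the ``objs`` key follows ``lookup()`` above. A
# real cacher would bound its memory use and handle concurrent requests.
from mercurial import wireprototypes


class memorycacher:
    _entries = {}  # class-level store shared by all instances (sketch only)

    def __init__(self, encodefn):
        self._encodefn = encodefn  # callable: object -> iterable of bytes
        self._key = None
        self._buffered = []

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        # Nothing to tear down for an in-memory cache; a real implementation
        # might close sockets or discard a partially written entry here.
        self._buffered = []

    def adjustcachekeystate(self, state):
        # Only add state; existing keys are left untouched, as recommended.
        state[b'cacher'] = b'memory-v1'

    def setcachekey(self, key):
        self._key = key
        return True  # every request is cacheable by this sketch

    def lookup(self):
        if self._key not in self._entries:
            return None
        # Re-emit pre-encoded bytes so the output layer skips re-encoding.
        cached = self._entries[self._key]
        return {'objs': [wireprototypes.encodedresponse(cached)]}

    def onobject(self, obj):
        # Buffer the encoded form for later storage, but forward the
        # original object unchanged to the output layer.
        self._buffered.extend(self._encodefn(obj))
        yield obj

    def onfinished(self):
        self._entries[self._key] = b''.join(self._buffered)
        return []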