exchange: backout changeset c26335fa4225...
marmoute
r45782:6063c185 stable
@@ -1,3162 +1,3157 @@
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import weakref
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 nullrev,
18 18 )
19 19 from .thirdparty import attr
20 20 from . import (
21 21 bookmarks as bookmod,
22 22 bundle2,
23 23 changegroup,
24 24 discovery,
25 25 error,
26 26 exchangev2,
27 27 lock as lockmod,
28 28 logexchange,
29 29 narrowspec,
30 30 obsolete,
31 31 obsutil,
32 32 phases,
33 33 pushkey,
34 34 pycompat,
35 35 scmutil,
36 36 sslutil,
37 37 streamclone,
38 38 url as urlmod,
39 39 util,
40 40 wireprototypes,
41 41 )
42 42 from .interfaces import repository
43 43 from .utils import (
44 44 hashutil,
45 45 stringutil,
46 46 )
47 47
48 48 urlerr = util.urlerr
49 49 urlreq = util.urlreq
50 50
51 51 _NARROWACL_SECTION = b'narrowacl'
52 52
53 53 # Maps bundle version human names to changegroup versions.
54 54 _bundlespeccgversions = {
55 55 b'v1': b'01',
56 56 b'v2': b'02',
57 57 b'packed1': b's1',
58 58 b'bundle2': b'02', # legacy
59 59 }
60 60
61 61 # Maps bundle version names to content options, used to choose which parts to bundle
62 62 _bundlespeccontentopts = {
63 63 b'v1': {
64 64 b'changegroup': True,
65 65 b'cg.version': b'01',
66 66 b'obsolescence': False,
67 67 b'phases': False,
68 68 b'tagsfnodescache': False,
69 69 b'revbranchcache': False,
70 70 },
71 71 b'v2': {
72 72 b'changegroup': True,
73 73 b'cg.version': b'02',
74 74 b'obsolescence': False,
75 75 b'phases': False,
76 76 b'tagsfnodescache': True,
77 77 b'revbranchcache': True,
78 78 },
79 79 b'packed1': {b'cg.version': b's1'},
80 80 }
81 81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
82 82
83 83 _bundlespecvariants = {
84 84 b"streamv2": {
85 85 b"changegroup": False,
86 86 b"streamv2": True,
87 87 b"tagsfnodescache": False,
88 88 b"revbranchcache": False,
89 89 }
90 90 }
91 91
92 92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
93 93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94 94
95 95
96 96 @attr.s
97 97 class bundlespec(object):
98 98 compression = attr.ib()
99 99 wirecompression = attr.ib()
100 100 version = attr.ib()
101 101 wireversion = attr.ib()
102 102 params = attr.ib()
103 103 contentopts = attr.ib()
104 104
105 105
106 106 def parsebundlespec(repo, spec, strict=True):
107 107 """Parse a bundle string specification into parts.
108 108
109 109 Bundle specifications denote a well-defined bundle/exchange format.
110 110 The content of a given specification should not change over time in
111 111 order to ensure that bundles produced by a newer version of Mercurial are
112 112 readable from an older version.
113 113
114 114 The string currently has the form:
115 115
116 116 <compression>-<type>[;<parameter0>[;<parameter1>]]
117 117
118 118 Where <compression> is one of the supported compression formats
119 119 and <type> is (currently) a version string. A ";" can follow the type and
120 120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
121 121 pairs.
122 122
123 123 If ``strict`` is True (the default) <compression> is required. Otherwise,
124 124 it is optional.
125 125
126 126 Returns a bundlespec object of (compression, version, parameters).
127 127 Compression will be ``None`` if not in strict mode and a compression isn't
128 128 defined.
129 129
130 130 An ``InvalidBundleSpecification`` is raised when the specification is
131 131 not syntactically well formed.
132 132
133 133 An ``UnsupportedBundleSpecification`` is raised when the compression or
134 134 bundle type/version is not recognized.
135 135
136 136 Note: this function will likely eventually return a more complex data
137 137 structure, including bundle2 part information.
138 138 """
139 139
140 140 def parseparams(s):
141 141 if b';' not in s:
142 142 return s, {}
143 143
144 144 params = {}
145 145 version, paramstr = s.split(b';', 1)
146 146
147 147 for p in paramstr.split(b';'):
148 148 if b'=' not in p:
149 149 raise error.InvalidBundleSpecification(
150 150 _(
151 151 b'invalid bundle specification: '
152 152 b'missing "=" in parameter: %s'
153 153 )
154 154 % p
155 155 )
156 156
157 157 key, value = p.split(b'=', 1)
158 158 key = urlreq.unquote(key)
159 159 value = urlreq.unquote(value)
160 160 params[key] = value
161 161
162 162 return version, params
163 163
164 164 if strict and b'-' not in spec:
165 165 raise error.InvalidBundleSpecification(
166 166 _(
167 167 b'invalid bundle specification; '
168 168 b'must be prefixed with compression: %s'
169 169 )
170 170 % spec
171 171 )
172 172
173 173 if b'-' in spec:
174 174 compression, version = spec.split(b'-', 1)
175 175
176 176 if compression not in util.compengines.supportedbundlenames:
177 177 raise error.UnsupportedBundleSpecification(
178 178 _(b'%s compression is not supported') % compression
179 179 )
180 180
181 181 version, params = parseparams(version)
182 182
183 183 if version not in _bundlespeccgversions:
184 184 raise error.UnsupportedBundleSpecification(
185 185 _(b'%s is not a recognized bundle version') % version
186 186 )
187 187 else:
188 188 # Value could be just the compression or just the version, in which
189 189 # case some defaults are assumed (but only when not in strict mode).
190 190 assert not strict
191 191
192 192 spec, params = parseparams(spec)
193 193
194 194 if spec in util.compengines.supportedbundlenames:
195 195 compression = spec
196 196 version = b'v1'
197 197 # Generaldelta repos require v2.
198 198 if b'generaldelta' in repo.requirements:
199 199 version = b'v2'
200 200 # Modern compression engines require v2.
201 201 if compression not in _bundlespecv1compengines:
202 202 version = b'v2'
203 203 elif spec in _bundlespeccgversions:
204 204 if spec == b'packed1':
205 205 compression = b'none'
206 206 else:
207 207 compression = b'bzip2'
208 208 version = spec
209 209 else:
210 210 raise error.UnsupportedBundleSpecification(
211 211 _(b'%s is not a recognized bundle specification') % spec
212 212 )
213 213
214 214 # Bundle version 1 only supports a known set of compression engines.
215 215 if version == b'v1' and compression not in _bundlespecv1compengines:
216 216 raise error.UnsupportedBundleSpecification(
217 217 _(b'compression engine %s is not supported on v1 bundles')
218 218 % compression
219 219 )
220 220
221 221 # The specification for packed1 can optionally declare the data formats
222 222 # required to apply it. If we see this metadata, compare against what the
223 223 # repo supports and error if the bundle isn't compatible.
224 224 if version == b'packed1' and b'requirements' in params:
225 225 requirements = set(params[b'requirements'].split(b','))
226 226 missingreqs = requirements - repo.supportedformats
227 227 if missingreqs:
228 228 raise error.UnsupportedBundleSpecification(
229 229 _(b'missing support for repository features: %s')
230 230 % b', '.join(sorted(missingreqs))
231 231 )
232 232
233 233 # Compute contentopts based on the version
234 234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
235 235
236 236 # Process the variants
237 237 if b"stream" in params and params[b"stream"] == b"v2":
238 238 variant = _bundlespecvariants[b"streamv2"]
239 239 contentopts.update(variant)
240 240
241 241 engine = util.compengines.forbundlename(compression)
242 242 compression, wirecompression = engine.bundletype()
243 243 wireversion = _bundlespeccgversions[version]
244 244
245 245 return bundlespec(
246 246 compression, wirecompression, version, wireversion, params, contentopts
247 247 )
248 248
249 249
250 250 def readbundle(ui, fh, fname, vfs=None):
251 251 header = changegroup.readexactly(fh, 4)
252 252
253 253 alg = None
254 254 if not fname:
255 255 fname = b"stream"
256 256 if not header.startswith(b'HG') and header.startswith(b'\0'):
257 257 fh = changegroup.headerlessfixup(fh, header)
258 258 header = b"HG10"
259 259 alg = b'UN'
260 260 elif vfs:
261 261 fname = vfs.join(fname)
262 262
263 263 magic, version = header[0:2], header[2:4]
264 264
265 265 if magic != b'HG':
266 266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
267 267 if version == b'10':
268 268 if alg is None:
269 269 alg = changegroup.readexactly(fh, 2)
270 270 return changegroup.cg1unpacker(fh, alg)
271 271 elif version.startswith(b'2'):
272 272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
273 273 elif version == b'S1':
274 274 return streamclone.streamcloneapplier(fh)
275 275 else:
276 276 raise error.Abort(
277 277 _(b'%s: unknown bundle version %s') % (fname, version)
278 278 )
279 279
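# Informal header summary for readbundle (derived from the branches above):
# the first four bytes select the unbundler, e.g. b'HG10' followed by a two
# byte compression code (b'UN', b'GZ', b'BZ') for changegroup-v1 bundles,
# b'HG2x' for bundle2, and b'HGS1' for stream clone bundles. A headerless
# stream starting with b'\0' is fixed up as an uncompressed HG10 bundle.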
280 280
281 281 def getbundlespec(ui, fh):
282 282 """Infer the bundlespec from a bundle file handle.
283 283
284 284 The input file handle is seeked and the original seek position is not
285 285 restored.
286 286 """
287 287
288 288 def speccompression(alg):
289 289 try:
290 290 return util.compengines.forbundletype(alg).bundletype()[0]
291 291 except KeyError:
292 292 return None
293 293
294 294 b = readbundle(ui, fh, None)
295 295 if isinstance(b, changegroup.cg1unpacker):
296 296 alg = b._type
297 297 if alg == b'_truncatedBZ':
298 298 alg = b'BZ'
299 299 comp = speccompression(alg)
300 300 if not comp:
301 301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
302 302 return b'%s-v1' % comp
303 303 elif isinstance(b, bundle2.unbundle20):
304 304 if b'Compression' in b.params:
305 305 comp = speccompression(b.params[b'Compression'])
306 306 if not comp:
307 307 raise error.Abort(
308 308 _(b'unknown compression algorithm: %s') % comp
309 309 )
310 310 else:
311 311 comp = b'none'
312 312
313 313 version = None
314 314 for part in b.iterparts():
315 315 if part.type == b'changegroup':
316 316 version = part.params[b'version']
317 317 if version in (b'01', b'02'):
318 318 version = b'v2'
319 319 else:
320 320 raise error.Abort(
321 321 _(
322 322 b'changegroup version %s does not have '
323 323 b'a known bundlespec'
324 324 )
325 325 % version,
326 326 hint=_(b'try upgrading your Mercurial client'),
327 327 )
328 328 elif part.type == b'stream2' and version is None:
329 329 # A stream2 part must be part of a v2 bundle
330 330 requirements = urlreq.unquote(part.params[b'requirements'])
331 331 splitted = requirements.split()
332 332 params = bundle2._formatrequirementsparams(splitted)
333 333 return b'none-v2;stream=v2;%s' % params
334 334
335 335 if not version:
336 336 raise error.Abort(
337 337 _(b'could not identify changegroup version in bundle')
338 338 )
339 339
340 340 return b'%s-%s' % (comp, version)
341 341 elif isinstance(b, streamclone.streamcloneapplier):
342 342 requirements = streamclone.readbundle1header(fh)[2]
343 343 formatted = bundle2._formatrequirementsparams(requirements)
344 344 return b'none-packed1;%s' % formatted
345 345 else:
346 346 raise error.Abort(_(b'unknown bundle type: %s') % b)
347 347
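# Sketch of typical outputs: a bzip2 changegroup-v1 bundle yields
# b'bzip2-v1', while a stream-v2 bundle short-circuits to
# b'none-v2;stream=v2;<requirements>' before any changegroup part is seen.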
348 348
349 349 def _computeoutgoing(repo, heads, common):
350 350 """Computes which revs are outgoing given a set of common
351 351 and a set of heads.
352 352
353 353 This is a separate function so extensions can have access to
354 354 the logic.
355 355
356 356 Returns a discovery.outgoing object.
357 357 """
358 358 cl = repo.changelog
359 359 if common:
360 360 hasnode = cl.hasnode
361 361 common = [n for n in common if hasnode(n)]
362 362 else:
363 363 common = [nullid]
364 364 if not heads:
365 365 heads = cl.heads()
366 366 return discovery.outgoing(repo, common, heads)
367 367
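# Minimal usage sketch (n1 and n2 are hypothetical node ids):
#   out = _computeoutgoing(repo, heads=[n2], common=[n1])
#   out.missing then holds the changesets reachable from n2 but not from n1.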
368 368
369 369 def _checkpublish(pushop):
370 370 repo = pushop.repo
371 371 ui = repo.ui
372 372 behavior = ui.config(b'experimental', b'auto-publish')
373 373 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
374 374 return
375 375 remotephases = listkeys(pushop.remote, b'phases')
376 376 if not remotephases.get(b'publishing', False):
377 377 return
378 378
379 379 if pushop.revs is None:
380 380 published = repo.filtered(b'served').revs(b'not public()')
381 381 else:
382 382 published = repo.revs(b'::%ln - public()', pushop.revs)
383 383 if published:
384 384 if behavior == b'warn':
385 385 ui.warn(
386 386 _(b'%i changesets about to be published\n') % len(published)
387 387 )
388 388 elif behavior == b'confirm':
389 389 if ui.promptchoice(
390 390 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
391 391 % len(published)
392 392 ):
393 393 raise error.Abort(_(b'user quit'))
394 394 elif behavior == b'abort':
395 395 msg = _(b'push would publish %i changesets') % len(published)
396 396 hint = _(
397 397 b"use --publish or adjust 'experimental.auto-publish'"
398 398 b" config"
399 399 )
400 400 raise error.Abort(msg, hint=hint)
401 401
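# The check above is driven by a config knob, e.g.:
#   [experimental]
#   auto-publish = confirm   # or 'warn' / 'abort'; anything else disables it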
402 402
403 403 def _forcebundle1(op):
404 404 """return true if a pull/push must use bundle1
405 405
406 406 This function is used to allow testing of the older bundle version"""
407 407 ui = op.repo.ui
408 408 # The goal of this config is to allow developers to choose the bundle
409 409 # version used during exchange. This is especially handy during tests.
410 410 # Value is a list of bundle versions to pick from; the highest version
411 411 # should be used.
412 412 #
413 413 # developer config: devel.legacy.exchange
414 414 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 416 return forcebundle1 or not op.remote.capable(b'bundle2')
417 417
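# Example developer configuration forcing the legacy format in tests
# (a sketch; listing 'bundle2' as well would take precedence over 'bundle1'):
#   [devel]
#   legacy.exchange = bundle1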
418 418
419 419 class pushoperation(object):
420 420 """A object that represent a single push operation
421 421
422 422 Its purpose is to carry push related state and very common operations.
423 423
424 424 A new pushoperation should be created at the beginning of each push and
425 425 discarded afterward.
426 426 """
427 427
428 428 def __init__(
429 429 self,
430 430 repo,
431 431 remote,
432 432 force=False,
433 433 revs=None,
434 434 newbranch=False,
435 435 bookmarks=(),
436 436 publish=False,
437 437 pushvars=None,
438 438 ):
439 439 # repo we push from
440 440 self.repo = repo
441 441 self.ui = repo.ui
442 442 # repo we push to
443 443 self.remote = remote
444 444 # force option provided
445 445 self.force = force
446 446 # revs to be pushed (None is "all")
447 447 self.revs = revs
448 448 # bookmarks explicitly pushed
449 449 self.bookmarks = bookmarks
450 450 # allow push of new branch
451 451 self.newbranch = newbranch
452 452 # steps already performed
453 453 # (used to check what steps have already been performed through bundle2)
454 454 self.stepsdone = set()
455 455 # Integer version of the changegroup push result
456 456 # - None means nothing to push
457 457 # - 0 means HTTP error
458 458 # - 1 means we pushed and remote head count is unchanged *or*
459 459 # we have outgoing changesets but refused to push
460 460 # - other values as described by addchangegroup()
461 461 self.cgresult = None
462 462 # Boolean value for the bookmark push
463 463 self.bkresult = None
464 464 # discover.outgoing object (contains common and outgoing data)
465 465 self.outgoing = None
466 466 # all remote topological heads before the push
467 467 self.remoteheads = None
468 468 # Details of the remote branch pre and post push
469 469 #
470 470 # mapping: {'branch': ([remoteheads],
471 471 # [newheads],
472 472 # [unsyncedheads],
473 473 # [discardedheads])}
474 474 # - branch: the branch name
475 475 # - remoteheads: the list of remote heads known locally
476 476 # None if the branch is new
477 477 # - newheads: the new remote heads (known locally) with outgoing pushed
478 478 # - unsyncedheads: the list of remote heads unknown locally.
479 479 # - discardedheads: the list of remote heads made obsolete by the push
480 480 self.pushbranchmap = None
481 481 # testable as a boolean indicating if any nodes are missing locally.
482 482 self.incoming = None
483 483 # summary of the remote phase situation
484 484 self.remotephases = None
485 485 # phase changes that must be pushed alongside the changesets
486 486 self.outdatedphases = None
487 487 # phase changes that must be pushed if the changeset push fails
488 488 self.fallbackoutdatedphases = None
489 489 # outgoing obsmarkers
490 490 self.outobsmarkers = set()
491 491 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
492 492 self.outbookmarks = []
493 493 # transaction manager
494 494 self.trmanager = None
495 495 # map { pushkey partid -> callback handling failure}
496 496 # used to handle exception from mandatory pushkey part failure
497 497 self.pkfailcb = {}
498 498 # an iterable of pushvars or None
499 499 self.pushvars = pushvars
500 500 # publish pushed changesets
501 501 self.publish = publish
502 502
503 503 @util.propertycache
504 504 def futureheads(self):
505 505 """future remote heads if the changeset push succeeds"""
506 506 return self.outgoing.ancestorsof
507 507
508 508 @util.propertycache
509 509 def fallbackheads(self):
510 510 """future remote heads if the changeset push fails"""
511 511 if self.revs is None:
512 512 # no target to push, all common heads are relevant
513 513 return self.outgoing.commonheads
514 514 unfi = self.repo.unfiltered()
515 515 # I want cheads = heads(::ancestorsof and ::commonheads)
516 516 # (ancestorsof is revs with secret changeset filtered out)
517 517 #
518 518 # This can be expressed as:
519 519 # cheads = ( (ancestorsof and ::commonheads)
520 520 #          + (commonheads and ::ancestorsof)
521 521 #          )
522 522 #
523 523 # while trying to push we already computed the following:
524 524 # common = (::commonheads)
525 525 # missing = ((commonheads::ancestorsof) - commonheads)
526 526 #
527 527 # We can pick:
528 528 # * ancestorsof part of common (::commonheads)
529 529 common = self.outgoing.common
530 530 rev = self.repo.changelog.index.rev
531 531 cheads = [node for node in self.revs if rev(node) in common]
532 532 # and
533 533 # * commonheads parents on missing
534 534 revset = unfi.set(
535 535 b'%ln and parents(roots(%ln))',
536 536 self.outgoing.commonheads,
537 537 self.outgoing.missing,
538 538 )
539 539 cheads.extend(c.node() for c in revset)
540 540 return cheads
541 541
542 542 @property
543 543 def commonheads(self):
544 544 """set of all common heads after changeset bundle push"""
545 545 if self.cgresult:
546 546 return self.futureheads
547 547 else:
548 548 return self.fallbackheads
549 549
550 550
551 551 # mapping of messages used when pushing bookmarks
552 552 bookmsgmap = {
553 553 b'update': (
554 554 _(b"updating bookmark %s\n"),
555 555 _(b'updating bookmark %s failed!\n'),
556 556 ),
557 557 b'export': (
558 558 _(b"exporting bookmark %s\n"),
559 559 _(b'exporting bookmark %s failed!\n'),
560 560 ),
561 561 b'delete': (
562 562 _(b"deleting remote bookmark %s\n"),
563 563 _(b'deleting remote bookmark %s failed!\n'),
564 564 ),
565 565 }
566 566
567 567
568 568 def push(
569 569 repo,
570 570 remote,
571 571 force=False,
572 572 revs=None,
573 573 newbranch=False,
574 574 bookmarks=(),
575 575 publish=False,
576 576 opargs=None,
577 577 ):
578 578 '''Push outgoing changesets (limited by revs) from a local
579 579 repository to remote. Return an integer:
580 580 - None means nothing to push
581 581 - 0 means HTTP error
582 582 - 1 means we pushed and remote head count is unchanged *or*
583 583 we have outgoing changesets but refused to push
584 584 - other values as described by addchangegroup()
585 585 '''
586 586 if opargs is None:
587 587 opargs = {}
588 588 pushop = pushoperation(
589 589 repo,
590 590 remote,
591 591 force,
592 592 revs,
593 593 newbranch,
594 594 bookmarks,
595 595 publish,
596 596 **pycompat.strkwargs(opargs)
597 597 )
598 598 if pushop.remote.local():
599 599 missing = (
600 600 set(pushop.repo.requirements) - pushop.remote.local().supported
601 601 )
602 602 if missing:
603 603 msg = _(
604 604 b"required features are not"
605 605 b" supported in the destination:"
606 606 b" %s"
607 607 ) % (b', '.join(sorted(missing)))
608 608 raise error.Abort(msg)
609 609
610 610 if not pushop.remote.canpush():
611 611 raise error.Abort(_(b"destination does not support push"))
612 612
613 613 if not pushop.remote.capable(b'unbundle'):
614 614 raise error.Abort(
615 615 _(
616 616 b'cannot push: destination does not support the '
617 617 b'unbundle wire protocol command'
618 618 )
619 619 )
620 620
621 621 # get lock as we might write phase data
622 622 wlock = lock = None
623 623 try:
624 624 # bundle2 push may receive a reply bundle touching bookmarks
625 625 # requiring the wlock. Take it now to ensure proper ordering.
626 626 maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
627 627 if (
628 628 (not _forcebundle1(pushop))
629 629 and maypushback
630 630 and not bookmod.bookmarksinstore(repo)
631 631 ):
632 632 wlock = pushop.repo.wlock()
633 633 lock = pushop.repo.lock()
634 634 pushop.trmanager = transactionmanager(
635 635 pushop.repo, b'push-response', pushop.remote.url()
636 636 )
637 637 except error.LockUnavailable as err:
638 638 # source repo cannot be locked.
639 639 # We do not abort the push, but just disable the local phase
640 640 # synchronisation.
641 641 msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
642 642 err
643 643 )
644 644 pushop.ui.debug(msg)
645 645
646 646 with wlock or util.nullcontextmanager():
647 647 with lock or util.nullcontextmanager():
648 648 with pushop.trmanager or util.nullcontextmanager():
649 649 pushop.repo.checkpush(pushop)
650 650 _checkpublish(pushop)
651 651 _pushdiscovery(pushop)
652 652 if not pushop.force:
653 653 _checksubrepostate(pushop)
654 654 if not _forcebundle1(pushop):
655 655 _pushbundle2(pushop)
656 656 _pushchangeset(pushop)
657 657 _pushsyncphase(pushop)
658 658 _pushobsolete(pushop)
659 659 _pushbookmark(pushop)
660 660
661 661 if repo.ui.configbool(b'experimental', b'remotenames'):
662 662 logexchange.pullremotenames(repo, remote)
663 663
664 664 return pushop
665 665
666 666
667 667 # list of steps to perform discovery before push
668 668 pushdiscoveryorder = []
669 669
670 670 # Mapping between step name and function
671 671 #
672 672 # This exists to help extensions wrap steps if necessary
673 673 pushdiscoverymapping = {}
674 674
675 675
676 676 def pushdiscovery(stepname):
677 677 """decorator for function performing discovery before push
678 678
679 679 The function is added to the step -> function mapping and appended to the
680 680 list of steps. Beware that decorated functions will be added in order (this
681 681 may matter).
682 682 
683 683 You can only use this decorator for a new step; if you want to wrap a step
684 684 from an extension, change the pushdiscoverymapping dictionary directly."""
685 685
686 686 def dec(func):
687 687 assert stepname not in pushdiscoverymapping
688 688 pushdiscoverymapping[stepname] = func
689 689 pushdiscoveryorder.append(stepname)
690 690 return func
691 691
692 692 return dec
693 693
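# Usage sketch for an extension adding a brand new discovery step
# (hypothetical step name and function):
#
#   @pushdiscovery(b'my-step')
#   def _pushdiscoverymystep(pushop):
#       ...  # inspect or annotate pushop before the push proper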
694 694
695 695 def _pushdiscovery(pushop):
696 696 """Run all discovery steps"""
697 697 for stepname in pushdiscoveryorder:
698 698 step = pushdiscoverymapping[stepname]
699 699 step(pushop)
700 700
701 701
702 702 def _checksubrepostate(pushop):
703 703 """Ensure all outgoing referenced subrepo revisions are present locally"""
704 704 for n in pushop.outgoing.missing:
705 705 ctx = pushop.repo[n]
706 706
707 707 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
708 708 for subpath in sorted(ctx.substate):
709 709 sub = ctx.sub(subpath)
710 710 sub.verify(onpush=True)
711 711
712 712
713 713 @pushdiscovery(b'changeset')
714 714 def _pushdiscoverychangeset(pushop):
715 715 """discover the changeset that need to be pushed"""
716 716 fci = discovery.findcommonincoming
717 717 if pushop.revs:
718 718 commoninc = fci(
719 719 pushop.repo,
720 720 pushop.remote,
721 721 force=pushop.force,
722 722 ancestorsof=pushop.revs,
723 723 )
724 724 else:
725 725 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
726 726 common, inc, remoteheads = commoninc
727 727 fco = discovery.findcommonoutgoing
728 728 outgoing = fco(
729 729 pushop.repo,
730 730 pushop.remote,
731 731 onlyheads=pushop.revs,
732 732 commoninc=commoninc,
733 733 force=pushop.force,
734 734 )
735 735 pushop.outgoing = outgoing
736 736 pushop.remoteheads = remoteheads
737 737 pushop.incoming = inc
738 738
739 739
740 740 @pushdiscovery(b'phase')
741 741 def _pushdiscoveryphase(pushop):
742 742 """discover the phase that needs to be pushed
743 743
744 744 (computed for both success and failure case for changesets push)"""
745 745 outgoing = pushop.outgoing
746 746 unfi = pushop.repo.unfiltered()
747 747 remotephases = listkeys(pushop.remote, b'phases')
748 748
749 749 if (
750 750 pushop.ui.configbool(b'ui', b'_usedassubrepo')
751 751 and remotephases # server supports phases
752 752 and not pushop.outgoing.missing # no changesets to be pushed
753 753 and remotephases.get(b'publishing', False)
754 754 ):
755 755 # When:
756 756 # - this is a subrepo push
757 757 # - and the remote supports phases
758 758 # - and no changesets are to be pushed
759 759 # - and the remote is publishing
760 760 # We may be in the issue 3871 case!
761 761 # We drop the phase synchronisation that would normally be done
762 762 # as a courtesy, since it could publish changesets that are still
763 763 # draft locally.
764 764 pushop.outdatedphases = []
765 765 pushop.fallbackoutdatedphases = []
766 766 return
767 767
768 768 pushop.remotephases = phases.remotephasessummary(
769 769 pushop.repo, pushop.fallbackheads, remotephases
770 770 )
771 771 droots = pushop.remotephases.draftroots
772 772
773 773 extracond = b''
774 774 if not pushop.remotephases.publishing:
775 775 extracond = b' and public()'
776 776 revset = b'heads((%%ln::%%ln) %s)' % extracond
777 777 # Get the list of all revs that are draft on the remote but public here.
778 778 # XXX Beware that the revset breaks if droots is not strictly
779 779 # XXX roots; we may want to ensure it is, but that is costly
780 780 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
781 781 if not pushop.remotephases.publishing and pushop.publish:
782 782 future = list(
783 783 unfi.set(
784 784 b'%ln and (not public() or %ln::)', pushop.futureheads, droots
785 785 )
786 786 )
787 787 elif not outgoing.missing:
788 788 future = fallback
789 789 else:
790 790 # add the changesets we are going to push as draft
791 791 #
792 792 # this should not be necessary for a publishing server, but because
793 793 # of an issue fixed in xxxxx we have to do it anyway.
794 794 fdroots = list(
795 795 unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
796 796 )
797 797 fdroots = [f.node() for f in fdroots]
798 798 future = list(unfi.set(revset, fdroots, pushop.futureheads))
799 799 pushop.outdatedphases = future
800 800 pushop.fallbackoutdatedphases = fallback
801 801
802 802
803 803 @pushdiscovery(b'obsmarker')
804 804 def _pushdiscoveryobsmarkers(pushop):
805 805 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
806 806 return
807 807
808 808 if not pushop.repo.obsstore:
809 809 return
810 810
811 811 if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
812 812 return
813 813
814 814 repo = pushop.repo
815 815 # very naive computation, which can be quite expensive on a big repo.
816 816 # However, evolution is currently slow on big repos anyway.
817 817 nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
818 818 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
819 819
820 820
821 821 @pushdiscovery(b'bookmarks')
822 822 def _pushdiscoverybookmarks(pushop):
823 823 ui = pushop.ui
824 824 repo = pushop.repo.unfiltered()
825 825 remote = pushop.remote
826 826 ui.debug(b"checking for updated bookmarks\n")
827 827 ancestors = ()
828 828 if pushop.revs:
829 829 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
830 830 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
831 831
832 832 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
833 833
834 834 explicit = {
835 835 repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
836 836 }
837 837
838 838 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
839 839 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
840 840
841 841
842 842 def _processcompared(pushop, pushed, explicit, remotebms, comp):
843 843 """take decision on bookmarks to push to the remote repo
844 844
845 845 Exists to help extensions alter this behavior.
846 846 """
847 847 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
848 848
849 849 repo = pushop.repo
850 850
851 851 for b, scid, dcid in advsrc:
852 852 if b in explicit:
853 853 explicit.remove(b)
854 854 if not pushed or repo[scid].rev() in pushed:
855 855 pushop.outbookmarks.append((b, dcid, scid))
856 856 # search added bookmark
857 857 for b, scid, dcid in addsrc:
858 858 if b in explicit:
859 859 explicit.remove(b)
860 860 if bookmod.isdivergent(b):
861 861 pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
862 862 pushop.bkresult = 2
863 863 else:
864 864 pushop.outbookmarks.append((b, b'', scid))
865 865 # search for overwritten bookmark
866 866 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
867 867 if b in explicit:
868 868 explicit.remove(b)
869 869 pushop.outbookmarks.append((b, dcid, scid))
870 870 # search for bookmark to delete
871 871 for b, scid, dcid in adddst:
872 872 if b in explicit:
873 873 explicit.remove(b)
874 874 # treat as "deleted locally"
875 875 pushop.outbookmarks.append((b, dcid, b''))
876 876 # identical bookmarks shouldn't get reported
877 877 for b, scid, dcid in same:
878 878 if b in explicit:
879 879 explicit.remove(b)
880 880
881 881 if explicit:
882 882 explicit = sorted(explicit)
883 883 # we should probably list all of them
884 884 pushop.ui.warn(
885 885 _(
886 886 b'bookmark %s does not exist on the local '
887 887 b'or remote repository!\n'
888 888 )
889 889 % explicit[0]
890 890 )
891 891 pushop.bkresult = 2
892 892
893 893 pushop.outbookmarks.sort()
894 894
895 895
896 896 def _pushcheckoutgoing(pushop):
897 897 outgoing = pushop.outgoing
898 898 unfi = pushop.repo.unfiltered()
899 899 if not outgoing.missing:
900 900 # nothing to push
901 901 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
902 902 return False
903 903 # something to push
904 904 if not pushop.force:
905 905 # if repo.obsstore is empty --> there are no obsolete markers,
906 906 # so we can skip the iteration
907 907 if unfi.obsstore:
908 obsoletes = []
909 unstables = []
910 for node in outgoing.missing:
908 # these messages are defined here because of the 80 char line limit
909 mso = _(b"push includes obsolete changeset: %s!")
910 mspd = _(b"push includes phase-divergent changeset: %s!")
911 mscd = _(b"push includes content-divergent changeset: %s!")
912 mst = {
913 b"orphan": _(b"push includes orphan changeset: %s!"),
914 b"phase-divergent": mspd,
915 b"content-divergent": mscd,
916 }
917 # If there is at least one obsolete or unstable
918 # changeset in missing, then at least one of the
919 # missing heads will be obsolete or unstable. So
920 # checking heads only is ok
921 for node in outgoing.ancestorsof:
911 922 ctx = unfi[node]
912 923 if ctx.obsolete():
913 obsoletes.append(ctx)
924 raise error.Abort(mso % ctx)
914 925 elif ctx.isunstable():
915 unstables.append(ctx)
916 if obsoletes or unstables:
917 msg = b""
918 if obsoletes:
919 msg += _(b"push includes obsolete changesets:\n")
920 msg += b"\n".join(b' %s' % ctx for ctx in obsoletes)
921 if unstables:
922 if msg:
923 msg += b"\n"
924 msg += _(b"push includes unstable changesets:\n")
925 msg += b"\n".join(
926 b' %s (%s)'
927 % (
928 ctx,
929 b", ".join(_(ins) for ins in ctx.instabilities()),
930 )
931 for ctx in unstables
932 )
933 raise error.Abort(msg)
926 # TODO print more than one instability in the abort
927 # message
928 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
934 929
935 930 discovery.checkheads(pushop)
936 931 return True
937 932
938 933
939 934 # List of names of steps to perform for an outgoing bundle2, order matters.
940 935 b2partsgenorder = []
941 936
942 937 # Mapping between step name and function
943 938 #
944 939 # This exists to help extensions wrap steps if necessary
945 940 b2partsgenmapping = {}
946 941
947 942
948 943 def b2partsgenerator(stepname, idx=None):
949 944 """decorator for function generating bundle2 part
950 945
951 946 The function is added to the step -> function mapping and appended to the
952 947 list of steps. Beware that decorated functions will be added in order
953 948 (this may matter).
954 949
955 950 You can only use this decorator for new steps; if you want to wrap a step
956 951 from an extension, modify the b2partsgenmapping dictionary directly."""
957 952
958 953 def dec(func):
959 954 assert stepname not in b2partsgenmapping
960 955 b2partsgenmapping[stepname] = func
961 956 if idx is None:
962 957 b2partsgenorder.append(stepname)
963 958 else:
964 959 b2partsgenorder.insert(idx, stepname)
965 960 return func
966 961
967 962 return dec
968 963
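# Usage sketch for a new part generator (hypothetical name; passing idx=0
# would schedule it first, as the 'pushvars' part below does):
#
#   @b2partsgenerator(b'my-part')
#   def _pushb2mypart(pushop, bundler):
#       part = bundler.newpart(b'my-part')
#       return lambda op: None  # optional reply handler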
969 964
970 965 def _pushb2ctxcheckheads(pushop, bundler):
971 966 """Generate race condition checking parts
972 967
973 968 Exists as an independent function to aid extensions
974 969 """
975 970 # * 'force' does not check for push races,
976 971 # * if we don't push anything, there is nothing to check.
977 972 if not pushop.force and pushop.outgoing.ancestorsof:
978 973 allowunrelated = b'related' in bundler.capabilities.get(
979 974 b'checkheads', ()
980 975 )
981 976 emptyremote = pushop.pushbranchmap is None
982 977 if not allowunrelated or emptyremote:
983 978 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
984 979 else:
985 980 affected = set()
986 981 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
987 982 remoteheads, newheads, unsyncedheads, discardedheads = heads
988 983 if remoteheads is not None:
989 984 remote = set(remoteheads)
990 985 affected |= set(discardedheads) & remote
991 986 affected |= remote - set(newheads)
992 987 if affected:
993 988 data = iter(sorted(affected))
994 989 bundler.newpart(b'check:updated-heads', data=data)
995 990
996 991
997 992 def _pushing(pushop):
998 993 """return True if we are pushing anything"""
999 994 return bool(
1000 995 pushop.outgoing.missing
1001 996 or pushop.outdatedphases
1002 997 or pushop.outobsmarkers
1003 998 or pushop.outbookmarks
1004 999 )
1005 1000
1006 1001
1007 1002 @b2partsgenerator(b'check-bookmarks')
1008 1003 def _pushb2checkbookmarks(pushop, bundler):
1009 1004 """insert bookmark move checking"""
1010 1005 if not _pushing(pushop) or pushop.force:
1011 1006 return
1012 1007 b2caps = bundle2.bundle2caps(pushop.remote)
1013 1008 hasbookmarkcheck = b'bookmarks' in b2caps
1014 1009 if not (pushop.outbookmarks and hasbookmarkcheck):
1015 1010 return
1016 1011 data = []
1017 1012 for book, old, new in pushop.outbookmarks:
1018 1013 data.append((book, old))
1019 1014 checkdata = bookmod.binaryencode(data)
1020 1015 bundler.newpart(b'check:bookmarks', data=checkdata)
1021 1016
1022 1017
1023 1018 @b2partsgenerator(b'check-phases')
1024 1019 def _pushb2checkphases(pushop, bundler):
1025 1020 """insert phase move checking"""
1026 1021 if not _pushing(pushop) or pushop.force:
1027 1022 return
1028 1023 b2caps = bundle2.bundle2caps(pushop.remote)
1029 1024 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1030 1025 if pushop.remotephases is not None and hasphaseheads:
1031 1026 # check that the remote phase has not changed
1032 1027 checks = {p: [] for p in phases.allphases}
1033 1028 checks[phases.public].extend(pushop.remotephases.publicheads)
1034 1029 checks[phases.draft].extend(pushop.remotephases.draftroots)
1035 1030 if any(pycompat.itervalues(checks)):
1036 1031 for phase in checks:
1037 1032 checks[phase].sort()
1038 1033 checkdata = phases.binaryencode(checks)
1039 1034 bundler.newpart(b'check:phases', data=checkdata)
1040 1035
1041 1036
1042 1037 @b2partsgenerator(b'changeset')
1043 1038 def _pushb2ctx(pushop, bundler):
1044 1039 """handle changegroup push through bundle2
1045 1040
1046 1041 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
1047 1042 """
1048 1043 if b'changesets' in pushop.stepsdone:
1049 1044 return
1050 1045 pushop.stepsdone.add(b'changesets')
1051 1046 # Send known heads to the server for race detection.
1052 1047 if not _pushcheckoutgoing(pushop):
1053 1048 return
1054 1049 pushop.repo.prepushoutgoinghooks(pushop)
1055 1050
1056 1051 _pushb2ctxcheckheads(pushop, bundler)
1057 1052
1058 1053 b2caps = bundle2.bundle2caps(pushop.remote)
1059 1054 version = b'01'
1060 1055 cgversions = b2caps.get(b'changegroup')
1061 1056 if cgversions: # 3.1 and 3.2 ship with an empty value
1062 1057 cgversions = [
1063 1058 v
1064 1059 for v in cgversions
1065 1060 if v in changegroup.supportedoutgoingversions(pushop.repo)
1066 1061 ]
1067 1062 if not cgversions:
1068 1063 raise error.Abort(_(b'no common changegroup version'))
1069 1064 version = max(cgversions)
1070 1065 cgstream = changegroup.makestream(
1071 1066 pushop.repo, pushop.outgoing, version, b'push'
1072 1067 )
1073 1068 cgpart = bundler.newpart(b'changegroup', data=cgstream)
1074 1069 if cgversions:
1075 1070 cgpart.addparam(b'version', version)
1076 1071 if b'treemanifest' in pushop.repo.requirements:
1077 1072 cgpart.addparam(b'treemanifest', b'1')
1078 1073 if b'exp-sidedata-flag' in pushop.repo.requirements:
1079 1074 cgpart.addparam(b'exp-sidedata', b'1')
1080 1075
1081 1076 def handlereply(op):
1082 1077 """extract addchangegroup returns from server reply"""
1083 1078 cgreplies = op.records.getreplies(cgpart.id)
1084 1079 assert len(cgreplies[b'changegroup']) == 1
1085 1080 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
1086 1081
1087 1082 return handlereply
1088 1083
1089 1084
1090 1085 @b2partsgenerator(b'phase')
1091 1086 def _pushb2phases(pushop, bundler):
1092 1087 """handle phase push through bundle2"""
1093 1088 if b'phases' in pushop.stepsdone:
1094 1089 return
1095 1090 b2caps = bundle2.bundle2caps(pushop.remote)
1096 1091 ui = pushop.repo.ui
1097 1092
1098 1093 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1099 1094 haspushkey = b'pushkey' in b2caps
1100 1095 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1101 1096
1102 1097 if hasphaseheads and not legacyphase:
1103 1098 return _pushb2phaseheads(pushop, bundler)
1104 1099 elif haspushkey:
1105 1100 return _pushb2phasespushkey(pushop, bundler)
1106 1101
1107 1102
1108 1103 def _pushb2phaseheads(pushop, bundler):
1109 1104 """push phase information through a bundle2 - binary part"""
1110 1105 pushop.stepsdone.add(b'phases')
1111 1106 if pushop.outdatedphases:
1112 1107 updates = {p: [] for p in phases.allphases}
1113 1108 updates[0].extend(h.node() for h in pushop.outdatedphases)
1114 1109 phasedata = phases.binaryencode(updates)
1115 1110 bundler.newpart(b'phase-heads', data=phasedata)
1116 1111
1117 1112
1118 1113 def _pushb2phasespushkey(pushop, bundler):
1119 1114 """push phase information through a bundle2 - pushkey part"""
1120 1115 pushop.stepsdone.add(b'phases')
1121 1116 part2node = []
1122 1117
1123 1118 def handlefailure(pushop, exc):
1124 1119 targetid = int(exc.partid)
1125 1120 for partid, node in part2node:
1126 1121 if partid == targetid:
1127 1122 raise error.Abort(_(b'updating %s to public failed') % node)
1128 1123
1129 1124 enc = pushkey.encode
1130 1125 for newremotehead in pushop.outdatedphases:
1131 1126 part = bundler.newpart(b'pushkey')
1132 1127 part.addparam(b'namespace', enc(b'phases'))
1133 1128 part.addparam(b'key', enc(newremotehead.hex()))
1134 1129 part.addparam(b'old', enc(b'%d' % phases.draft))
1135 1130 part.addparam(b'new', enc(b'%d' % phases.public))
1136 1131 part2node.append((part.id, newremotehead))
1137 1132 pushop.pkfailcb[part.id] = handlefailure
1138 1133
1139 1134 def handlereply(op):
1140 1135 for partid, node in part2node:
1141 1136 partrep = op.records.getreplies(partid)
1142 1137 results = partrep[b'pushkey']
1143 1138 assert len(results) <= 1
1144 1139 msg = None
1145 1140 if not results:
1146 1141 msg = _(b'server ignored update of %s to public!\n') % node
1147 1142 elif not int(results[0][b'return']):
1148 1143 msg = _(b'updating %s to public failed!\n') % node
1149 1144 if msg is not None:
1150 1145 pushop.ui.warn(msg)
1151 1146
1152 1147 return handlereply
1153 1148
1154 1149
1155 1150 @b2partsgenerator(b'obsmarkers')
1156 1151 def _pushb2obsmarkers(pushop, bundler):
1157 1152 if b'obsmarkers' in pushop.stepsdone:
1158 1153 return
1159 1154 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
1160 1155 if obsolete.commonversion(remoteversions) is None:
1161 1156 return
1162 1157 pushop.stepsdone.add(b'obsmarkers')
1163 1158 if pushop.outobsmarkers:
1164 1159 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1165 1160 bundle2.buildobsmarkerspart(bundler, markers)
1166 1161
1167 1162
1168 1163 @b2partsgenerator(b'bookmarks')
1169 1164 def _pushb2bookmarks(pushop, bundler):
1170 1165 """handle bookmark push through bundle2"""
1171 1166 if b'bookmarks' in pushop.stepsdone:
1172 1167 return
1173 1168 b2caps = bundle2.bundle2caps(pushop.remote)
1174 1169
1175 1170 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
1176 1171 legacybooks = b'bookmarks' in legacy
1177 1172
1178 1173 if not legacybooks and b'bookmarks' in b2caps:
1179 1174 return _pushb2bookmarkspart(pushop, bundler)
1180 1175 elif b'pushkey' in b2caps:
1181 1176 return _pushb2bookmarkspushkey(pushop, bundler)
1182 1177
1183 1178
1184 1179 def _bmaction(old, new):
1185 1180 """small utility for bookmark pushing"""
1186 1181 if not old:
1187 1182 return b'export'
1188 1183 elif not new:
1189 1184 return b'delete'
1190 1185 return b'update'
1191 1186
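# i.e. _bmaction(b'', node) -> b'export', _bmaction(node, b'') -> b'delete',
# and _bmaction(node1, node2) -> b'update' otherwise.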
1192 1187
1193 1188 def _abortonsecretctx(pushop, node, b):
1194 1189 """abort if a given bookmark points to a secret changeset"""
1195 1190 if node and pushop.repo[node].phase() == phases.secret:
1196 1191 raise error.Abort(
1197 1192 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1198 1193 )
1199 1194
1200 1195
1201 1196 def _pushb2bookmarkspart(pushop, bundler):
1202 1197 pushop.stepsdone.add(b'bookmarks')
1203 1198 if not pushop.outbookmarks:
1204 1199 return
1205 1200
1206 1201 allactions = []
1207 1202 data = []
1208 1203 for book, old, new in pushop.outbookmarks:
1209 1204 _abortonsecretctx(pushop, new, book)
1210 1205 data.append((book, new))
1211 1206 allactions.append((book, _bmaction(old, new)))
1212 1207 checkdata = bookmod.binaryencode(data)
1213 1208 bundler.newpart(b'bookmarks', data=checkdata)
1214 1209
1215 1210 def handlereply(op):
1216 1211 ui = pushop.ui
1217 1212 # if success
1218 1213 for book, action in allactions:
1219 1214 ui.status(bookmsgmap[action][0] % book)
1220 1215
1221 1216 return handlereply
1222 1217
1223 1218
1224 1219 def _pushb2bookmarkspushkey(pushop, bundler):
1225 1220 pushop.stepsdone.add(b'bookmarks')
1226 1221 part2book = []
1227 1222 enc = pushkey.encode
1228 1223
1229 1224 def handlefailure(pushop, exc):
1230 1225 targetid = int(exc.partid)
1231 1226 for partid, book, action in part2book:
1232 1227 if partid == targetid:
1233 1228 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1234 1229 # we should not be called for parts we did not generate
1235 1230 assert False
1236 1231
1237 1232 for book, old, new in pushop.outbookmarks:
1238 1233 _abortonsecretctx(pushop, new, book)
1239 1234 part = bundler.newpart(b'pushkey')
1240 1235 part.addparam(b'namespace', enc(b'bookmarks'))
1241 1236 part.addparam(b'key', enc(book))
1242 1237 part.addparam(b'old', enc(hex(old)))
1243 1238 part.addparam(b'new', enc(hex(new)))
1244 1239 action = b'update'
1245 1240 if not old:
1246 1241 action = b'export'
1247 1242 elif not new:
1248 1243 action = b'delete'
1249 1244 part2book.append((part.id, book, action))
1250 1245 pushop.pkfailcb[part.id] = handlefailure
1251 1246
1252 1247 def handlereply(op):
1253 1248 ui = pushop.ui
1254 1249 for partid, book, action in part2book:
1255 1250 partrep = op.records.getreplies(partid)
1256 1251 results = partrep[b'pushkey']
1257 1252 assert len(results) <= 1
1258 1253 if not results:
1259 1254 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1260 1255 else:
1261 1256 ret = int(results[0][b'return'])
1262 1257 if ret:
1263 1258 ui.status(bookmsgmap[action][0] % book)
1264 1259 else:
1265 1260 ui.warn(bookmsgmap[action][1] % book)
1266 1261 if pushop.bkresult is not None:
1267 1262 pushop.bkresult = 1
1268 1263
1269 1264 return handlereply
1270 1265
1271 1266
1272 1267 @b2partsgenerator(b'pushvars', idx=0)
1273 1268 def _getbundlesendvars(pushop, bundler):
1274 1269 '''send shellvars via bundle2'''
1275 1270 pushvars = pushop.pushvars
1276 1271 if pushvars:
1277 1272 shellvars = {}
1278 1273 for raw in pushvars:
1279 1274 if b'=' not in raw:
1280 1275 msg = (
1281 1276 b"unable to parse variable '%s', should follow "
1282 1277 b"'KEY=VALUE' or 'KEY=' format"
1283 1278 )
1284 1279 raise error.Abort(msg % raw)
1285 1280 k, v = raw.split(b'=', 1)
1286 1281 shellvars[k] = v
1287 1282
1288 1283 part = bundler.newpart(b'pushvars')
1289 1284
1290 1285 for key, value in pycompat.iteritems(shellvars):
1291 1286 part.addparam(key, value, mandatory=False)
1292 1287
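# Pushvars typically come from the command line, e.g. (a sketch):
#   hg push --pushvars "DEBUG=1"
# the server side then exposes them to hooks as HG_USERVAR_* variables.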
1293 1288
1294 1289 def _pushbundle2(pushop):
1295 1290 """push data to the remote using bundle2
1296 1291
1297 1292 The only currently supported type of data is changegroup but this will
1298 1293 evolve in the future."""
1299 1294 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1300 1295 pushback = pushop.trmanager and pushop.ui.configbool(
1301 1296 b'experimental', b'bundle2.pushback'
1302 1297 )
1303 1298
1304 1299 # create reply capability
1305 1300 capsblob = bundle2.encodecaps(
1306 1301 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1307 1302 )
1308 1303 bundler.newpart(b'replycaps', data=capsblob)
1309 1304 replyhandlers = []
1310 1305 for partgenname in b2partsgenorder:
1311 1306 partgen = b2partsgenmapping[partgenname]
1312 1307 ret = partgen(pushop, bundler)
1313 1308 if callable(ret):
1314 1309 replyhandlers.append(ret)
1315 1310 # do not push if nothing to push
1316 1311 if bundler.nbparts <= 1:
1317 1312 return
1318 1313 stream = util.chunkbuffer(bundler.getchunks())
1319 1314 try:
1320 1315 try:
1321 1316 with pushop.remote.commandexecutor() as e:
1322 1317 reply = e.callcommand(
1323 1318 b'unbundle',
1324 1319 {
1325 1320 b'bundle': stream,
1326 1321 b'heads': [b'force'],
1327 1322 b'url': pushop.remote.url(),
1328 1323 },
1329 1324 ).result()
1330 1325 except error.BundleValueError as exc:
1331 1326 raise error.Abort(_(b'missing support for %s') % exc)
1332 1327 try:
1333 1328 trgetter = None
1334 1329 if pushback:
1335 1330 trgetter = pushop.trmanager.transaction
1336 1331 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1337 1332 except error.BundleValueError as exc:
1338 1333 raise error.Abort(_(b'missing support for %s') % exc)
1339 1334 except bundle2.AbortFromPart as exc:
1340 1335 pushop.ui.status(_(b'remote: %s\n') % exc)
1341 1336 if exc.hint is not None:
1342 1337 pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1343 1338 raise error.Abort(_(b'push failed on remote'))
1344 1339 except error.PushkeyFailed as exc:
1345 1340 partid = int(exc.partid)
1346 1341 if partid not in pushop.pkfailcb:
1347 1342 raise
1348 1343 pushop.pkfailcb[partid](pushop, exc)
1349 1344 for rephand in replyhandlers:
1350 1345 rephand(op)
1351 1346
1352 1347
1353 1348 def _pushchangeset(pushop):
1354 1349 """Make the actual push of changeset bundle to remote repo"""
1355 1350 if b'changesets' in pushop.stepsdone:
1356 1351 return
1357 1352 pushop.stepsdone.add(b'changesets')
1358 1353 if not _pushcheckoutgoing(pushop):
1359 1354 return
1360 1355
1361 1356 # Should have verified this in push().
1362 1357 assert pushop.remote.capable(b'unbundle')
1363 1358
1364 1359 pushop.repo.prepushoutgoinghooks(pushop)
1365 1360 outgoing = pushop.outgoing
1366 1361 # TODO: get bundlecaps from remote
1367 1362 bundlecaps = None
1368 1363 # create a changegroup from local
1369 1364 if pushop.revs is None and not (
1370 1365 outgoing.excluded or pushop.repo.changelog.filteredrevs
1371 1366 ):
1372 1367 # push everything,
1373 1368 # use the fast path, no race possible on push
1374 1369 cg = changegroup.makechangegroup(
1375 1370 pushop.repo,
1376 1371 outgoing,
1377 1372 b'01',
1378 1373 b'push',
1379 1374 fastpath=True,
1380 1375 bundlecaps=bundlecaps,
1381 1376 )
1382 1377 else:
1383 1378 cg = changegroup.makechangegroup(
1384 1379 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1385 1380 )
1386 1381
1387 1382 # apply changegroup to remote
1388 1383 # local repo finds heads on server, finds out what
1389 1384 # revs it must push. once revs transferred, if server
1390 1385 # finds it has different heads (someone else won
1391 1386 # commit/push race), server aborts.
1392 1387 if pushop.force:
1393 1388 remoteheads = [b'force']
1394 1389 else:
1395 1390 remoteheads = pushop.remoteheads
1396 1391 # ssh: return remote's addchangegroup()
1397 1392 # http: return remote's addchangegroup() or 0 for error
1398 1393 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1399 1394
1400 1395
1401 1396 def _pushsyncphase(pushop):
1402 1397 """synchronise phase information locally and remotely"""
1403 1398 cheads = pushop.commonheads
1404 1399 # even when we don't push, exchanging phase data is useful
1405 1400 remotephases = listkeys(pushop.remote, b'phases')
1406 1401 if (
1407 1402 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1408 1403 and remotephases # server supports phases
1409 1404 and pushop.cgresult is None # nothing was pushed
1410 1405 and remotephases.get(b'publishing', False)
1411 1406 ):
1412 1407 # When:
1413 1408 # - this is a subrepo push
1414 1409 # - and the remote supports phases
1415 1410 # - and no changesets were pushed
1416 1411 # - and the remote is publishing
1417 1412 # We may be in the issue 3871 case!
1418 1413 # We drop the phase synchronisation that would normally be done
1419 1414 # as a courtesy, since it could publish changesets that are still
1420 1415 # draft locally.
1421 1416 remotephases = {b'publishing': b'True'}
1422 1417 if not remotephases: # old server or public only reply from non-publishing
1423 1418 _localphasemove(pushop, cheads)
1424 1419 # don't push any phase data as there is nothing to push
1425 1420 else:
1426 1421 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1427 1422 pheads, droots = ana
1428 1423 ### Apply remote phase on local
1429 1424 if remotephases.get(b'publishing', False):
1430 1425 _localphasemove(pushop, cheads)
1431 1426 else: # publish = False
1432 1427 _localphasemove(pushop, pheads)
1433 1428 _localphasemove(pushop, cheads, phases.draft)
1434 1429 ### Apply local phase on remote
1435 1430
1436 1431 if pushop.cgresult:
1437 1432 if b'phases' in pushop.stepsdone:
1438 1433 # phases already pushed through bundle2
1439 1434 return
1440 1435 outdated = pushop.outdatedphases
1441 1436 else:
1442 1437 outdated = pushop.fallbackoutdatedphases
1443 1438
1444 1439 pushop.stepsdone.add(b'phases')
1445 1440
1446 1441 # filter heads already turned public by the push
1447 1442 outdated = [c for c in outdated if c.node() not in pheads]
1448 1443 # fallback to independent pushkey command
1449 1444 for newremotehead in outdated:
1450 1445 with pushop.remote.commandexecutor() as e:
1451 1446 r = e.callcommand(
1452 1447 b'pushkey',
1453 1448 {
1454 1449 b'namespace': b'phases',
1455 1450 b'key': newremotehead.hex(),
1456 1451 b'old': b'%d' % phases.draft,
1457 1452 b'new': b'%d' % phases.public,
1458 1453 },
1459 1454 ).result()
1460 1455
1461 1456 if not r:
1462 1457 pushop.ui.warn(
1463 1458 _(b'updating %s to public failed!\n') % newremotehead
1464 1459 )
1465 1460
1466 1461
1467 1462 def _localphasemove(pushop, nodes, phase=phases.public):
1468 1463 """move <nodes> to <phase> in the local source repo"""
1469 1464 if pushop.trmanager:
1470 1465 phases.advanceboundary(
1471 1466 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1472 1467 )
1473 1468 else:
1474 1469 # repo is not locked, do not change any phases!
1475 1470 # Inform the user that phases should have been moved when
1476 1471 # applicable.
1477 1472 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1478 1473 phasestr = phases.phasenames[phase]
1479 1474 if actualmoves:
1480 1475 pushop.ui.status(
1481 1476 _(
1482 1477 b'cannot lock source repo, skipping '
1483 1478 b'local %s phase update\n'
1484 1479 )
1485 1480 % phasestr
1486 1481 )
1487 1482
1488 1483
1489 1484 def _pushobsolete(pushop):
1490 1485 """utility function to push obsolete markers to a remote"""
1491 1486 if b'obsmarkers' in pushop.stepsdone:
1492 1487 return
1493 1488 repo = pushop.repo
1494 1489 remote = pushop.remote
1495 1490 pushop.stepsdone.add(b'obsmarkers')
1496 1491 if pushop.outobsmarkers:
1497 1492 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1498 1493 rslts = []
1499 1494 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1500 1495 remotedata = obsolete._pushkeyescape(markers)
1501 1496 for key in sorted(remotedata, reverse=True):
1502 1497 # reverse sort to ensure we end with dump0
1503 1498 data = remotedata[key]
1504 1499 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1505 1500 if [r for r in rslts if not r]:
1506 1501 msg = _(b'failed to push some obsolete markers!\n')
1507 1502 repo.ui.warn(msg)
1508 1503
1509 1504
1510 1505 def _pushbookmark(pushop):
1511 1506 """Update bookmark position on remote"""
1512 1507 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1513 1508 return
1514 1509 pushop.stepsdone.add(b'bookmarks')
1515 1510 ui = pushop.ui
1516 1511 remote = pushop.remote
1517 1512
1518 1513 for b, old, new in pushop.outbookmarks:
1519 1514 action = b'update'
1520 1515 if not old:
1521 1516 action = b'export'
1522 1517 elif not new:
1523 1518 action = b'delete'
1524 1519
1525 1520 with remote.commandexecutor() as e:
1526 1521 r = e.callcommand(
1527 1522 b'pushkey',
1528 1523 {
1529 1524 b'namespace': b'bookmarks',
1530 1525 b'key': b,
1531 1526 b'old': hex(old),
1532 1527 b'new': hex(new),
1533 1528 },
1534 1529 ).result()
1535 1530
1536 1531 if r:
1537 1532 ui.status(bookmsgmap[action][0] % b)
1538 1533 else:
1539 1534 ui.warn(bookmsgmap[action][1] % b)
1540 1535 # discovery can have set the value from an invalid entry
1541 1536 if pushop.bkresult is not None:
1542 1537 pushop.bkresult = 1
1543 1538
1544 1539
1545 1540 class pulloperation(object):
1546 1541 """A object that represent a single pull operation
1547 1542
1548 1543 It purpose is to carry pull related state and very common operation.
1549 1544
1550 1545 A new should be created at the beginning of each pull and discarded
1551 1546 afterward.
1552 1547 """
1553 1548
1554 1549 def __init__(
1555 1550 self,
1556 1551 repo,
1557 1552 remote,
1558 1553 heads=None,
1559 1554 force=False,
1560 1555 bookmarks=(),
1561 1556 remotebookmarks=None,
1562 1557 streamclonerequested=None,
1563 1558 includepats=None,
1564 1559 excludepats=None,
1565 1560 depth=None,
1566 1561 ):
1567 1562 # repo we pull into
1568 1563 self.repo = repo
1569 1564 # repo we pull from
1570 1565 self.remote = remote
1571 1566 # revisions we try to pull (None is "all")
1572 1567 self.heads = heads
1573 1568 # bookmarks pulled explicitly
1574 1569 self.explicitbookmarks = [
1575 1570 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1576 1571 ]
1577 1572 # do we force pull?
1578 1573 self.force = force
1579 1574 # whether a streaming clone was requested
1580 1575 self.streamclonerequested = streamclonerequested
1581 1576 # transaction manager
1582 1577 self.trmanager = None
1583 1578 # set of common changesets between local and remote before pull
1584 1579 self.common = None
1585 1580 # set of pulled heads
1586 1581 self.rheads = None
1587 1582 # list of missing changesets to fetch remotely
1588 1583 self.fetch = None
1589 1584 # remote bookmarks data
1590 1585 self.remotebookmarks = remotebookmarks
1591 1586 # result of changegroup pulling (used as return code by pull)
1592 1587 self.cgresult = None
1593 1588 # list of steps already done
1594 1589 self.stepsdone = set()
1595 1590 # Whether we attempted a clone from pre-generated bundles.
1596 1591 self.clonebundleattempted = False
1597 1592 # Set of file patterns to include.
1598 1593 self.includepats = includepats
1599 1594 # Set of file patterns to exclude.
1600 1595 self.excludepats = excludepats
1601 1596 # Number of ancestor changesets to pull from each pulled head.
1602 1597 self.depth = depth
1603 1598
1604 1599 @util.propertycache
1605 1600 def pulledsubset(self):
1606 1601 """heads of the set of changeset target by the pull"""
1607 1602 # compute target subset
1608 1603 if self.heads is None:
1609 1604 # We pulled everything possible
1610 1605 # sync on everything common
1611 1606 c = set(self.common)
1612 1607 ret = list(self.common)
1613 1608 for n in self.rheads:
1614 1609 if n not in c:
1615 1610 ret.append(n)
1616 1611 return ret
1617 1612 else:
1618 1613 # We pulled a specific subset
1619 1614 # sync on this subset
1620 1615 return self.heads
1621 1616
1622 1617 @util.propertycache
1623 1618 def canusebundle2(self):
1624 1619 return not _forcebundle1(self)
1625 1620
1626 1621 @util.propertycache
1627 1622 def remotebundle2caps(self):
1628 1623 return bundle2.bundle2caps(self.remote)
1629 1624
1630 1625 def gettransaction(self):
1631 1626 # deprecated; talk to trmanager directly
1632 1627 return self.trmanager.transaction()
1633 1628
1634 1629
1635 1630 class transactionmanager(util.transactional):
1636 1631 """An object to manage the life cycle of a transaction
1637 1632
1638 1633 It creates the transaction on demand and calls the appropriate hooks when
1639 1634 closing the transaction."""
1640 1635
1641 1636 def __init__(self, repo, source, url):
1642 1637 self.repo = repo
1643 1638 self.source = source
1644 1639 self.url = url
1645 1640 self._tr = None
1646 1641
1647 1642 def transaction(self):
1648 1643 """Return an open transaction object, constructing if necessary"""
1649 1644 if not self._tr:
1650 1645 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1651 1646 self._tr = self.repo.transaction(trname)
1652 1647 self._tr.hookargs[b'source'] = self.source
1653 1648 self._tr.hookargs[b'url'] = self.url
1654 1649 return self._tr
1655 1650
1656 1651 def close(self):
1657 1652 """close transaction if created"""
1658 1653 if self._tr is not None:
1659 1654 self._tr.close()
1660 1655
1661 1656 def release(self):
1662 1657 """release transaction if created"""
1663 1658 if self._tr is not None:
1664 1659 self._tr.release()
1665 1660
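# Illustrative sketch (editor addition, not in the original module): typical
# use of transactionmanager, mirroring how pull() drives it below.  `repo`
# and `remote` are assumed stand-ins for a local repository and a peer.
#
# trmanager = transactionmanager(repo, b'pull', remote.url())
# with trmanager:
#     tr = trmanager.transaction()  # created lazily on first call
#     ...  # apply incoming data under the transaction
# # util.transactional's context-manager protocol closes (commits) the
# # transaction on success and releases (rolls back) it on error.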
1666 1661
1667 1662 def listkeys(remote, namespace):
1668 1663 with remote.commandexecutor() as e:
1669 1664 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1670 1665
1671 1666
1672 1667 def _fullpullbundle2(repo, pullop):
1673 1668 # The server may send a partial reply, i.e. when inlining
1674 1669 # pre-computed bundles. In that case, update the common
1675 1670 # set based on the results and pull another bundle.
1676 1671 #
1677 1672 # There are two indicators that the process is finished:
1678 1673 # - no changeset has been added, or
1679 1674 # - all remote heads are known locally.
1680 1675 # The head check must use the unfiltered view as obsolescence
1681 1676 # markers can hide heads.
1682 1677 unfi = repo.unfiltered()
1683 1678 unficl = unfi.changelog
1684 1679
1685 1680 def headsofdiff(h1, h2):
1686 1681 """Returns heads(h1 % h2)"""
1687 1682 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1688 1683 return {ctx.node() for ctx in res}
1689 1684
1690 1685 def headsofunion(h1, h2):
1691 1686 """Returns heads((h1 + h2) - null)"""
1692 1687 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1693 1688 return {ctx.node() for ctx in res}
1694 1689
1695 1690 while True:
1696 1691 old_heads = unficl.heads()
1697 1692 clstart = len(unficl)
1698 1693 _pullbundle2(pullop)
1699 1694 if repository.NARROW_REQUIREMENT in repo.requirements:
1700 1695 # XXX narrow clones filter the heads on the server side during
1701 1696 # XXX getbundle and result in partial replies as well.
1702 1697 # XXX Disable pull bundles in this case as band aid to avoid
1703 1698 # XXX extra round trips.
1704 1699 break
1705 1700 if clstart == len(unficl):
1706 1701 break
1707 1702 if all(unficl.hasnode(n) for n in pullop.rheads):
1708 1703 break
1709 1704 new_heads = headsofdiff(unficl.heads(), old_heads)
1710 1705 pullop.common = headsofunion(new_heads, pullop.common)
1711 1706 pullop.rheads = set(pullop.rheads) - pullop.common
1712 1707
1713 1708
1714 1709 def add_confirm_callback(repo, pullop):
1715 1710 """ adds a finalize callback to transaction which can be used to show stats
1716 1711 to user and confirm the pull before committing transaction """
1717 1712
1718 1713 tr = pullop.trmanager.transaction()
1719 1714 scmutil.registersummarycallback(
1720 1715 repo, tr, txnname=b'pull', as_validator=True
1721 1716 )
1722 1717 reporef = weakref.ref(repo.unfiltered())
1723 1718
1724 1719 def prompt(tr):
1725 1720 repo = reporef()
1726 1721 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1727 1722 if repo.ui.promptchoice(cm):
1728 1723 raise error.Abort(_(b"user aborted"))
1729 1724
1730 1725 tr.addvalidator(b'900-pull-prompt', prompt)
1731 1726
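# Illustrative sketch (editor addition): the same validator mechanism can be
# used by extensions to veto a pull.  Validators run before the transaction
# commits, and raising error.Abort rolls everything back.  The validator name
# and predicate below are hypothetical.
#
# def _myvalidator(tr):
#     if not user_confirms():  # hypothetical predicate
#         raise error.Abort(b'pull rejected by my extension')
#
# tr = pullop.trmanager.transaction()
# tr.addvalidator(b'950-my-check', _myvalidator)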
1732 1727
1733 1728 def pull(
1734 1729 repo,
1735 1730 remote,
1736 1731 heads=None,
1737 1732 force=False,
1738 1733 bookmarks=(),
1739 1734 opargs=None,
1740 1735 streamclonerequested=None,
1741 1736 includepats=None,
1742 1737 excludepats=None,
1743 1738 depth=None,
1744 1739 confirm=None,
1745 1740 ):
1746 1741 """Fetch repository data from a remote.
1747 1742
1748 1743 This is the main function used to retrieve data from a remote repository.
1749 1744
1750 1745 ``repo`` is the local repository to clone into.
1751 1746 ``remote`` is a peer instance.
1752 1747 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1753 1748 default) means to pull everything from the remote.
1754 1749 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1755 1750 default, all remote bookmarks are pulled.
1756 1751 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1757 1752 initialization.
1758 1753 ``streamclonerequested`` is a boolean indicating whether a "streaming
1759 1754 clone" is requested. A "streaming clone" is essentially a raw file copy
1760 1755 of revlogs from the server. This only works when the local repository is
1761 1756 empty. The default value of ``None`` means to respect the server
1762 1757 configuration for preferring stream clones.
1763 1758 ``includepats`` and ``excludepats`` define explicit file patterns to
1764 1759 include and exclude in storage, respectively. If not defined, narrow
1765 1760 patterns from the repo instance are used, if available.
1766 1761 ``depth`` is an integer indicating the DAG depth of history we're
1767 1762 interested in. If defined, for each revision specified in ``heads``, we
1768 1763 will fetch up to this many of its ancestors and data associated with them.
1769 1764 ``confirm`` is a boolean indicating whether the pull should be confirmed
1770 1765 before committing the transaction. This overrides HGPLAIN.
1771 1766
1772 1767 Returns the ``pulloperation`` created for this pull.
1773 1768 """
1774 1769 if opargs is None:
1775 1770 opargs = {}
1776 1771
1777 1772 # We allow the narrow patterns to be passed in explicitly to provide more
1778 1773 # flexibility for API consumers.
1779 1774 if includepats or excludepats:
1780 1775 includepats = includepats or set()
1781 1776 excludepats = excludepats or set()
1782 1777 else:
1783 1778 includepats, excludepats = repo.narrowpats
1784 1779
1785 1780 narrowspec.validatepatterns(includepats)
1786 1781 narrowspec.validatepatterns(excludepats)
1787 1782
1788 1783 pullop = pulloperation(
1789 1784 repo,
1790 1785 remote,
1791 1786 heads,
1792 1787 force,
1793 1788 bookmarks=bookmarks,
1794 1789 streamclonerequested=streamclonerequested,
1795 1790 includepats=includepats,
1796 1791 excludepats=excludepats,
1797 1792 depth=depth,
1798 1793 **pycompat.strkwargs(opargs)
1799 1794 )
1800 1795
1801 1796 peerlocal = pullop.remote.local()
1802 1797 if peerlocal:
1803 1798 missing = set(peerlocal.requirements) - pullop.repo.supported
1804 1799 if missing:
1805 1800 msg = _(
1806 1801 b"required features are not"
1807 1802 b" supported in the destination:"
1808 1803 b" %s"
1809 1804 ) % (b', '.join(sorted(missing)))
1810 1805 raise error.Abort(msg)
1811 1806
1812 1807 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1813 1808 wlock = util.nullcontextmanager()
1814 1809 if not bookmod.bookmarksinstore(repo):
1815 1810 wlock = repo.wlock()
1816 1811 with wlock, repo.lock(), pullop.trmanager:
1817 1812 if confirm or (
1818 1813 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1819 1814 ):
1820 1815 add_confirm_callback(repo, pullop)
1821 1816
1822 1817 # Use the modern wire protocol, if available.
1823 1818 if remote.capable(b'command-changesetdata'):
1824 1819 exchangev2.pull(pullop)
1825 1820 else:
1826 1821 # This should ideally be in _pullbundle2(). However, it needs to run
1827 1822 # before discovery to avoid extra work.
1828 1823 _maybeapplyclonebundle(pullop)
1829 1824 streamclone.maybeperformlegacystreamclone(pullop)
1830 1825 _pulldiscovery(pullop)
1831 1826 if pullop.canusebundle2:
1832 1827 _fullpullbundle2(repo, pullop)
1833 1828 _pullchangeset(pullop)
1834 1829 _pullphase(pullop)
1835 1830 _pullbookmarks(pullop)
1836 1831 _pullobsolete(pullop)
1837 1832
1838 1833 # storing remotenames
1839 1834 if repo.ui.configbool(b'experimental', b'remotenames'):
1840 1835 logexchange.pullremotenames(repo, remote)
1841 1836
1842 1837 return pullop
1843 1838
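# Illustrative sketch (editor addition): driving pull() from an extension or
# script.  `hg.peer` is the usual way to obtain a peer object; the URL below
# is made up.
#
# from mercurial import hg
#
# def fetch_everything(ui, repo):
#     remote = hg.peer(repo, {}, b'https://example.com/repo')
#     pullop = pull(repo, remote)  # heads=None pulls everything
#     return pullop.cgresult       # 0 means "no changes found"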
1844 1839
1845 1840 # list of steps to perform discovery before pull
1846 1841 pulldiscoveryorder = []
1847 1842
1848 1843 # Mapping between step name and function
1849 1844 #
1850 1845 # This exists to help extensions wrap steps if necessary
1851 1846 pulldiscoverymapping = {}
1852 1847
1853 1848
1854 1849 def pulldiscovery(stepname):
1855 1850 """decorator for function performing discovery before pull
1856 1851
1857 1852 The function is added to the step -> function mapping and appended to the
1858 1853 list of steps. Beware that decorated functions will be added in order (this
1859 1854 may matter).
1860 1855
1861 1856 You can only use this decorator for a new step; if you want to wrap a step
1862 1857 from an extension, change the pulldiscoverymapping dictionary directly."""
1863 1858
1864 1859 def dec(func):
1865 1860 assert stepname not in pulldiscoverymapping
1866 1861 pulldiscoverymapping[stepname] = func
1867 1862 pulldiscoveryorder.append(stepname)
1868 1863 return func
1869 1864
1870 1865 return dec
1871 1866
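# Illustrative sketch (editor addition): registering an additional discovery
# step.  The step name and body are hypothetical.
#
# @pulldiscovery(b'my-ext-step')
# def _pulldiscoverymystep(pullop):
#     pullop.repo.ui.debug(b'my-ext: running extra discovery\n')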
1872 1867
1873 1868 def _pulldiscovery(pullop):
1874 1869 """Run all discovery steps"""
1875 1870 for stepname in pulldiscoveryorder:
1876 1871 step = pulldiscoverymapping[stepname]
1877 1872 step(pullop)
1878 1873
1879 1874
1880 1875 @pulldiscovery(b'b1:bookmarks')
1881 1876 def _pullbookmarkbundle1(pullop):
1882 1877 """fetch bookmark data in bundle1 case
1883 1878
1884 1879 If not using bundle2, we have to fetch bookmarks before changeset
1885 1880 discovery to reduce the chance and impact of race conditions."""
1886 1881 if pullop.remotebookmarks is not None:
1887 1882 return
1888 1883 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1889 1884 # all known bundle2 servers now support listkeys, but let's be nice with
1890 1885 # new implementations.
1891 1886 return
1892 1887 books = listkeys(pullop.remote, b'bookmarks')
1893 1888 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1894 1889
1895 1890
1896 1891 @pulldiscovery(b'changegroup')
1897 1892 def _pulldiscoverychangegroup(pullop):
1898 1893 """discovery phase for the pull
1899 1894
1900 1895 Currently handles changeset discovery only; will change to handle all
1901 1896 discovery at some point."""
1902 1897 tmp = discovery.findcommonincoming(
1903 1898 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1904 1899 )
1905 1900 common, fetch, rheads = tmp
1906 1901 has_node = pullop.repo.unfiltered().changelog.index.has_node
1907 1902 if fetch and rheads:
1908 1903 # If a remote head is filtered locally, put it back in common.
1909 1904 #
1910 1905 # This is a hackish solution to catch most of the "common but locally
1911 1906 # hidden" situations. We do not perform discovery on the unfiltered
1912 1907 # repository because it ends up doing a pathological amount of round
1913 1908 # trips for a huge amount of changesets we do not care about.
1914 1909 #
1915 1910 # If a set of such "common but filtered" changesets exists on the server
1916 1911 # but does not include a remote head, we'll not be able to detect it.
1917 1912 scommon = set(common)
1918 1913 for n in rheads:
1919 1914 if has_node(n):
1920 1915 if n not in scommon:
1921 1916 common.append(n)
1922 1917 if set(rheads).issubset(set(common)):
1923 1918 fetch = []
1924 1919 pullop.common = common
1925 1920 pullop.fetch = fetch
1926 1921 pullop.rheads = rheads
1927 1922
1928 1923
1929 1924 def _pullbundle2(pullop):
1930 1925 """pull data using bundle2
1931 1926
1932 1927 For now, the only supported data is the changegroup."""
1933 1928 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1934 1929
1935 1930 # make ui easier to access
1936 1931 ui = pullop.repo.ui
1937 1932
1938 1933 # At the moment we don't do stream clones over bundle2. If that is
1939 1934 # implemented then here's where the check for that will go.
1940 1935 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1941 1936
1942 1937 # declare pull perimeters
1943 1938 kwargs[b'common'] = pullop.common
1944 1939 kwargs[b'heads'] = pullop.heads or pullop.rheads
1945 1940
1946 1941 # check whether the server supports narrow, then add includepats and excludepats
1947 1942 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1948 1943 if servernarrow and pullop.includepats:
1949 1944 kwargs[b'includepats'] = pullop.includepats
1950 1945 if servernarrow and pullop.excludepats:
1951 1946 kwargs[b'excludepats'] = pullop.excludepats
1952 1947
1953 1948 if streaming:
1954 1949 kwargs[b'cg'] = False
1955 1950 kwargs[b'stream'] = True
1956 1951 pullop.stepsdone.add(b'changegroup')
1957 1952 pullop.stepsdone.add(b'phases')
1958 1953
1959 1954 else:
1960 1955 # pulling changegroup
1961 1956 pullop.stepsdone.add(b'changegroup')
1962 1957
1963 1958 kwargs[b'cg'] = pullop.fetch
1964 1959
1965 1960 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1966 1961 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1967 1962 if not legacyphase and hasbinaryphase:
1968 1963 kwargs[b'phases'] = True
1969 1964 pullop.stepsdone.add(b'phases')
1970 1965
1971 1966 if b'listkeys' in pullop.remotebundle2caps:
1972 1967 if b'phases' not in pullop.stepsdone:
1973 1968 kwargs[b'listkeys'] = [b'phases']
1974 1969
1975 1970 bookmarksrequested = False
1976 1971 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1977 1972 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1978 1973
1979 1974 if pullop.remotebookmarks is not None:
1980 1975 pullop.stepsdone.add(b'request-bookmarks')
1981 1976
1982 1977 if (
1983 1978 b'request-bookmarks' not in pullop.stepsdone
1984 1979 and pullop.remotebookmarks is None
1985 1980 and not legacybookmark
1986 1981 and hasbinarybook
1987 1982 ):
1988 1983 kwargs[b'bookmarks'] = True
1989 1984 bookmarksrequested = True
1990 1985
1991 1986 if b'listkeys' in pullop.remotebundle2caps:
1992 1987 if b'request-bookmarks' not in pullop.stepsdone:
1993 1988 # make sure to always include bookmark data when migrating
1994 1989 # `hg incoming --bundle` to using this function.
1995 1990 pullop.stepsdone.add(b'request-bookmarks')
1996 1991 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1997 1992
1998 1993 # If this is a full pull / clone and the server supports the clone bundles
1999 1994 # feature, tell the server whether we attempted a clone bundle. The
2000 1995 # presence of this flag indicates the client supports clone bundles. This
2001 1996 # will enable the server to treat clients that support clone bundles
2002 1997 # differently from those that don't.
2003 1998 if (
2004 1999 pullop.remote.capable(b'clonebundles')
2005 2000 and pullop.heads is None
2006 2001 and list(pullop.common) == [nullid]
2007 2002 ):
2008 2003 kwargs[b'cbattempted'] = pullop.clonebundleattempted
2009 2004
2010 2005 if streaming:
2011 2006 pullop.repo.ui.status(_(b'streaming all changes\n'))
2012 2007 elif not pullop.fetch:
2013 2008 pullop.repo.ui.status(_(b"no changes found\n"))
2014 2009 pullop.cgresult = 0
2015 2010 else:
2016 2011 if pullop.heads is None and list(pullop.common) == [nullid]:
2017 2012 pullop.repo.ui.status(_(b"requesting all changes\n"))
2018 2013 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2019 2014 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
2020 2015 if obsolete.commonversion(remoteversions) is not None:
2021 2016 kwargs[b'obsmarkers'] = True
2022 2017 pullop.stepsdone.add(b'obsmarkers')
2023 2018 _pullbundle2extraprepare(pullop, kwargs)
2024 2019
2025 2020 with pullop.remote.commandexecutor() as e:
2026 2021 args = dict(kwargs)
2027 2022 args[b'source'] = b'pull'
2028 2023 bundle = e.callcommand(b'getbundle', args).result()
2029 2024
2030 2025 try:
2031 2026 op = bundle2.bundleoperation(
2032 2027 pullop.repo, pullop.gettransaction, source=b'pull'
2033 2028 )
2034 2029 op.modes[b'bookmarks'] = b'records'
2035 2030 bundle2.processbundle(pullop.repo, bundle, op=op)
2036 2031 except bundle2.AbortFromPart as exc:
2037 2032 pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
2038 2033 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
2039 2034 except error.BundleValueError as exc:
2040 2035 raise error.Abort(_(b'missing support for %s') % exc)
2041 2036
2042 2037 if pullop.fetch:
2043 2038 pullop.cgresult = bundle2.combinechangegroupresults(op)
2044 2039
2045 2040 # processing phases change
2046 2041 for namespace, value in op.records[b'listkeys']:
2047 2042 if namespace == b'phases':
2048 2043 _pullapplyphases(pullop, value)
2049 2044
2050 2045 # processing bookmark update
2051 2046 if bookmarksrequested:
2052 2047 books = {}
2053 2048 for record in op.records[b'bookmarks']:
2054 2049 books[record[b'bookmark']] = record[b"node"]
2055 2050 pullop.remotebookmarks = books
2056 2051 else:
2057 2052 for namespace, value in op.records[b'listkeys']:
2058 2053 if namespace == b'bookmarks':
2059 2054 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
2060 2055
2061 2056 # bookmark data were either already there or pulled in the bundle
2062 2057 if pullop.remotebookmarks is not None:
2063 2058 _pullbookmarks(pullop)
2064 2059
2065 2060
2066 2061 def _pullbundle2extraprepare(pullop, kwargs):
2067 2062 """hook function so that extensions can extend the getbundle call"""
2068 2063
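# Illustrative sketch (editor addition): an extension can wrap the hook above
# to add arguments to the getbundle call.  The argument name is hypothetical,
# and the server must know how to handle it.
#
# from mercurial import exchange, extensions
#
# def _myextraprepare(orig, pullop, kwargs):
#     orig(pullop, kwargs)
#     kwargs[b'myext_flag'] = True
#
# def extsetup(ui):
#     extensions.wrapfunction(
#         exchange, '_pullbundle2extraprepare', _myextraprepare
#     )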
2069 2064
2070 2065 def _pullchangeset(pullop):
2071 2066 """pull changeset from unbundle into the local repo"""
2072 2067 # We delay the open of the transaction as late as possible so we
2073 2068 # don't open transaction for nothing or you break future useful
2074 2069 # rollback call
2075 2070 if b'changegroup' in pullop.stepsdone:
2076 2071 return
2077 2072 pullop.stepsdone.add(b'changegroup')
2078 2073 if not pullop.fetch:
2079 2074 pullop.repo.ui.status(_(b"no changes found\n"))
2080 2075 pullop.cgresult = 0
2081 2076 return
2082 2077 tr = pullop.gettransaction()
2083 2078 if pullop.heads is None and list(pullop.common) == [nullid]:
2084 2079 pullop.repo.ui.status(_(b"requesting all changes\n"))
2085 2080 elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
2086 2081 # issue1320, avoid a race if remote changed after discovery
2087 2082 pullop.heads = pullop.rheads
2088 2083
2089 2084 if pullop.remote.capable(b'getbundle'):
2090 2085 # TODO: get bundlecaps from remote
2091 2086 cg = pullop.remote.getbundle(
2092 2087 b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
2093 2088 )
2094 2089 elif pullop.heads is None:
2095 2090 with pullop.remote.commandexecutor() as e:
2096 2091 cg = e.callcommand(
2097 2092 b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull'}
2098 2093 ).result()
2099 2094
2100 2095 elif not pullop.remote.capable(b'changegroupsubset'):
2101 2096 raise error.Abort(
2102 2097 _(
2103 2098 b"partial pull cannot be done because "
2104 2099 b"other repository doesn't support "
2105 2100 b"changegroupsubset."
2106 2101 )
2107 2102 )
2108 2103 else:
2109 2104 with pullop.remote.commandexecutor() as e:
2110 2105 cg = e.callcommand(
2111 2106 b'changegroupsubset',
2112 2107 {
2113 2108 b'bases': pullop.fetch,
2114 2109 b'heads': pullop.heads,
2115 2110 b'source': b'pull',
2116 2111 },
2117 2112 ).result()
2118 2113
2119 2114 bundleop = bundle2.applybundle(
2120 2115 pullop.repo, cg, tr, b'pull', pullop.remote.url()
2121 2116 )
2122 2117 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2123 2118
2124 2119
2125 2120 def _pullphase(pullop):
2126 2121 # Get remote phases data from remote
2127 2122 if b'phases' in pullop.stepsdone:
2128 2123 return
2129 2124 remotephases = listkeys(pullop.remote, b'phases')
2130 2125 _pullapplyphases(pullop, remotephases)
2131 2126
2132 2127
2133 2128 def _pullapplyphases(pullop, remotephases):
2134 2129 """apply phase movement from observed remote state"""
2135 2130 if b'phases' in pullop.stepsdone:
2136 2131 return
2137 2132 pullop.stepsdone.add(b'phases')
2138 2133 publishing = bool(remotephases.get(b'publishing', False))
2139 2134 if remotephases and not publishing:
2140 2135 # remote is new and non-publishing
2141 2136 pheads, _dr = phases.analyzeremotephases(
2142 2137 pullop.repo, pullop.pulledsubset, remotephases
2143 2138 )
2144 2139 dheads = pullop.pulledsubset
2145 2140 else:
2146 2141 # Remote is old or publishing; all common changesets
2147 2142 # should be seen as public
2148 2143 pheads = pullop.pulledsubset
2149 2144 dheads = []
2150 2145 unfi = pullop.repo.unfiltered()
2151 2146 phase = unfi._phasecache.phase
2152 2147 rev = unfi.changelog.index.get_rev
2153 2148 public = phases.public
2154 2149 draft = phases.draft
2155 2150
2156 2151 # exclude changesets already public locally and update the others
2157 2152 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2158 2153 if pheads:
2159 2154 tr = pullop.gettransaction()
2160 2155 phases.advanceboundary(pullop.repo, tr, public, pheads)
2161 2156
2162 2157 # exclude changesets already draft locally and update the others
2163 2158 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2164 2159 if dheads:
2165 2160 tr = pullop.gettransaction()
2166 2161 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2167 2162
2168 2163
2169 2164 def _pullbookmarks(pullop):
2170 2165 """process the remote bookmark information to update the local one"""
2171 2166 if b'bookmarks' in pullop.stepsdone:
2172 2167 return
2173 2168 pullop.stepsdone.add(b'bookmarks')
2174 2169 repo = pullop.repo
2175 2170 remotebookmarks = pullop.remotebookmarks
2176 2171 bookmod.updatefromremote(
2177 2172 repo.ui,
2178 2173 repo,
2179 2174 remotebookmarks,
2180 2175 pullop.remote.url(),
2181 2176 pullop.gettransaction,
2182 2177 explicit=pullop.explicitbookmarks,
2183 2178 )
2184 2179
2185 2180
2186 2181 def _pullobsolete(pullop):
2187 2182 """utility function to pull obsolete markers from a remote
2188 2183
2189 2184 The `gettransaction` argument is a function that returns the pull
2190 2185 transaction, creating one if necessary. We return the transaction to inform
2191 2186 the calling code that a new transaction has been created (when applicable).
2192 2187
2193 2188 Exists mostly to allow overriding for experimentation purposes"""
2194 2189 if b'obsmarkers' in pullop.stepsdone:
2195 2190 return
2196 2191 pullop.stepsdone.add(b'obsmarkers')
2197 2192 tr = None
2198 2193 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2199 2194 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2200 2195 remoteobs = listkeys(pullop.remote, b'obsolete')
2201 2196 if b'dump0' in remoteobs:
2202 2197 tr = pullop.gettransaction()
2203 2198 markers = []
2204 2199 for key in sorted(remoteobs, reverse=True):
2205 2200 if key.startswith(b'dump'):
2206 2201 data = util.b85decode(remoteobs[key])
2207 2202 version, newmarks = obsolete._readmarkers(data)
2208 2203 markers += newmarks
2209 2204 if markers:
2210 2205 pullop.repo.obsstore.add(tr, markers)
2211 2206 pullop.repo.invalidatevolatilesets()
2212 2207 return tr
2213 2208
2214 2209
2215 2210 def applynarrowacl(repo, kwargs):
2216 2211 """Apply narrow fetch access control.
2217 2212
2218 2213 This massages the named arguments for getbundle wire protocol commands
2219 2214 so requested data is filtered through access control rules.
2220 2215 """
2221 2216 ui = repo.ui
2222 2217 # TODO this assumes existence of HTTP and is a layering violation.
2223 2218 username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
2224 2219 user_includes = ui.configlist(
2225 2220 _NARROWACL_SECTION,
2226 2221 username + b'.includes',
2227 2222 ui.configlist(_NARROWACL_SECTION, b'default.includes'),
2228 2223 )
2229 2224 user_excludes = ui.configlist(
2230 2225 _NARROWACL_SECTION,
2231 2226 username + b'.excludes',
2232 2227 ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
2233 2228 )
2234 2229 if not user_includes:
2235 2230 raise error.Abort(
2236 2231 _(b"%s configuration for user %s is empty")
2237 2232 % (_NARROWACL_SECTION, username)
2238 2233 )
2239 2234
2240 2235 user_includes = [
2241 2236 b'path:.' if p == b'*' else b'path:' + p for p in user_includes
2242 2237 ]
2243 2238 user_excludes = [
2244 2239 b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
2245 2240 ]
2246 2241
2247 2242 req_includes = set(kwargs.get('includepats', []))
2248 2243 req_excludes = set(kwargs.get('excludepats', []))
2249 2244
2250 2245 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
2251 2246 req_includes, req_excludes, user_includes, user_excludes
2252 2247 )
2253 2248
2254 2249 if invalid_includes:
2255 2250 raise error.Abort(
2256 2251 _(b"The following includes are not accessible for %s: %s")
2257 2252 % (username, stringutil.pprint(invalid_includes))
2258 2253 )
2259 2254
2260 2255 new_args = {}
2261 2256 new_args.update(kwargs)
2262 2257 new_args['narrow'] = True
2263 2258 new_args['narrow_acl'] = True
2264 2259 new_args['includepats'] = req_includes
2265 2260 if req_excludes:
2266 2261 new_args['excludepats'] = req_excludes
2267 2262
2268 2263 return new_args
2269 2264
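# Illustrative sketch (editor addition): a sample [narrowacl] section
# consumed by applynarrowacl() above.  The user name and paths are made up;
# '*' expands to 'path:.' (everything).
#
#   [narrowacl]
#   default.includes = *
#   alice.includes = src, docs
#   alice.excludes = src/secret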
2270 2265
2271 2266 def _computeellipsis(repo, common, heads, known, match, depth=None):
2272 2267 """Compute the shape of a narrowed DAG.
2273 2268
2274 2269 Args:
2275 2270 repo: The repository we're transferring.
2276 2271 common: The roots of the DAG range we're transferring.
2277 2272 May be just [nullid], which means all ancestors of heads.
2278 2273 heads: The heads of the DAG range we're transferring.
2279 2274 match: The narrowmatcher that allows us to identify relevant changes.
2280 2275 depth: If not None, only consider nodes to be full nodes if they are at
2281 2276 most depth changesets away from one of heads.
2282 2277
2283 2278 Returns:
2284 2279 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2285 2280
2286 2281 visitnodes: The list of nodes (either full or ellipsis) which
2287 2282 need to be sent to the client.
2288 2283 relevant_nodes: The set of changelog nodes which change a file inside
2289 2284 the narrowspec. The client needs these as non-ellipsis nodes.
2290 2285 ellipsisroots: A dict of {rev: parents} that is used in
2291 2286 narrowchangegroup to produce ellipsis nodes with the
2292 2287 correct parents.
2293 2288 """
2294 2289 cl = repo.changelog
2295 2290 mfl = repo.manifestlog
2296 2291
2297 2292 clrev = cl.rev
2298 2293
2299 2294 commonrevs = {clrev(n) for n in common} | {nullrev}
2300 2295 headsrevs = {clrev(n) for n in heads}
2301 2296
2302 2297 if depth:
2303 2298 revdepth = {h: 0 for h in headsrevs}
2304 2299
2305 2300 ellipsisheads = collections.defaultdict(set)
2306 2301 ellipsisroots = collections.defaultdict(set)
2307 2302
2308 2303 def addroot(head, curchange):
2309 2304 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2310 2305 ellipsisroots[head].add(curchange)
2311 2306 # Recursively split ellipsis heads with 3 roots by finding the
2312 2307 # roots' youngest common descendant which is an elided merge commit.
2313 2308 # That descendant takes 2 of the 3 roots as its own, and becomes a
2314 2309 # root of the head.
2315 2310 while len(ellipsisroots[head]) > 2:
2316 2311 child, roots = splithead(head)
2317 2312 splitroots(head, child, roots)
2318 2313 head = child # Recurse in case we just added a 3rd root
2319 2314
2320 2315 def splitroots(head, child, roots):
2321 2316 ellipsisroots[head].difference_update(roots)
2322 2317 ellipsisroots[head].add(child)
2323 2318 ellipsisroots[child].update(roots)
2324 2319 ellipsisroots[child].discard(child)
2325 2320
2326 2321 def splithead(head):
2327 2322 r1, r2, r3 = sorted(ellipsisroots[head])
2328 2323 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2329 2324 mid = repo.revs(
2330 2325 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2331 2326 )
2332 2327 for j in mid:
2333 2328 if j == nr2:
2334 2329 return nr2, (nr1, nr2)
2335 2330 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2336 2331 return j, (nr1, nr2)
2337 2332 raise error.Abort(
2338 2333 _(
2339 2334 b'Failed to split up ellipsis node! head: %d, '
2340 2335 b'roots: %d %d %d'
2341 2336 )
2342 2337 % (head, r1, r2, r3)
2343 2338 )
2344 2339
2345 2340 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2346 2341 visit = reversed(missing)
2347 2342 relevant_nodes = set()
2348 2343 visitnodes = [cl.node(m) for m in missing]
2349 2344 required = set(headsrevs) | known
2350 2345 for rev in visit:
2351 2346 clrev = cl.changelogrevision(rev)
2352 2347 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2353 2348 if depth is not None:
2354 2349 curdepth = revdepth[rev]
2355 2350 for p in ps:
2356 2351 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2357 2352 needed = False
2358 2353 shallow_enough = depth is None or revdepth[rev] <= depth
2359 2354 if shallow_enough:
2360 2355 curmf = mfl[clrev.manifest].read()
2361 2356 if ps:
2362 2357 # We choose to not trust the changed files list in
2363 2358 # changesets because it's not always correct. TODO: could
2364 2359 # we trust it for the non-merge case?
2365 2360 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2366 2361 needed = bool(curmf.diff(p1mf, match))
2367 2362 if not needed and len(ps) > 1:
2368 2363 # For merge changes, the list of changed files is not
2369 2364 # helpful, since we need to emit the merge if a file
2370 2365 # in the narrow spec has changed on either side of the
2371 2366 # merge. As a result, we do a manifest diff to check.
2372 2367 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2373 2368 needed = bool(curmf.diff(p2mf, match))
2374 2369 else:
2375 2370 # For a root node, we need to include the node if any
2376 2371 # files in the node match the narrowspec.
2377 2372 needed = any(curmf.walk(match))
2378 2373
2379 2374 if needed:
2380 2375 for head in ellipsisheads[rev]:
2381 2376 addroot(head, rev)
2382 2377 for p in ps:
2383 2378 required.add(p)
2384 2379 relevant_nodes.add(cl.node(rev))
2385 2380 else:
2386 2381 if not ps:
2387 2382 ps = [nullrev]
2388 2383 if rev in required:
2389 2384 for head in ellipsisheads[rev]:
2390 2385 addroot(head, rev)
2391 2386 for p in ps:
2392 2387 ellipsisheads[p].add(rev)
2393 2388 else:
2394 2389 for p in ps:
2395 2390 ellipsisheads[p] |= ellipsisheads[rev]
2396 2391
2397 2392 # add common changesets as roots of their reachable ellipsis heads
2398 2393 for c in commonrevs:
2399 2394 for head in ellipsisheads[c]:
2400 2395 addroot(head, c)
2401 2396 return visitnodes, relevant_nodes, ellipsisroots
2402 2397
2403 2398
2404 2399 def caps20to10(repo, role):
2405 2400 """return a set with appropriate options to use bundle20 during getbundle"""
2406 2401 caps = {b'HG20'}
2407 2402 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2408 2403 caps.add(b'bundle2=' + urlreq.quote(capsblob))
2409 2404 return caps
2410 2405
2411 2406
2412 2407 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2413 2408 getbundle2partsorder = []
2414 2409
2415 2410 # Mapping between step name and function
2416 2411 #
2417 2412 # This exists to help extensions wrap steps if necessary
2418 2413 getbundle2partsmapping = {}
2419 2414
2420 2415
2421 2416 def getbundle2partsgenerator(stepname, idx=None):
2422 2417 """decorator for function generating bundle2 part for getbundle
2423 2418
2424 2419 The function is added to the step -> function mapping and appended to the
2425 2420 list of steps. Beware that decorated functions will be added in order
2426 2421 (this may matter).
2427 2422
2428 2423 You can only use this decorator for new steps; if you want to wrap a step
2429 2424 from an extension, modify the getbundle2partsmapping dictionary directly."""
2430 2425
2431 2426 def dec(func):
2432 2427 assert stepname not in getbundle2partsmapping
2433 2428 getbundle2partsmapping[stepname] = func
2434 2429 if idx is None:
2435 2430 getbundle2partsorder.append(stepname)
2436 2431 else:
2437 2432 getbundle2partsorder.insert(idx, stepname)
2438 2433 return func
2439 2434
2440 2435 return dec
2441 2436
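# Illustrative sketch (editor addition): adding a server-side part to
# getbundle responses.  The part name and payload are hypothetical; clients
# skip unknown advisory (mandatory=False) parts.
#
# @getbundle2partsgenerator(b'my-ext-part')
# def _getbundlemyextpart(
#     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
# ):
#     bundler.newpart(b'my-ext-part', data=b'payload', mandatory=False)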
2442 2437
2443 2438 def bundle2requested(bundlecaps):
2444 2439 if bundlecaps is not None:
2445 2440 return any(cap.startswith(b'HG2') for cap in bundlecaps)
2446 2441 return False
2447 2442
2448 2443
2449 2444 def getbundlechunks(
2450 2445 repo, source, heads=None, common=None, bundlecaps=None, **kwargs
2451 2446 ):
2452 2447 """Return chunks constituting a bundle's raw data.
2453 2448
2454 2449 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2455 2450 passed.
2456 2451
2457 2452 Returns a 2-tuple of a dict with metadata about the generated bundle
2458 2453 and an iterator over raw chunks (of varying sizes).
2459 2454 """
2460 2455 kwargs = pycompat.byteskwargs(kwargs)
2461 2456 info = {}
2462 2457 usebundle2 = bundle2requested(bundlecaps)
2463 2458 # bundle10 case
2464 2459 if not usebundle2:
2465 2460 if bundlecaps and not kwargs.get(b'cg', True):
2466 2461 raise ValueError(
2467 2462 _(b'request for bundle10 must include changegroup')
2468 2463 )
2469 2464
2470 2465 if kwargs:
2471 2466 raise ValueError(
2472 2467 _(b'unsupported getbundle arguments: %s')
2473 2468 % b', '.join(sorted(kwargs.keys()))
2474 2469 )
2475 2470 outgoing = _computeoutgoing(repo, heads, common)
2476 2471 info[b'bundleversion'] = 1
2477 2472 return (
2478 2473 info,
2479 2474 changegroup.makestream(
2480 2475 repo, outgoing, b'01', source, bundlecaps=bundlecaps
2481 2476 ),
2482 2477 )
2483 2478
2484 2479 # bundle20 case
2485 2480 info[b'bundleversion'] = 2
2486 2481 b2caps = {}
2487 2482 for bcaps in bundlecaps:
2488 2483 if bcaps.startswith(b'bundle2='):
2489 2484 blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
2490 2485 b2caps.update(bundle2.decodecaps(blob))
2491 2486 bundler = bundle2.bundle20(repo.ui, b2caps)
2492 2487
2493 2488 kwargs[b'heads'] = heads
2494 2489 kwargs[b'common'] = common
2495 2490
2496 2491 for name in getbundle2partsorder:
2497 2492 func = getbundle2partsmapping[name]
2498 2493 func(
2499 2494 bundler,
2500 2495 repo,
2501 2496 source,
2502 2497 bundlecaps=bundlecaps,
2503 2498 b2caps=b2caps,
2504 2499 **pycompat.strkwargs(kwargs)
2505 2500 )
2506 2501
2507 2502 info[b'prefercompressed'] = bundler.prefercompressed
2508 2503
2509 2504 return info, bundler.getchunks()
2510 2505
2511 2506
2512 2507 @getbundle2partsgenerator(b'stream2')
2513 2508 def _getbundlestream2(bundler, repo, *args, **kwargs):
2514 2509 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2515 2510
2516 2511
2517 2512 @getbundle2partsgenerator(b'changegroup')
2518 2513 def _getbundlechangegrouppart(
2519 2514 bundler,
2520 2515 repo,
2521 2516 source,
2522 2517 bundlecaps=None,
2523 2518 b2caps=None,
2524 2519 heads=None,
2525 2520 common=None,
2526 2521 **kwargs
2527 2522 ):
2528 2523 """add a changegroup part to the requested bundle"""
2529 2524 if not kwargs.get('cg', True) or not b2caps:
2530 2525 return
2531 2526
2532 2527 version = b'01'
2533 2528 cgversions = b2caps.get(b'changegroup')
2534 2529 if cgversions: # 3.1 and 3.2 ship with an empty value
2535 2530 cgversions = [
2536 2531 v
2537 2532 for v in cgversions
2538 2533 if v in changegroup.supportedoutgoingversions(repo)
2539 2534 ]
2540 2535 if not cgversions:
2541 2536 raise error.Abort(_(b'no common changegroup version'))
2542 2537 version = max(cgversions)
2543 2538
2544 2539 outgoing = _computeoutgoing(repo, heads, common)
2545 2540 if not outgoing.missing:
2546 2541 return
2547 2542
2548 2543 if kwargs.get('narrow', False):
2549 2544 include = sorted(filter(bool, kwargs.get('includepats', [])))
2550 2545 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2551 2546 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2552 2547 else:
2553 2548 matcher = None
2554 2549
2555 2550 cgstream = changegroup.makestream(
2556 2551 repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
2557 2552 )
2558 2553
2559 2554 part = bundler.newpart(b'changegroup', data=cgstream)
2560 2555 if cgversions:
2561 2556 part.addparam(b'version', version)
2562 2557
2563 2558 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2564 2559
2565 2560 if b'treemanifest' in repo.requirements:
2566 2561 part.addparam(b'treemanifest', b'1')
2567 2562
2568 2563 if b'exp-sidedata-flag' in repo.requirements:
2569 2564 part.addparam(b'exp-sidedata', b'1')
2570 2565
2571 2566 if (
2572 2567 kwargs.get('narrow', False)
2573 2568 and kwargs.get('narrow_acl', False)
2574 2569 and (include or exclude)
2575 2570 ):
2576 2571 # this is mandatory because otherwise ACL clients won't work
2577 2572 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2578 2573 narrowspecpart.data = b'%s\0%s' % (
2579 2574 b'\n'.join(include),
2580 2575 b'\n'.join(exclude),
2581 2576 )
2582 2577
2583 2578
2584 2579 @getbundle2partsgenerator(b'bookmarks')
2585 2580 def _getbundlebookmarkpart(
2586 2581 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2587 2582 ):
2588 2583 """add a bookmark part to the requested bundle"""
2589 2584 if not kwargs.get('bookmarks', False):
2590 2585 return
2591 2586 if not b2caps or b'bookmarks' not in b2caps:
2592 2587 raise error.Abort(_(b'no common bookmarks exchange method'))
2593 2588 books = bookmod.listbinbookmarks(repo)
2594 2589 data = bookmod.binaryencode(books)
2595 2590 if data:
2596 2591 bundler.newpart(b'bookmarks', data=data)
2597 2592
2598 2593
2599 2594 @getbundle2partsgenerator(b'listkeys')
2600 2595 def _getbundlelistkeysparts(
2601 2596 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2602 2597 ):
2603 2598 """add parts containing listkeys namespaces to the requested bundle"""
2604 2599 listkeys = kwargs.get('listkeys', ())
2605 2600 for namespace in listkeys:
2606 2601 part = bundler.newpart(b'listkeys')
2607 2602 part.addparam(b'namespace', namespace)
2608 2603 keys = repo.listkeys(namespace).items()
2609 2604 part.data = pushkey.encodekeys(keys)
2610 2605
2611 2606
2612 2607 @getbundle2partsgenerator(b'obsmarkers')
2613 2608 def _getbundleobsmarkerpart(
2614 2609 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2615 2610 ):
2616 2611 """add an obsolescence markers part to the requested bundle"""
2617 2612 if kwargs.get('obsmarkers', False):
2618 2613 if heads is None:
2619 2614 heads = repo.heads()
2620 2615 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2621 2616 markers = repo.obsstore.relevantmarkers(subset)
2622 2617 markers = obsutil.sortedmarkers(markers)
2623 2618 bundle2.buildobsmarkerspart(bundler, markers)
2624 2619
2625 2620
2626 2621 @getbundle2partsgenerator(b'phases')
2627 2622 def _getbundlephasespart(
2628 2623 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2629 2624 ):
2630 2625 """add phase heads part to the requested bundle"""
2631 2626 if kwargs.get('phases', False):
2632 2627 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2633 2628 raise error.Abort(_(b'no common phases exchange method'))
2634 2629 if heads is None:
2635 2630 heads = repo.heads()
2636 2631
2637 2632 headsbyphase = collections.defaultdict(set)
2638 2633 if repo.publishing():
2639 2634 headsbyphase[phases.public] = heads
2640 2635 else:
2641 2636 # find the appropriate heads to move
2642 2637
2643 2638 phase = repo._phasecache.phase
2644 2639 node = repo.changelog.node
2645 2640 rev = repo.changelog.rev
2646 2641 for h in heads:
2647 2642 headsbyphase[phase(repo, rev(h))].add(h)
2648 2643 seenphases = list(headsbyphase.keys())
2649 2644
2650 2645 # We do not handle anything but public and draft phases for now
2651 2646 if seenphases:
2652 2647 assert max(seenphases) <= phases.draft
2653 2648
2654 2649 # if client is pulling non-public changesets, we need to find
2655 2650 # intermediate public heads.
2656 2651 draftheads = headsbyphase.get(phases.draft, set())
2657 2652 if draftheads:
2658 2653 publicheads = headsbyphase.get(phases.public, set())
2659 2654
2660 2655 revset = b'heads(only(%ln, %ln) and public())'
2661 2656 extraheads = repo.revs(revset, draftheads, publicheads)
2662 2657 for r in extraheads:
2663 2658 headsbyphase[phases.public].add(node(r))
2664 2659
2665 2660 # transform data in a format used by the encoding function
2666 2661 phasemapping = {
2667 2662 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2668 2663 }
2669 2664
2670 2665 # generate the actual part
2671 2666 phasedata = phases.binaryencode(phasemapping)
2672 2667 bundler.newpart(b'phase-heads', data=phasedata)
2673 2668
2674 2669
2675 2670 @getbundle2partsgenerator(b'hgtagsfnodes')
2676 2671 def _getbundletagsfnodes(
2677 2672 bundler,
2678 2673 repo,
2679 2674 source,
2680 2675 bundlecaps=None,
2681 2676 b2caps=None,
2682 2677 heads=None,
2683 2678 common=None,
2684 2679 **kwargs
2685 2680 ):
2686 2681 """Transfer the .hgtags filenodes mapping.
2687 2682
2688 2683 Only values for heads in this bundle will be transferred.
2689 2684
2690 2685 The part data consists of pairs of 20 byte changeset node and .hgtags
2691 2686 filenodes raw values.
2692 2687 """
2693 2688 # Don't send unless:
2694 2689 # - changesets are being exchanged,
2695 2690 # - the client supports it.
2696 2691 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2697 2692 return
2698 2693
2699 2694 outgoing = _computeoutgoing(repo, heads, common)
2700 2695 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2701 2696
2702 2697
2703 2698 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2704 2699 def _getbundlerevbranchcache(
2705 2700 bundler,
2706 2701 repo,
2707 2702 source,
2708 2703 bundlecaps=None,
2709 2704 b2caps=None,
2710 2705 heads=None,
2711 2706 common=None,
2712 2707 **kwargs
2713 2708 ):
2714 2709 """Transfer the rev-branch-cache mapping
2715 2710
2716 2711 The payload is a series of data related to each branch
2717 2712
2718 2713 1) branch name length
2719 2714 2) number of open heads
2720 2715 3) number of closed heads
2721 2716 4) open heads nodes
2722 2717 5) closed heads nodes
2723 2718 """
2724 2719 # Don't send unless:
2725 2720 # - changesets are being exchanged,
2726 2721 # - the client supports it.
2727 2722 # - narrow bundle isn't in play (not currently compatible).
2728 2723 if (
2729 2724 not kwargs.get('cg', True)
2730 2725 or not b2caps
2731 2726 or b'rev-branch-cache' not in b2caps
2732 2727 or kwargs.get('narrow', False)
2733 2728 or repo.ui.has_section(_NARROWACL_SECTION)
2734 2729 ):
2735 2730 return
2736 2731
2737 2732 outgoing = _computeoutgoing(repo, heads, common)
2738 2733 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2739 2734
2740 2735
2741 2736 def check_heads(repo, their_heads, context):
2742 2737 """check if the heads of a repo have been modified
2743 2738
2744 2739 Used by peer for unbundling.
2745 2740 """
2746 2741 heads = repo.heads()
2747 2742 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2748 2743 if not (
2749 2744 their_heads == [b'force']
2750 2745 or their_heads == heads
2751 2746 or their_heads == [b'hashed', heads_hash]
2752 2747 ):
2753 2748 # someone else committed/pushed/unbundled while we
2754 2749 # were transferring data
2755 2750 raise error.PushRaced(
2756 2751 b'repository changed while %s - please try again' % context
2757 2752 )
2758 2753
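# Illustrative sketch (editor addition): how a client computes the value
# matched against [b'hashed', heads_hash] above, using the same digest.
#
# from mercurial.utils import hashutil
#
# def hashed_heads(heads):
#     # heads: iterable of 20-byte binary changeset nodes
#     return hashutil.sha1(b''.join(sorted(heads))).digest()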
2759 2754
2760 2755 def unbundle(repo, cg, heads, source, url):
2761 2756 """Apply a bundle to a repo.
2762 2757
2763 2758 This function makes sure the repo is locked during the application and has
2764 2759 a mechanism to check that no push race occurred between the creation of the
2765 2760 bundle and its application.
2766 2761
2767 2762 If the push was raced, a PushRaced exception is raised."""
2768 2763 r = 0
2769 2764 # need a transaction when processing a bundle2 stream
2770 2765 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2771 2766 lockandtr = [None, None, None]
2772 2767 recordout = None
2773 2768 # quick fix for output mismatch with bundle2 in 3.4
2774 2769 captureoutput = repo.ui.configbool(
2775 2770 b'experimental', b'bundle2-output-capture'
2776 2771 )
2777 2772 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2778 2773 captureoutput = True
2779 2774 try:
2780 2775 # note: outside bundle1, 'heads' is expected to be empty and this
2781 2776 # 'check_heads' call will be a no-op
2782 2777 check_heads(repo, heads, b'uploading changes')
2783 2778 # push can proceed
2784 2779 if not isinstance(cg, bundle2.unbundle20):
2785 2780 # legacy case: bundle1 (changegroup 01)
2786 2781 txnname = b"\n".join([source, util.hidepassword(url)])
2787 2782 with repo.lock(), repo.transaction(txnname) as tr:
2788 2783 op = bundle2.applybundle(repo, cg, tr, source, url)
2789 2784 r = bundle2.combinechangegroupresults(op)
2790 2785 else:
2791 2786 r = None
2792 2787 try:
2793 2788
2794 2789 def gettransaction():
2795 2790 if not lockandtr[2]:
2796 2791 if not bookmod.bookmarksinstore(repo):
2797 2792 lockandtr[0] = repo.wlock()
2798 2793 lockandtr[1] = repo.lock()
2799 2794 lockandtr[2] = repo.transaction(source)
2800 2795 lockandtr[2].hookargs[b'source'] = source
2801 2796 lockandtr[2].hookargs[b'url'] = url
2802 2797 lockandtr[2].hookargs[b'bundle2'] = b'1'
2803 2798 return lockandtr[2]
2804 2799
2805 2800 # Do greedy locking by default until we're satisfied with lazy
2806 2801 # locking.
2807 2802 if not repo.ui.configbool(
2808 2803 b'experimental', b'bundle2lazylocking'
2809 2804 ):
2810 2805 gettransaction()
2811 2806
2812 2807 op = bundle2.bundleoperation(
2813 2808 repo,
2814 2809 gettransaction,
2815 2810 captureoutput=captureoutput,
2816 2811 source=b'push',
2817 2812 )
2818 2813 try:
2819 2814 op = bundle2.processbundle(repo, cg, op=op)
2820 2815 finally:
2821 2816 r = op.reply
2822 2817 if captureoutput and r is not None:
2823 2818 repo.ui.pushbuffer(error=True, subproc=True)
2824 2819
2825 2820 def recordout(output):
2826 2821 r.newpart(b'output', data=output, mandatory=False)
2827 2822
2828 2823 if lockandtr[2] is not None:
2829 2824 lockandtr[2].close()
2830 2825 except BaseException as exc:
2831 2826 exc.duringunbundle2 = True
2832 2827 if captureoutput and r is not None:
2833 2828 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2834 2829
2835 2830 def recordout(output):
2836 2831 part = bundle2.bundlepart(
2837 2832 b'output', data=output, mandatory=False
2838 2833 )
2839 2834 parts.append(part)
2840 2835
2841 2836 raise
2842 2837 finally:
2843 2838 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2844 2839 if recordout is not None:
2845 2840 recordout(repo.ui.popbuffer())
2846 2841 return r
2847 2842
2848 2843
2849 2844 def _maybeapplyclonebundle(pullop):
2850 2845 """Apply a clone bundle from a remote, if possible."""
2851 2846
2852 2847 repo = pullop.repo
2853 2848 remote = pullop.remote
2854 2849
2855 2850 if not repo.ui.configbool(b'ui', b'clonebundles'):
2856 2851 return
2857 2852
2858 2853 # Only run if local repo is empty.
2859 2854 if len(repo):
2860 2855 return
2861 2856
2862 2857 if pullop.heads:
2863 2858 return
2864 2859
2865 2860 if not remote.capable(b'clonebundles'):
2866 2861 return
2867 2862
2868 2863 with remote.commandexecutor() as e:
2869 2864 res = e.callcommand(b'clonebundles', {}).result()
2870 2865
2871 2866 # If we call the wire protocol command, that's good enough to record the
2872 2867 # attempt.
2873 2868 pullop.clonebundleattempted = True
2874 2869
2875 2870 entries = parseclonebundlesmanifest(repo, res)
2876 2871 if not entries:
2877 2872 repo.ui.note(
2878 2873 _(
2879 2874 b'no clone bundles available on remote; '
2880 2875 b'falling back to regular clone\n'
2881 2876 )
2882 2877 )
2883 2878 return
2884 2879
2885 2880 entries = filterclonebundleentries(
2886 2881 repo, entries, streamclonerequested=pullop.streamclonerequested
2887 2882 )
2888 2883
2889 2884 if not entries:
2890 2885 # There is a thundering herd concern here. However, if a server
2891 2886 # operator doesn't advertise bundles appropriate for its clients,
2892 2887 # they deserve what's coming. Furthermore, from a client's
2893 2888 # perspective, no automatic fallback would mean not being able to
2894 2889 # clone!
2895 2890 repo.ui.warn(
2896 2891 _(
2897 2892 b'no compatible clone bundles available on server; '
2898 2893 b'falling back to regular clone\n'
2899 2894 )
2900 2895 )
2901 2896 repo.ui.warn(
2902 2897 _(b'(you may want to report this to the server operator)\n')
2903 2898 )
2904 2899 return
2905 2900
2906 2901 entries = sortclonebundleentries(repo.ui, entries)
2907 2902
2908 2903 url = entries[0][b'URL']
2909 2904 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2910 2905 if trypullbundlefromurl(repo.ui, repo, url):
2911 2906 repo.ui.status(_(b'finished applying clone bundle\n'))
2912 2907 # Bundle failed.
2913 2908 #
2914 2909 # We abort by default to avoid the thundering herd of
2915 2910 # clients flooding a server that was expecting expensive
2916 2911 # clone load to be offloaded.
2917 2912 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2918 2913 repo.ui.warn(_(b'falling back to normal clone\n'))
2919 2914 else:
2920 2915 raise error.Abort(
2921 2916 _(b'error applying bundle'),
2922 2917 hint=_(
2923 2918 b'if this error persists, consider contacting '
2924 2919 b'the server operator or disable clone '
2925 2920 b'bundles via '
2926 2921 b'"--config ui.clonebundles=false"'
2927 2922 ),
2928 2923 )
2929 2924
2930 2925
2931 2926 def parseclonebundlesmanifest(repo, s):
2932 2927 """Parses the raw text of a clone bundles manifest.
2933 2928
2934 2929 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2935 2930 to the URL and the other keys are the attributes for the entry.
2936 2931 """
2937 2932 m = []
2938 2933 for line in s.splitlines():
2939 2934 fields = line.split()
2940 2935 if not fields:
2941 2936 continue
2942 2937 attrs = {b'URL': fields[0]}
2943 2938 for rawattr in fields[1:]:
2944 2939 key, value = rawattr.split(b'=', 1)
2945 2940 key = urlreq.unquote(key)
2946 2941 value = urlreq.unquote(value)
2947 2942 attrs[key] = value
2948 2943
2949 2944 # Parse BUNDLESPEC into components. This makes client-side
2950 2945 # preferences easier to specify since you can prefer a single
2951 2946 # component of the BUNDLESPEC.
2952 2947 if key == b'BUNDLESPEC':
2953 2948 try:
2954 2949 bundlespec = parsebundlespec(repo, value)
2955 2950 attrs[b'COMPRESSION'] = bundlespec.compression
2956 2951 attrs[b'VERSION'] = bundlespec.version
2957 2952 except error.InvalidBundleSpecification:
2958 2953 pass
2959 2954 except error.UnsupportedBundleSpecification:
2960 2955 pass
2961 2956
2962 2957 m.append(attrs)
2963 2958
2964 2959 return m
2965 2960
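# Illustrative sketch (editor addition): a small manifest and the entries it
# parses to.  The URLs are made up.
#
# manifest = (
#     b'https://example.com/full.hg BUNDLESPEC=gzip-v2\n'
#     b'https://example.com/stream.hg BUNDLESPEC=none-packed1 REQUIRESNI=true\n'
# )
# entries = parseclonebundlesmanifest(repo, manifest)
# # entries[0] => {b'URL': b'https://example.com/full.hg',
# #                b'BUNDLESPEC': b'gzip-v2',
# #                b'COMPRESSION': b'gzip', b'VERSION': b'v2'}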
2966 2961
2967 2962 def isstreamclonespec(bundlespec):
2968 2963 # Stream clone v1
2969 2964 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
2970 2965 return True
2971 2966
2972 2967 # Stream clone v2
2973 2968 if (
2974 2969 bundlespec.wirecompression == b'UN'
2975 2970 and bundlespec.wireversion == b'02'
2976 2971 and bundlespec.contentopts.get(b'streamv2')
2977 2972 ):
2978 2973 return True
2979 2974
2980 2975 return False
2981 2976
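# Illustrative sketch (editor addition): bundlespecs that satisfy the checks
# above.
#
# isstreamclonespec(parsebundlespec(repo, b'none-packed1'))       # stream v1
# isstreamclonespec(parsebundlespec(repo, b'none-v2;stream=v2'))  # stream v2
# isstreamclonespec(parsebundlespec(repo, b'gzip-v2'))            # False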
2982 2977
2983 2978 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2984 2979 """Remove incompatible clone bundle manifest entries.
2985 2980
2986 2981 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2987 2982 and returns a new list consisting of only the entries that this client
2988 2983 should be able to apply.
2989 2984
2990 2985 There is no guarantee we'll be able to apply all returned entries because
2991 2986 the metadata we use to filter on may be missing or wrong.
2992 2987 """
2993 2988 newentries = []
2994 2989 for entry in entries:
2995 2990 spec = entry.get(b'BUNDLESPEC')
2996 2991 if spec:
2997 2992 try:
2998 2993 bundlespec = parsebundlespec(repo, spec, strict=True)
2999 2994
3000 2995 # If a stream clone was requested, filter out non-streamclone
3001 2996 # entries.
3002 2997 if streamclonerequested and not isstreamclonespec(bundlespec):
3003 2998 repo.ui.debug(
3004 2999 b'filtering %s because not a stream clone\n'
3005 3000 % entry[b'URL']
3006 3001 )
3007 3002 continue
3008 3003
3009 3004 except error.InvalidBundleSpecification as e:
3010 3005 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3011 3006 continue
3012 3007 except error.UnsupportedBundleSpecification as e:
3013 3008 repo.ui.debug(
3014 3009 b'filtering %s because unsupported bundle '
3015 3010 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3016 3011 )
3017 3012 continue
3018 3013 # If we don't have a spec and requested a stream clone, we don't know
3019 3014 # what the entry is so don't attempt to apply it.
3020 3015 elif streamclonerequested:
3021 3016 repo.ui.debug(
3022 3017 b'filtering %s because cannot determine if a stream '
3023 3018 b'clone bundle\n' % entry[b'URL']
3024 3019 )
3025 3020 continue
3026 3021
3027 3022 if b'REQUIRESNI' in entry and not sslutil.hassni:
3028 3023 repo.ui.debug(
3029 3024 b'filtering %s because SNI not supported\n' % entry[b'URL']
3030 3025 )
3031 3026 continue
3032 3027
3033 3028 if b'REQUIREDRAM' in entry:
3034 3029 try:
3035 3030 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3036 3031 except error.ParseError:
3037 3032 repo.ui.debug(
3038 3033 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3039 3034 % entry[b'URL']
3040 3035 )
3041 3036 continue
3042 3037 actualram = repo.ui.estimatememory()
3043 3038 if actualram is not None and actualram * 0.66 < requiredram:
3044 3039 repo.ui.debug(
3045 3040 b'filtering %s as it needs more than 2/3 of system memory\n'
3046 3041 % entry[b'URL']
3047 3042 )
3048 3043 continue
3049 3044
3050 3045 newentries.append(entry)
3051 3046
3052 3047 return newentries
3053 3048
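# Illustrative note (editor addition): the REQUIREDRAM check above keeps an
# entry only when the estimated memory times 0.66 is at least the required
# amount.  E.g. with REQUIREDRAM=12GB, a machine estimating 16GB is filtered
# out (16 * 0.66 = 10.56 < 12), while one estimating 20GB keeps the entry
# (20 * 0.66 = 13.2 >= 12).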
3054 3049
3055 3050 class clonebundleentry(object):
3056 3051 """Represents an item in a clone bundles manifest.
3057 3052
3058 3053 This rich class is needed to support sorting since sorted() in Python 3
3059 3054 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3060 3055 won't work.
3061 3056 """
3062 3057
3063 3058 def __init__(self, value, prefers):
3064 3059 self.value = value
3065 3060 self.prefers = prefers
3066 3061
3067 3062 def _cmp(self, other):
3068 3063 for prefkey, prefvalue in self.prefers:
3069 3064 avalue = self.value.get(prefkey)
3070 3065 bvalue = other.value.get(prefkey)
3071 3066
3072 3067 # Special case for b missing the attribute and a matching exactly.
3073 3068 if avalue is not None and bvalue is None and avalue == prefvalue:
3074 3069 return -1
3075 3070
3076 3071 # Special case for a missing the attribute and b matching exactly.
3077 3072 if bvalue is not None and avalue is None and bvalue == prefvalue:
3078 3073 return 1
3079 3074
3080 3075 # We can't compare unless attribute present on both.
3081 3076 if avalue is None or bvalue is None:
3082 3077 continue
3083 3078
3084 3079 # Same values should fall back to next attribute.
3085 3080 if avalue == bvalue:
3086 3081 continue
3087 3082
3088 3083 # Exact matches come first.
3089 3084 if avalue == prefvalue:
3090 3085 return -1
3091 3086 if bvalue == prefvalue:
3092 3087 return 1
3093 3088
3094 3089 # Fall back to next attribute.
3095 3090 continue
3096 3091
3097 3092 # If we got here we couldn't sort by attributes and prefers. Fall
3098 3093 # back to index order.
3099 3094 return 0
3100 3095
3101 3096 def __lt__(self, other):
3102 3097 return self._cmp(other) < 0
3103 3098
3104 3099 def __gt__(self, other):
3105 3100 return self._cmp(other) > 0
3106 3101
3107 3102 def __eq__(self, other):
3108 3103 return self._cmp(other) == 0
3109 3104
3110 3105 def __le__(self, other):
3111 3106 return self._cmp(other) <= 0
3112 3107
3113 3108 def __ge__(self, other):
3114 3109 return self._cmp(other) >= 0
3115 3110
3116 3111 def __ne__(self, other):
3117 3112 return self._cmp(other) != 0
3118 3113
3119 3114
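The docstring above notes that sorted() in Python 3 cannot take a ``cmp`` function; the class works because sorted() only needs the rich comparison methods. A minimal sketch of the same pattern (not part of exchange.py: it drops the missing-attribute special cases, and the attribute names are invented):

    class _prefsortable(object):
        """Toy clonebundleentry: order dicts by a prefers list."""

        def __init__(self, value, prefers):
            self.value = value
            self.prefers = prefers

        def _cmp(self, other):
            for key, pref in self.prefers:
                a = self.value.get(key)
                b = other.value.get(key)
                if a == b:
                    continue  # tie: fall through to the next preference
                if a == pref:
                    return -1  # exact matches sort first
                if b == pref:
                    return 1
            return 0  # no preference applied: keep manifest order

        def __lt__(self, other):  # the only method sorted() requires
            return self._cmp(other) < 0

    entries = [{b'COMPRESSION': b'gzip'}, {b'COMPRESSION': b'zstd'}]
    prefers = [(b'COMPRESSION', b'zstd')]
    print([e.value for e in sorted(_prefsortable(v, prefers) for v in entries)])
    # the zstd entry sorts before the gzip one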
3120 3115 def sortclonebundleentries(ui, entries):
3121 3116 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3122 3117 if not prefers:
3123 3118 return list(entries)
3124 3119
3125 3120 def _split(p):
3126 3121 if b'=' not in p:
3127 3122 hint = _(b"each comma separated item should be key=value pairs")
3128 3123 raise error.Abort(
3129 3124 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3130 3125 )
3131 3126 return p.split(b'=', 1)
3132 3127
3133 3128 prefers = [_split(p) for p in prefers]
3134 3129
3135 3130 items = sorted(clonebundleentry(v, prefers) for v in entries)
3136 3131 return [i.value for i in items]
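For reference, this is roughly what the preference parsing above does with a configuration value; a simplified sketch (ui.configlist also strips whitespace, and the setting shown is an invented example):

    # [ui]
    # clonebundleprefers = COMPRESSION=zstd, COMPRESSION=gzip
    raw = [b'COMPRESSION=zstd', b'COMPRESSION=gzip']  # what configlist yields
    prefers = [p.split(b'=', 1) for p in raw]
    print(prefers)  # [[b'COMPRESSION', b'zstd'], [b'COMPRESSION', b'gzip']]
    # an item without '=' raises the Abort above instead of being split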
3137 3132
3138 3133
3139 3134 def trypullbundlefromurl(ui, repo, url):
3140 3135 """Attempt to apply a bundle from a URL."""
3141 3136 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3142 3137 try:
3143 3138 fh = urlmod.open(ui, url)
3144 3139 cg = readbundle(ui, fh, b'stream')
3145 3140
3146 3141 if isinstance(cg, streamclone.streamcloneapplier):
3147 3142 cg.apply(repo)
3148 3143 else:
3149 3144 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3150 3145 return True
3151 3146 except urlerr.httperror as e:
3152 3147 ui.warn(
3153 3148 _(b'HTTP error fetching bundle: %s\n')
3154 3149 % stringutil.forcebytestr(e)
3155 3150 )
3156 3151 except urlerr.urlerror as e:
3157 3152 ui.warn(
3158 3153 _(b'error fetching bundle: %s\n')
3159 3154 % stringutil.forcebytestr(e.reason)
3160 3155 )
3161 3156
3162 3157 return False
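Tying the pieces together, a caller filters and sorts the manifest entries and then tries each URL in order. A hedged sketch of such a driver (the helper name is hypothetical; ui and repo stand for live Mercurial objects, and entries are parsed clone-bundles manifest rows):

    from mercurial import exchange

    def applybestclonebundle(ui, repo, entries):
        entries = exchange.filterclonebundleentries(repo, entries)
        entries = exchange.sortclonebundleentries(ui, entries)
        for entry in entries:
            # trypullbundlefromurl returns True once a bundle was applied
            if exchange.trypullbundlefromurl(ui, repo, entry[b'URL']):
                return True
        return False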
@@ -1,800 +1,798 b''
1 1 Test file dedicated to testing the divergent troubles from obsolete changesets.
2 2
3 3 These are the most complex troubles by far, so we isolate them in a
4 4 dedicated file.
5 5
6 6 Enable obsolete
7 7
8 8 $ cat >> $HGRCPATH << EOF
9 9 > [ui]
10 10 > logtemplate = {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
11 11 > [experimental]
12 12 > evolution.createmarkers=True
13 13 > [extensions]
14 14 > drawdag=$TESTDIR/drawdag.py
15 15 > [alias]
16 16 > debugobsolete = debugobsolete -d '0 0'
17 17 > [phases]
18 18 > publish=False
19 19 > [templates]
20 20 > wuentryshort = '{instability}:{if(divergentnodes, " ")}{divergentnodes} {reason} {node|shortest}\n'
21 21 > whyunstableshort = '{whyunstable % wuentryshort}'
22 22 > wuentryshorter = '{instability}:{divergentnodes % " {node|shortest} ({phase})"} {reason} {node|shortest}\n'
23 23 > whyunstableshorter = '{whyunstable % wuentryshorter}'
24 24 > EOF
25 25
26 26
27 27 $ mkcommit() {
28 28 > echo "$1" > "$1"
29 29 > hg add "$1"
30 30 > hg ci -m "$1"
31 31 > }
32 32 $ getid() {
33 33 > hg log --hidden -r "desc('$1')" -T '{node}\n'
34 34 > }
35 35
36 36 setup repo
37 37
38 38 $ hg init reference
39 39 $ cd reference
40 40 $ mkcommit base
41 41 $ mkcommit A_0
42 42 $ hg up 0
43 43 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
44 44 $ mkcommit A_1
45 45 created new head
46 46 $ hg up 0
47 47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 48 $ mkcommit A_2
49 49 created new head
50 50 $ hg up 0
51 51 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
52 52 $ cd ..
53 53
54 54
55 55 $ newcase() {
56 56 > hg clone -u 0 -q reference $1
57 57 > cd $1
58 58 > }
59 59
60 60 direct divergence
61 61 -----------------
62 62
63 63 A_0 has two direct and divergent successors, A_1 and A_2
64 64
65 65 $ newcase direct
66 66 $ hg debugobsolete `getid A_0` `getid A_1`
67 67 1 new obsolescence markers
68 68 obsoleted 1 changesets
69 69 $ hg debugobsolete `getid A_0` `getid A_2`
70 70 1 new obsolescence markers
71 71 2 new content-divergent changesets
72 72 $ hg log -G --hidden
73 73 * 3:392fd25390da A_2
74 74 |
75 75 | * 2:82623d38b9ba A_1
76 76 |/
77 77 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
78 78 |/
79 79 @ 0:d20a80d4def3 base
80 80
81 81 $ hg debugsuccessorssets --hidden 'all()'
82 82 d20a80d4def3
83 83 d20a80d4def3
84 84 007dc284c1f8
85 85 82623d38b9ba
86 86 392fd25390da
87 87 82623d38b9ba
88 88 82623d38b9ba
89 89 392fd25390da
90 90 392fd25390da
91 91 $ hg log -r 'contentdivergent()'
92 92 2:82623d38b9ba A_1
93 93 3:392fd25390da A_2
94 94 $ hg log -r 'unstable()'
95 95 2:82623d38b9ba A_1
96 96 3:392fd25390da A_2
97 97 $ hg debugsuccessorssets 'all()' --closest
98 98 d20a80d4def3
99 99 d20a80d4def3
100 100 82623d38b9ba
101 101 82623d38b9ba
102 102 392fd25390da
103 103 392fd25390da
104 104 $ hg debugsuccessorssets 'all()' --closest --hidden
105 105 d20a80d4def3
106 106 d20a80d4def3
107 107 007dc284c1f8
108 108 82623d38b9ba
109 109 392fd25390da
110 110 82623d38b9ba
111 111 82623d38b9ba
112 112 392fd25390da
113 113 392fd25390da
114 114
115 115 check that mercurial refuses to push
116 116
117 117 $ hg init ../other
118 118 $ hg push ../other
119 119 pushing to ../other
120 120 searching for changes
121 abort: push includes unstable changesets:
122 82623d38b9ba (content-divergent)
123 392fd25390da (content-divergent)
121 abort: push includes content-divergent changeset: 392fd25390da!
124 122 [255]
125 123
126 124 $ cd ..
127 125
128 126
129 127 indirect divergence with known changeset
130 128 -------------------------------------------
131 129
132 130 $ newcase indirect_known
133 131 $ hg debugobsolete `getid A_0` `getid A_1`
134 132 1 new obsolescence markers
135 133 obsoleted 1 changesets
136 134 $ hg debugobsolete `getid A_0` `getid A_2`
137 135 1 new obsolescence markers
138 136 2 new content-divergent changesets
139 137 $ mkcommit A_3
140 138 created new head
141 139 $ hg debugobsolete `getid A_2` `getid A_3`
142 140 1 new obsolescence markers
143 141 obsoleted 1 changesets
144 142 $ hg log -G --hidden
145 143 @ 4:01f36c5a8fda A_3
146 144 |
147 145 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
148 146 |/
149 147 | * 2:82623d38b9ba A_1
150 148 |/
151 149 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
152 150 |/
153 151 o 0:d20a80d4def3 base
154 152
155 153 $ hg debugsuccessorssets --hidden 'all()'
156 154 d20a80d4def3
157 155 d20a80d4def3
158 156 007dc284c1f8
159 157 82623d38b9ba
160 158 01f36c5a8fda
161 159 82623d38b9ba
162 160 82623d38b9ba
163 161 392fd25390da
164 162 01f36c5a8fda
165 163 01f36c5a8fda
166 164 01f36c5a8fda
167 165 $ hg log -r 'contentdivergent()'
168 166 2:82623d38b9ba A_1
169 167 4:01f36c5a8fda A_3
170 168 $ hg debugsuccessorssets 'all()' --closest
171 169 d20a80d4def3
172 170 d20a80d4def3
173 171 82623d38b9ba
174 172 82623d38b9ba
175 173 01f36c5a8fda
176 174 01f36c5a8fda
177 175 $ hg debugsuccessorssets 'all()' --closest --hidden
178 176 d20a80d4def3
179 177 d20a80d4def3
180 178 007dc284c1f8
181 179 82623d38b9ba
182 180 392fd25390da
183 181 82623d38b9ba
184 182 82623d38b9ba
185 183 392fd25390da
186 184 392fd25390da
187 185 01f36c5a8fda
188 186 01f36c5a8fda
189 187 $ cd ..
190 188
191 189
192 190 indirect divergence with unknown changeset
193 191 -------------------------------------------
194 192
195 193 $ newcase indirect_unknown
196 194 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
197 195 1 new obsolescence markers
198 196 obsoleted 1 changesets
199 197 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
200 198 1 new obsolescence markers
201 199 $ hg debugobsolete `getid A_0` `getid A_2`
202 200 1 new obsolescence markers
203 201 2 new content-divergent changesets
204 202 $ hg log -G --hidden
205 203 * 3:392fd25390da A_2
206 204 |
207 205 | * 2:82623d38b9ba A_1
208 206 |/
209 207 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
210 208 |/
211 209 @ 0:d20a80d4def3 base
212 210
213 211 $ hg debugsuccessorssets --hidden 'all()'
214 212 d20a80d4def3
215 213 d20a80d4def3
216 214 007dc284c1f8
217 215 82623d38b9ba
218 216 392fd25390da
219 217 82623d38b9ba
220 218 82623d38b9ba
221 219 392fd25390da
222 220 392fd25390da
223 221 $ hg log -r 'contentdivergent()'
224 222 2:82623d38b9ba A_1
225 223 3:392fd25390da A_2
226 224 $ hg debugsuccessorssets 'all()' --closest
227 225 d20a80d4def3
228 226 d20a80d4def3
229 227 82623d38b9ba
230 228 82623d38b9ba
231 229 392fd25390da
232 230 392fd25390da
233 231 $ hg debugsuccessorssets 'all()' --closest --hidden
234 232 d20a80d4def3
235 233 d20a80d4def3
236 234 007dc284c1f8
237 235 82623d38b9ba
238 236 392fd25390da
239 237 82623d38b9ba
240 238 82623d38b9ba
241 239 392fd25390da
242 240 392fd25390da
243 241 $ cd ..
244 242
245 243 do not take unknown nodes into account if they are final
246 244 -----------------------------------------------------
247 245
248 246 $ newcase final-unknown
249 247 $ hg debugobsolete `getid A_0` `getid A_1`
250 248 1 new obsolescence markers
251 249 obsoleted 1 changesets
252 250 $ hg debugobsolete `getid A_1` `getid A_2`
253 251 1 new obsolescence markers
254 252 obsoleted 1 changesets
255 253 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
256 254 1 new obsolescence markers
257 255 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
258 256 1 new obsolescence markers
259 257 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
260 258 1 new obsolescence markers
261 259
262 260 $ hg debugsuccessorssets --hidden 'desc('A_0')'
263 261 007dc284c1f8
264 262 392fd25390da
265 263 $ hg debugsuccessorssets 'desc('A_0')' --closest
266 264 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
267 265 007dc284c1f8
268 266 82623d38b9ba
269 267
270 268 $ cd ..
271 269
272 270 divergence that converges again is not divergence anymore
273 271 -----------------------------------------------------
274 272
275 273 $ newcase converged_divergence
276 274 $ hg debugobsolete `getid A_0` `getid A_1`
277 275 1 new obsolescence markers
278 276 obsoleted 1 changesets
279 277 $ hg debugobsolete `getid A_0` `getid A_2`
280 278 1 new obsolescence markers
281 279 2 new content-divergent changesets
282 280 $ mkcommit A_3
283 281 created new head
284 282 $ hg debugobsolete `getid A_1` `getid A_3`
285 283 1 new obsolescence markers
286 284 obsoleted 1 changesets
287 285 $ hg debugobsolete `getid A_2` `getid A_3`
288 286 1 new obsolescence markers
289 287 obsoleted 1 changesets
290 288 $ hg log -G --hidden
291 289 @ 4:01f36c5a8fda A_3
292 290 |
293 291 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
294 292 |/
295 293 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
296 294 |/
297 295 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
298 296 |/
299 297 o 0:d20a80d4def3 base
300 298
301 299 $ hg debugsuccessorssets --hidden 'all()'
302 300 d20a80d4def3
303 301 d20a80d4def3
304 302 007dc284c1f8
305 303 01f36c5a8fda
306 304 82623d38b9ba
307 305 01f36c5a8fda
308 306 392fd25390da
309 307 01f36c5a8fda
310 308 01f36c5a8fda
311 309 01f36c5a8fda
312 310 $ hg log -r 'contentdivergent()'
313 311 $ hg debugsuccessorssets 'all()' --closest
314 312 d20a80d4def3
315 313 d20a80d4def3
316 314 01f36c5a8fda
317 315 01f36c5a8fda
318 316 $ hg debugsuccessorssets 'all()' --closest --hidden
319 317 d20a80d4def3
320 318 d20a80d4def3
321 319 007dc284c1f8
322 320 82623d38b9ba
323 321 392fd25390da
324 322 82623d38b9ba
325 323 82623d38b9ba
326 324 392fd25390da
327 325 392fd25390da
328 326 01f36c5a8fda
329 327 01f36c5a8fda
330 328 $ cd ..
331 329
332 330 a split is not divergence
333 331 -----------------------------
334 332
335 333 $ newcase split
336 334 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
337 335 1 new obsolescence markers
338 336 obsoleted 1 changesets
339 337 $ hg log -G --hidden
340 338 o 3:392fd25390da A_2
341 339 |
342 340 | o 2:82623d38b9ba A_1
343 341 |/
344 342 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
345 343 |/
346 344 @ 0:d20a80d4def3 base
347 345
348 346 $ hg debugsuccessorssets --hidden 'all()'
349 347 d20a80d4def3
350 348 d20a80d4def3
351 349 007dc284c1f8
352 350 82623d38b9ba 392fd25390da
353 351 82623d38b9ba
354 352 82623d38b9ba
355 353 392fd25390da
356 354 392fd25390da
357 355 $ hg log -r 'contentdivergent()'
358 356 $ hg debugsuccessorssets 'all()' --closest
359 357 d20a80d4def3
360 358 d20a80d4def3
361 359 82623d38b9ba
362 360 82623d38b9ba
363 361 392fd25390da
364 362 392fd25390da
365 363 $ hg debugsuccessorssets 'all()' --closest --hidden
366 364 d20a80d4def3
367 365 d20a80d4def3
368 366 007dc284c1f8
369 367 82623d38b9ba 392fd25390da
370 368 82623d38b9ba
371 369 82623d38b9ba
372 370 392fd25390da
373 371 392fd25390da
374 372
375 373 Even when subsequent rewriting happens
376 374
377 375 $ mkcommit A_3
378 376 created new head
379 377 $ hg debugobsolete `getid A_1` `getid A_3`
380 378 1 new obsolescence markers
381 379 obsoleted 1 changesets
382 380 $ hg up 0
383 381 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
384 382 $ mkcommit A_4
385 383 created new head
386 384 $ hg debugobsolete `getid A_2` `getid A_4`
387 385 1 new obsolescence markers
388 386 obsoleted 1 changesets
389 387 $ hg up 0
390 388 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
391 389 $ mkcommit A_5
392 390 created new head
393 391 $ hg debugobsolete `getid A_4` `getid A_5`
394 392 1 new obsolescence markers
395 393 obsoleted 1 changesets
396 394 $ hg log -G --hidden
397 395 @ 6:e442cfc57690 A_5
398 396 |
399 397 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
400 398 |/
401 399 | o 4:01f36c5a8fda A_3
402 400 |/
403 401 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
404 402 |/
405 403 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
406 404 |/
407 405 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
408 406 |/
409 407 o 0:d20a80d4def3 base
410 408
411 409 $ hg debugsuccessorssets --hidden 'all()'
412 410 d20a80d4def3
413 411 d20a80d4def3
414 412 007dc284c1f8
415 413 01f36c5a8fda e442cfc57690
416 414 82623d38b9ba
417 415 01f36c5a8fda
418 416 392fd25390da
419 417 e442cfc57690
420 418 01f36c5a8fda
421 419 01f36c5a8fda
422 420 6a411f0d7a0a
423 421 e442cfc57690
424 422 e442cfc57690
425 423 e442cfc57690
426 424 $ hg debugsuccessorssets 'all()' --closest
427 425 d20a80d4def3
428 426 d20a80d4def3
429 427 01f36c5a8fda
430 428 01f36c5a8fda
431 429 e442cfc57690
432 430 e442cfc57690
433 431 $ hg debugsuccessorssets 'all()' --closest --hidden
434 432 d20a80d4def3
435 433 d20a80d4def3
436 434 007dc284c1f8
437 435 82623d38b9ba 392fd25390da
438 436 82623d38b9ba
439 437 82623d38b9ba
440 438 392fd25390da
441 439 392fd25390da
442 440 01f36c5a8fda
443 441 01f36c5a8fda
444 442 6a411f0d7a0a
445 443 e442cfc57690
446 444 e442cfc57690
447 445 e442cfc57690
448 446 $ hg log -r 'contentdivergent()'
449 447
451 449 Check a more complex obsolescence graph (with divergence)
451 449
452 450 $ mkcommit B_0; hg up 0
453 451 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
454 452 $ hg debugobsolete `getid B_0` `getid A_2`
455 453 1 new obsolescence markers
456 454 obsoleted 1 changesets
457 455 $ mkcommit A_7; hg up 0
458 456 created new head
459 457 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
460 458 $ mkcommit A_8; hg up 0
461 459 created new head
462 460 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
463 461 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
464 462 1 new obsolescence markers
465 463 obsoleted 1 changesets
466 464 $ mkcommit A_9; hg up 0
467 465 created new head
468 466 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
469 467 $ hg debugobsolete `getid A_5` `getid A_9`
470 468 1 new obsolescence markers
471 469 4 new content-divergent changesets
472 470 $ hg log -G --hidden
473 471 * 10:bed64f5d2f5a A_9
474 472 |
475 473 | * 9:14608b260df8 A_8
476 474 |/
477 475 | * 8:7ae126973a96 A_7
478 476 |/
479 477 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
480 478 | |
481 479 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
482 480 |/
483 481 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
484 482 |/
485 483 | * 4:01f36c5a8fda A_3
486 484 |/
487 485 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
488 486 |/
489 487 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
490 488 |/
491 489 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
492 490 |/
493 491 @ 0:d20a80d4def3 base
494 492
495 493 $ hg debugsuccessorssets --hidden 'all()'
496 494 d20a80d4def3
497 495 d20a80d4def3
498 496 007dc284c1f8
499 497 01f36c5a8fda bed64f5d2f5a
500 498 01f36c5a8fda 7ae126973a96 14608b260df8
501 499 82623d38b9ba
502 500 01f36c5a8fda
503 501 392fd25390da
504 502 bed64f5d2f5a
505 503 7ae126973a96 14608b260df8
506 504 01f36c5a8fda
507 505 01f36c5a8fda
508 506 6a411f0d7a0a
509 507 bed64f5d2f5a
510 508 7ae126973a96 14608b260df8
511 509 e442cfc57690
512 510 bed64f5d2f5a
513 511 7ae126973a96 14608b260df8
514 512 3750ebee865d
515 513 bed64f5d2f5a
516 514 7ae126973a96 14608b260df8
517 515 7ae126973a96
518 516 7ae126973a96
519 517 14608b260df8
520 518 14608b260df8
521 519 bed64f5d2f5a
522 520 bed64f5d2f5a
523 521 $ hg debugsuccessorssets 'all()' --closest
524 522 d20a80d4def3
525 523 d20a80d4def3
526 524 01f36c5a8fda
527 525 01f36c5a8fda
528 526 7ae126973a96
529 527 7ae126973a96
530 528 14608b260df8
531 529 14608b260df8
532 530 bed64f5d2f5a
533 531 bed64f5d2f5a
534 532 $ hg debugsuccessorssets 'all()' --closest --hidden
535 533 d20a80d4def3
536 534 d20a80d4def3
537 535 007dc284c1f8
538 536 82623d38b9ba 392fd25390da
539 537 82623d38b9ba
540 538 82623d38b9ba
541 539 392fd25390da
542 540 392fd25390da
543 541 01f36c5a8fda
544 542 01f36c5a8fda
545 543 6a411f0d7a0a
546 544 e442cfc57690
547 545 e442cfc57690
548 546 e442cfc57690
549 547 3750ebee865d
550 548 392fd25390da
551 549 7ae126973a96
552 550 7ae126973a96
553 551 14608b260df8
554 552 14608b260df8
555 553 bed64f5d2f5a
556 554 bed64f5d2f5a
557 555 $ hg log -r 'contentdivergent()'
558 556 4:01f36c5a8fda A_3
559 557 8:7ae126973a96 A_7
560 558 9:14608b260df8 A_8
561 559 10:bed64f5d2f5a A_9
562 560
563 561 $ hg log -r bed64f5d2f5a -T '{whyunstable}\n' | sort
564 562 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007dc284c1f8
565 563 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442cfc57690
566 564 $ hg log -r bed64f5d2f5a -T whyunstableshort | sort
567 565 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007d
568 566 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442
569 567 $ hg log -r bed64f5d2f5a -T whyunstableshorter | sort
570 568 content-divergent: 01f3 (draft) 7ae1 (draft) 1460 (draft) predecessor 007d
571 569 content-divergent: 7ae1 (draft) 1460 (draft) predecessor e442
572 570
573 571 fix the divergence
574 572
575 573 $ mkcommit A_A; hg up 0
576 574 created new head
577 575 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
578 576 $ hg debugobsolete `getid A_9` `getid A_A`
579 577 1 new obsolescence markers
580 578 obsoleted 1 changesets
581 579 $ hg debugobsolete `getid A_7` `getid A_A`
582 580 1 new obsolescence markers
583 581 obsoleted 1 changesets
584 582 $ hg debugobsolete `getid A_8` `getid A_A`
585 583 1 new obsolescence markers
586 584 obsoleted 1 changesets
587 585 $ hg log -G --hidden
588 586 o 11:a139f71be9da A_A
589 587 |
590 588 | x 10:bed64f5d2f5a A_9 [rewritten as 11:a139f71be9da]
591 589 |/
592 590 | x 9:14608b260df8 A_8 [rewritten as 11:a139f71be9da]
593 591 |/
594 592 | x 8:7ae126973a96 A_7 [rewritten as 11:a139f71be9da]
595 593 |/
596 594 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
597 595 | |
598 596 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
599 597 |/
600 598 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
601 599 |/
602 600 | o 4:01f36c5a8fda A_3
603 601 |/
604 602 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
605 603 |/
606 604 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
607 605 |/
608 606 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
609 607 |/
610 608 @ 0:d20a80d4def3 base
611 609
612 610 $ hg debugsuccessorssets --hidden 'all()'
613 611 d20a80d4def3
614 612 d20a80d4def3
615 613 007dc284c1f8
616 614 01f36c5a8fda a139f71be9da
617 615 82623d38b9ba
618 616 01f36c5a8fda
619 617 392fd25390da
620 618 a139f71be9da
621 619 01f36c5a8fda
622 620 01f36c5a8fda
623 621 6a411f0d7a0a
624 622 a139f71be9da
625 623 e442cfc57690
626 624 a139f71be9da
627 625 3750ebee865d
628 626 a139f71be9da
629 627 7ae126973a96
630 628 a139f71be9da
631 629 14608b260df8
632 630 a139f71be9da
633 631 bed64f5d2f5a
634 632 a139f71be9da
635 633 a139f71be9da
636 634 a139f71be9da
637 635 $ hg debugsuccessorssets 'all()' --closest
638 636 d20a80d4def3
639 637 d20a80d4def3
640 638 01f36c5a8fda
641 639 01f36c5a8fda
642 640 a139f71be9da
643 641 a139f71be9da
644 642 $ hg debugsuccessorssets 'all()' --closest --hidden
645 643 d20a80d4def3
646 644 d20a80d4def3
647 645 007dc284c1f8
648 646 82623d38b9ba 392fd25390da
649 647 82623d38b9ba
650 648 82623d38b9ba
651 649 392fd25390da
652 650 392fd25390da
653 651 01f36c5a8fda
654 652 01f36c5a8fda
655 653 6a411f0d7a0a
656 654 e442cfc57690
657 655 e442cfc57690
658 656 e442cfc57690
659 657 3750ebee865d
660 658 392fd25390da
661 659 7ae126973a96
662 660 a139f71be9da
663 661 14608b260df8
664 662 a139f71be9da
665 663 bed64f5d2f5a
666 664 a139f71be9da
667 665 a139f71be9da
668 666 a139f71be9da
669 667 $ hg log -r 'contentdivergent()'
670 668
671 669 #if serve
672 670
673 671 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid --config web.view=all \
674 672 > -A access.log -E errors.log
675 673 $ cat hg.pid >> $DAEMON_PIDS
676 674
677 675 check an obsolete changeset that was rewritten and also split
678 676
679 677 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=paper' | egrep 'rewritten|split'
680 678 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=paper">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
681 679 split as <a href="/rev/7ae126973a96?style=paper">7ae126973a96</a> <a href="/rev/14608b260df8?style=paper">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
682 680 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=coal' | egrep 'rewritten|split'
683 681 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=coal">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
684 682 split as <a href="/rev/7ae126973a96?style=coal">7ae126973a96</a> <a href="/rev/14608b260df8?style=coal">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
685 683 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=gitweb' | egrep 'rewritten|split'
686 684 <td>rewritten as <a class="list" href="/rev/bed64f5d2f5a?style=gitweb">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
687 685 <td>split as <a class="list" href="/rev/7ae126973a96?style=gitweb">7ae126973a96</a> <a class="list" href="/rev/14608b260df8?style=gitweb">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
688 686 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=monoblue' | egrep 'rewritten|split'
689 687 <dd>rewritten as <a href="/rev/bed64f5d2f5a?style=monoblue">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
690 688 <dd>split as <a href="/rev/7ae126973a96?style=monoblue">7ae126973a96</a> <a href="/rev/14608b260df8?style=monoblue">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
691 689 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=spartan' | egrep 'rewritten|split'
692 690 <td class="obsolete">rewritten as <a href="/rev/bed64f5d2f5a?style=spartan">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
693 691 <td class="obsolete">split as <a href="/rev/7ae126973a96?style=spartan">7ae126973a96</a> <a href="/rev/14608b260df8?style=spartan">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
694 692
695 693 $ killdaemons.py
696 694
697 695 #endif
698 696
699 697 $ cd ..
700 698
701 699
702 700 Subset does not diverge
703 701 ------------------------------
704 702
705 703 Do not report a divergent successors-set if it is a subset of another
706 704 successors-set (report [A,B], not [A] + [A,B])
707 705
708 706 $ newcase subset
709 707 $ hg debugobsolete `getid A_0` `getid A_2`
710 708 1 new obsolescence markers
711 709 obsoleted 1 changesets
712 710 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
713 711 1 new obsolescence markers
714 712 $ hg debugsuccessorssets --hidden 'desc('A_0')'
715 713 007dc284c1f8
716 714 82623d38b9ba 392fd25390da
717 715 $ hg debugsuccessorssets 'desc('A_0')' --closest
718 716 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
719 717 007dc284c1f8
720 718 82623d38b9ba 392fd25390da
721 719
722 720 $ cd ..
723 721
724 722 Use the scmutil.cleanupnodes API to create divergence
725 723
726 724 $ hg init cleanupnodes
727 725 $ cd cleanupnodes
728 726 $ hg debugdrawdag <<'EOS'
729 727 > B1 B3 B4
730 728 > | \|
731 729 > A Z
732 730 > EOS
733 731
734 732 $ hg update -q B1
735 733 $ echo 3 >> B
736 734 $ hg commit --amend -m B2
737 735 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
738 736 > from mercurial import registrar, scmutil
739 737 > cmdtable = {}
740 738 > command = registrar.command(cmdtable)
741 739 > @command(b'cleanup')
742 740 > def cleanup(ui, repo):
743 741 > def node(expr):
744 742 > unfi = repo.unfiltered()
745 743 > rev = unfi.revs(expr).first()
746 744 > return unfi.changelog.node(rev)
747 745 > with repo.wlock(), repo.lock(), repo.transaction(b'delayedstrip'):
748 746 > mapping = {node(b'desc(B1)'): [node(b'desc(B3)')],
749 747 > node(b'desc(B3)'): [node(b'desc(B4)')]}
750 748 > scmutil.cleanupnodes(repo, mapping, b'test')
751 749 > EOF
752 750
753 751 $ rm .hg/localtags
754 752 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
755 753 2 new content-divergent changesets
756 754 $ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
757 755 @ 5:1a2a9b5b0030 B2 content-divergent
758 756 |
759 757 | * 4:70d5a63ca112 B4 content-divergent
760 758 | |
761 759 | o 1:48b9aae0607f Z
762 760 |
763 761 o 0:426bada5c675 A
764 762
765 763 $ hg debugobsolete
766 764 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
767 765 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'test', 'user': 'test'}
768 766 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'test', 'user': 'test'}
769 767
770 768 $ hg debugwhyunstable 1a2a9b5b0030
771 769 content-divergent: 70d5a63ca112acb3764bc1d7320ca90ea688d671 (draft) predecessor a178212c3433c4e77b573f6011e29affb8aefa33
772 770
773 771 $ hg log -r 1a2a9b5b0030 -T '{whyunstable}\n'
774 772 content-divergent: 4:70d5a63ca112 (draft) predecessor a178212c3433
775 773 $ hg log -r 1a2a9b5b0030 -T whyunstableshort
776 774 content-divergent: 4:70d5a63ca112 (draft) predecessor a178
777 775 $ hg log -r 1a2a9b5b0030 -T whyunstableshorter
778 776 content-divergent: 70d5 (draft) predecessor a178
779 777
780 778 #if serve
781 779
782 780 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
783 781 $ cat hg.pid >> $DAEMON_PIDS
784 782
785 783 check explanation for a content-divergent changeset
786 784
787 785 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=paper' | grep divergent:
788 786 <td>content-divergent: <a href="/rev/70d5a63ca112?style=paper">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=paper">a178212c3433</a></td>
789 787 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=coal' | grep divergent:
790 788 <td>content-divergent: <a href="/rev/70d5a63ca112?style=coal">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=coal">a178212c3433</a></td>
791 789 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=gitweb' | grep divergent:
792 790 <td>content-divergent: <a class="list" href="/rev/70d5a63ca112?style=gitweb">70d5a63ca112</a> (draft) predecessor <a class="list" href="/rev/a178212c3433?style=gitweb">a178212c3433</a></td>
793 791 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=monoblue' | grep divergent:
794 792 <dd>content-divergent: <a href="/rev/70d5a63ca112?style=monoblue">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=monoblue">a178212c3433</a></dd>
795 793 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=spartan' | grep divergent:
796 794 <td class="unstable">content-divergent: <a href="/rev/70d5a63ca112?style=spartan">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=spartan">a178212c3433</a></td>
797 795
798 796 $ killdaemons.py
799 797
800 798 #endif
@@ -1,1825 +1,1792 b''
1 1 $ cat >> $HGRCPATH << EOF
2 2 > [phases]
3 3 > # public changeset are not obsolete
4 4 > publish=false
5 5 > [ui]
6 6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(instabilities, ' {instabilities}')}) [{tags} {bookmarks}] {desc|firstline}{if(obsfate, " [{join(obsfate, "; ")}]")}\n"
7 7 > EOF
8 8 $ mkcommit() {
9 9 > echo "$1" > "$1"
10 10 > hg add "$1"
11 11 > hg ci -m "add $1"
12 12 > }
13 13 $ getid() {
14 14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 15 > }
16 16
17 17 $ cat > debugkeys.py <<EOF
18 18 > def reposetup(ui, repo):
19 19 > class debugkeysrepo(repo.__class__):
20 20 > def listkeys(self, namespace):
21 21 > ui.write(b'listkeys %s\n' % (namespace,))
22 22 > return super(debugkeysrepo, self).listkeys(namespace)
23 23 >
24 24 > if repo.local():
25 25 > repo.__class__ = debugkeysrepo
26 26 > EOF
27 27
28 28 $ hg init tmpa
29 29 $ cd tmpa
30 30 $ mkcommit kill_me
31 31
32 32 Checking that the feature is properly disabled
33 33
34 34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 35 abort: creating obsolete markers is not enabled on this repo
36 36 [255]
37 37
38 38 Enabling it
39 39
40 40 $ cat >> $HGRCPATH << EOF
41 41 > [experimental]
42 42 > evolution=exchange
43 43 > evolution.createmarkers=True
44 44 > EOF
45 45
46 46 Killing a single changeset without replacement
47 47
48 48 $ hg debugobsolete 0
49 49 abort: changeset references must be full hexadecimal node identifiers
50 50 [255]
51 51 $ hg debugobsolete '00'
52 52 abort: changeset references must be full hexadecimal node identifiers
53 53 [255]
54 54 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
55 55 1 new obsolescence markers
56 56 obsoleted 1 changesets
57 57 $ hg debugobsolete
58 58 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
59 59
60 60 (test that mercurial is not confused)
61 61
62 62 $ hg up null --quiet # having 0 as parent prevents it to be hidden
63 63 $ hg tip
64 64 -1:000000000000 (public) [tip ]
65 65 $ hg up --hidden tip --quiet
66 66 updated to hidden changeset 97b7c2d76b18
67 67 (hidden revision '97b7c2d76b18' is pruned)
68 68
69 69 Killing a single changeset with itself should fail
70 70 (simple local safeguard)
71 71
72 72 $ hg debugobsolete `getid kill_me` `getid kill_me`
73 73 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
74 74 [255]
75 75
76 76 $ cd ..
77 77
78 78 Killing a single changeset with replacement
79 79 (and testing the format option)
80 80
81 81 $ hg init tmpb
82 82 $ cd tmpb
83 83 $ mkcommit a
84 84 $ mkcommit b
85 85 $ mkcommit original_c
86 86 $ hg up "desc('b')"
87 87 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
88 88 $ mkcommit new_c
89 89 created new head
90 90 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
91 91 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
92 92 1 new obsolescence markers
93 93 obsoleted 1 changesets
94 94 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
95 95 2:245bde4270cd add original_c
96 96 $ hg debugrevlog -cd
97 97 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
98 98 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
99 99 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
100 100 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
101 101 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
102 102 $ hg debugobsolete
103 103 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
104 104
105 105 (check the version number of the obsstore)
106 106
107 107 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
108 108 \x00 (no-eol) (esc)
109 109
110 110 do it again (it reads the obsstore before adding a new changeset)
111 111
112 112 $ hg up '.^'
113 113 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
114 114 $ mkcommit new_2_c
115 115 created new head
116 116 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
117 117 1 new obsolescence markers
118 118 obsoleted 1 changesets
119 119 $ hg debugobsolete
120 120 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
121 121 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
122 122
123 123 Register two markers with a missing node
124 124
125 125 $ hg up '.^'
126 126 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
127 127 $ mkcommit new_3_c
128 128 created new head
129 129 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
130 130 1 new obsolescence markers
131 131 obsoleted 1 changesets
132 132 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
133 133 1 new obsolescence markers
134 134 $ hg debugobsolete
135 135 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
136 136 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
137 137 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
138 138 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
139 139
140 140 Test the --index option of debugobsolete command
141 141 $ hg debugobsolete --index
142 142 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
143 143 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
144 144 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
145 145 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
146 146
147 147 Refuse pathological nullid successors
148 148 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
149 149 transaction abort!
150 150 rollback completed
151 151 abort: bad obsolescence marker detected: invalid successors nullid
152 152 [255]
153 153
154 154 Check that graphlog detects that a changeset is obsolete:
155 155
156 156 $ hg log -G
157 157 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
158 158 |
159 159 o 1:7c3bad9141dc (draft) [ ] add b
160 160 |
161 161 o 0:1f0dee641bb7 (draft) [ ] add a
162 162
163 163
164 164 check that heads does not report them
165 165
166 166 $ hg heads
167 167 5:5601fb93a350 (draft) [tip ] add new_3_c
168 168 $ hg heads --hidden
169 169 5:5601fb93a350 (draft) [tip ] add new_3_c
170 170 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
171 171 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
172 172 2:245bde4270cd (draft *obsolete*) [ ] add original_c [rewritten as 3:cdbce2fbb163]
173 173
174 174
175 175 check that summary does not report them
176 176
177 177 $ hg init ../sink
178 178 $ echo '[paths]' >> .hg/hgrc
179 179 $ echo 'default=../sink' >> .hg/hgrc
180 180 $ hg summary --remote
181 181 parent: 5:5601fb93a350 tip
182 182 add new_3_c
183 183 branch: default
184 184 commit: (clean)
185 185 update: (current)
186 186 phases: 3 draft
187 187 remote: 3 outgoing
188 188
189 189 $ hg summary --remote --hidden
190 190 parent: 5:5601fb93a350 tip
191 191 add new_3_c
192 192 branch: default
193 193 commit: (clean)
194 194 update: 3 new changesets, 4 branch heads (merge)
195 195 phases: 6 draft
196 196 remote: 3 outgoing
197 197
198 198 check that various commands work well with filtering
199 199
200 200 $ hg tip
201 201 5:5601fb93a350 (draft) [tip ] add new_3_c
202 202 $ hg log -r 6
203 203 abort: unknown revision '6'!
204 204 [255]
205 205 $ hg log -r 4
206 206 abort: hidden revision '4' was rewritten as: 5601fb93a350!
207 207 (use --hidden to access hidden revisions)
208 208 [255]
209 209 $ hg debugrevspec 'rev(6)'
210 210 $ hg debugrevspec 'rev(4)'
211 211 $ hg debugrevspec 'null'
212 212 -1
213 213
214 214 Check that public changesets are not counted as obsolete:
215 215
216 216 $ hg --hidden phase --public 2
217 217 1 new phase-divergent changesets
218 218 $ hg log -G
219 219 @ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
220 220 |
221 221 | o 2:245bde4270cd (public) [ ] add original_c
222 222 |/
223 223 o 1:7c3bad9141dc (public) [ ] add b
224 224 |
225 225 o 0:1f0dee641bb7 (public) [ ] add a
226 226
227 227 $ hg log -r 'unstable()'
228 228 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
229 229
230 230
231 231 And that bumped changesets are detected
232 232 --------------------------------------
233 233
234 234 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
235 235 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
236 236 the public changeset
237 237
238 238 $ hg log --hidden -r 'phasedivergent()'
239 239 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
240 240
241 241 And that we can't push a bumped changeset
242 242
243 243 $ hg push ../tmpa -r 0 --force #(make repo related)
244 244 pushing to ../tmpa
245 245 searching for changes
246 246 warning: repository is unrelated
247 247 adding changesets
248 248 adding manifests
249 249 adding file changes
250 250 added 1 changesets with 1 changes to 1 files (+1 heads)
251 251 $ hg push ../tmpa
252 252 pushing to ../tmpa
253 253 searching for changes
254 abort: push includes unstable changesets:
255 5601fb93a350 (phase-divergent)
254 abort: push includes phase-divergent changeset: 5601fb93a350!
256 255 [255]
257 256
258 257 Fixing the "bumped" situation
259 258 We need to create a clone of 5 and add a special marker with a flag
260 259
261 260 $ hg summary
262 261 parent: 5:5601fb93a350 tip (phase-divergent)
263 262 add new_3_c
264 263 branch: default
265 264 commit: (clean)
266 265 update: 1 new changesets, 2 branch heads (merge)
267 266 phases: 1 draft
268 267 phase-divergent: 1 changesets
269 268 $ hg up '5^'
270 269 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
271 270 $ hg revert -ar 5
272 271 adding new_3_c
273 272 $ hg ci -m 'add n3w_3_c'
274 273 created new head
275 274 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
276 275 1 new obsolescence markers
277 276 obsoleted 1 changesets
278 277 $ hg log -r 'phasedivergent()'
279 278 $ hg log -G
280 279 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
281 280 |
282 281 | o 2:245bde4270cd (public) [ ] add original_c
283 282 |/
284 283 o 1:7c3bad9141dc (public) [ ] add b
285 284 |
286 285 o 0:1f0dee641bb7 (public) [ ] add a
287 286
288 287
289 288 Basic exclusive testing
290 289
291 290 $ hg log -G --hidden
292 291 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
293 292 |
294 293 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
295 294 |/
296 295 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
297 296 |/
298 297 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
299 298 |/
300 299 | o 2:245bde4270cd (public) [ ] add original_c
301 300 |/
302 301 o 1:7c3bad9141dc (public) [ ] add b
303 302 |
304 303 o 0:1f0dee641bb7 (public) [ ] add a
305 304
306 305 $ hg debugobsolete --rev 6f9641995072
307 306 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
308 307 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
309 308 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
310 309 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
311 310 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
312 311 $ hg debugobsolete --rev 6f9641995072 --exclusive
313 312 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
314 313 $ hg debugobsolete --rev 5601fb93a350 --hidden
315 314 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
316 315 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
317 316 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
318 317 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
319 318 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
320 319 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
321 320 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
322 321 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
323 322 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
324 323
325 324 $ cd ..
326 325
327 326 Revision 0 is hidden
328 327 --------------------
329 328
330 329 $ hg init rev0hidden
331 330 $ cd rev0hidden
332 331
333 332 $ mkcommit kill0
334 333 $ hg up -q null
335 334 $ hg debugobsolete `getid kill0`
336 335 1 new obsolescence markers
337 336 obsoleted 1 changesets
338 337 $ mkcommit a
339 338 $ mkcommit b
340 339
341 340 Should pick the first visible revision as "repo" node
342 341
343 342 $ hg archive ../archive-null
344 343 $ cat ../archive-null/.hg_archival.txt
345 344 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
346 345 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
347 346 branch: default
348 347 latesttag: null
349 348 latesttagdistance: 2
350 349 changessincelatesttag: 2
351 350
352 351
353 352 $ cd ..
354 353
355 354 Can disable transaction summary report
356 355
357 356 $ hg init transaction-summary
358 357 $ cd transaction-summary
359 358 $ mkcommit a
360 359 $ mkcommit b
361 360 $ hg up -q null
362 361 $ hg --config experimental.evolution.report-instabilities=false debugobsolete `getid a`
363 362 1 new obsolescence markers
364 363 obsoleted 1 changesets
365 364 $ cd ..
366 365
367 366 Exchange Test
368 367 ============================
369 368
370 369 Destination repo does not have any data
371 370 ---------------------------------------
372 371
373 372 Simple incoming test
374 373
375 374 $ hg init tmpc
376 375 $ cd tmpc
377 376 $ hg incoming ../tmpb
378 377 comparing with ../tmpb
379 378 0:1f0dee641bb7 (public) [ ] add a
380 379 1:7c3bad9141dc (public) [ ] add b
381 380 2:245bde4270cd (public) [ ] add original_c
382 381 6:6f9641995072 (draft) [tip ] add n3w_3_c
383 382
384 383 Try to pull markers while testing pull --confirm
385 384 (extinct changesets are excluded but markers are exchanged)
386 385
387 386 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
388 387 > n
389 388 > EOF
390 389 pulling from ../tmpb
391 390 requesting all changes
392 391 adding changesets
393 392 adding manifests
394 393 adding file changes
395 394 adding 4 changesets with 4 changes to 4 files (+1 heads)
396 395 5 new obsolescence markers
397 396 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
398 397 accept incoming changes (yn)? n
399 398 transaction abort!
400 399 rollback completed
401 400 abort: user aborted
402 401 [255]
403 402 $ HGPLAIN=1 hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
404 403 > n
405 404 > EOF
406 405 pulling from ../tmpb
407 406 requesting all changes
408 407 adding changesets
409 408 adding manifests
410 409 adding file changes
411 410 adding 4 changesets with 4 changes to 4 files (+1 heads)
412 411 5 new obsolescence markers
413 412 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
414 413 accept incoming changes (yn)? n
415 414 transaction abort!
416 415 rollback completed
417 416 abort: user aborted
418 417 [255]
419 418 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
420 419 > y
421 420 > EOF
422 421 pulling from ../tmpb
423 422 requesting all changes
424 423 adding changesets
425 424 adding manifests
426 425 adding file changes
427 426 adding 4 changesets with 4 changes to 4 files (+1 heads)
428 427 5 new obsolescence markers
429 428 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
430 429 accept incoming changes (yn)? y
431 430 added 4 changesets with 4 changes to 4 files (+1 heads)
432 431 5 new obsolescence markers
433 432 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
434 433 (run 'hg heads' to see heads, 'hg merge' to merge)
435 434 $ hg debugobsolete
436 435 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
437 436 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
438 437 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
439 438 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
440 439 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
441 440
442 441 Rollback/Transaction support
443 442
444 443 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
445 444 1 new obsolescence markers
446 445 $ hg debugobsolete
447 446 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
448 447 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
449 448 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
450 449 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
451 450 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
452 451 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
453 452 $ hg rollback -n
454 453 repository tip rolled back to revision 3 (undo debugobsolete)
455 454 $ hg rollback
456 455 repository tip rolled back to revision 3 (undo debugobsolete)
457 456 $ hg debugobsolete
458 457 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
459 458 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
460 459 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
461 460 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
462 461 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
463 462
464 463 $ cd ..
465 464
466 465 Try to push markers
467 466
468 467 $ hg init tmpd
469 468 $ hg -R tmpb push tmpd
470 469 pushing to tmpd
471 470 searching for changes
472 471 adding changesets
473 472 adding manifests
474 473 adding file changes
475 474 added 4 changesets with 4 changes to 4 files (+1 heads)
476 475 5 new obsolescence markers
477 476 $ hg -R tmpd debugobsolete | sort
478 477 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
479 478 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
480 479 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
481 480 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
482 481 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
483 482
484 483 Check obsolete keys are exchanged only if the source has an obsolete store
485 484
486 485 $ hg init empty
487 486 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
488 487 pushing to tmpd
489 488 listkeys phases
490 489 listkeys bookmarks
491 490 no changes found
492 491 listkeys phases
493 492 [1]
494 493
495 494 clone support
496 495 (markers are copied and extinct changesets are included to allow hardlinks)
497 496
498 497 $ hg clone tmpb clone-dest
499 498 updating to branch default
500 499 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
501 500 $ hg -R clone-dest log -G --hidden
502 501 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
503 502 |
504 503 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
505 504 |/
506 505 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
507 506 |/
508 507 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
509 508 |/
510 509 | o 2:245bde4270cd (public) [ ] add original_c
511 510 |/
512 511 o 1:7c3bad9141dc (public) [ ] add b
513 512 |
514 513 o 0:1f0dee641bb7 (public) [ ] add a
515 514
516 515 $ hg -R clone-dest debugobsolete
517 516 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
518 517 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
519 518 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
520 519 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
521 520 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
522 521
523 522
524 523 Destination repo has existing data
525 524 ---------------------------------------
526 525
527 526 On pull
528 527
529 528 $ hg init tmpe
530 529 $ cd tmpe
531 530 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
532 531 1 new obsolescence markers
533 532 $ hg pull ../tmpb
534 533 pulling from ../tmpb
535 534 requesting all changes
536 535 adding changesets
537 536 adding manifests
538 537 adding file changes
539 538 added 4 changesets with 4 changes to 4 files (+1 heads)
540 539 5 new obsolescence markers
541 540 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
542 541 (run 'hg heads' to see heads, 'hg merge' to merge)
543 542 $ hg debugobsolete
544 543 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
545 544 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
546 545 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
547 546 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
548 547 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
549 548 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
550 549
551 550
552 551 On push
553 552
554 553 $ hg push ../tmpc
555 554 pushing to ../tmpc
556 555 searching for changes
557 556 no changes found
558 557 1 new obsolescence markers
559 558 [1]
560 559 $ hg -R ../tmpc debugobsolete
561 560 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
562 561 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
563 562 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
564 563 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
565 564 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
566 565 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
567 566
567 566 detect outgoing obsolete and unstable changesets
569 568 ---------------------------------------
570 569
571 570
572 571 $ hg log -G
573 572 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
574 573 |
575 574 | o 2:245bde4270cd (public) [ ] add original_c
576 575 |/
577 576 o 1:7c3bad9141dc (public) [ ] add b
578 577 |
579 578 o 0:1f0dee641bb7 (public) [ ] add a
580 579
581 580 $ hg up 'desc("n3w_3_c")'
582 581 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
583 582 $ mkcommit original_d
584 583 $ mkcommit original_e
585 584 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
586 585 1 new obsolescence markers
587 586 obsoleted 1 changesets
588 587 1 new orphan changesets
589 588 $ hg log -r 'unstable()'
590 589 5:cda648ca50f5 (draft orphan) [tip ] add original_e
591 590 $ hg debugobsolete | grep `getid original_d`
592 591 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
593 592 $ hg log -r 'obsolete()'
594 593 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
595 594 $ hg summary
596 595 parent: 5:cda648ca50f5 tip (orphan)
597 596 add original_e
598 597 branch: default
599 598 commit: (clean)
600 599 update: 1 new changesets, 2 branch heads (merge)
601 600 phases: 3 draft
602 601 orphan: 1 changesets
603 602 $ hg log -G -r '::orphan()'
604 603 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
605 604 |
606 605 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
607 606 |
608 607 o 3:6f9641995072 (draft) [ ] add n3w_3_c
609 608 |
610 609 o 1:7c3bad9141dc (public) [ ] add b
611 610 |
612 611 o 0:1f0dee641bb7 (public) [ ] add a
613 612
614 613
615 614 refuse to push obsolete changeset
616 615
617 616 $ hg push ../tmpc/ -r 'desc("original_d")'
618 617 pushing to ../tmpc/
619 618 searching for changes
620 abort: push includes obsolete changesets:
621 94b33453f93b
619 abort: push includes obsolete changeset: 94b33453f93b!
622 620 [255]
623 621
624 622 refuse to push unstable changeset
625 623
626 624 $ hg push ../tmpc/
627 625 pushing to ../tmpc/
628 626 searching for changes
629 abort: push includes obsolete changesets:
630 94b33453f93b
631 push includes unstable changesets:
632 cda648ca50f5 (orphan)
627 abort: push includes orphan changeset: cda648ca50f5!
633 628 [255]
634 629
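A minimal sketch of the validation behind the two aborts above (the helper name is ours and the code is simplified, though exchange.py performs an equivalent check): before anything is sent, the outgoing changesets are scanned and the push aborts on the first obsolete or unstable one unless it is forced.

  from mercurial import error

  def checkoutgoing(repo, outgoing, force=False):
      # sketch only: scan what would be pushed and refuse bad changesets
      unfi = repo.unfiltered()
      if force or not unfi.obsstore:
          return  # forced pushes and marker-less repos skip the check
      for node in outgoing.missing:
          ctx = unfi[node]
          if ctx.obsolete():
              raise error.Abort(b'push includes obsolete changeset: %s!' % ctx)
          if ctx.isunstable():
              # instabilities() names e.g. b'orphan', b'phase-divergent'
              kind = ctx.instabilities()[0]
              raise error.Abort(b'push includes %s changeset: %s!' % (kind, ctx))
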
635 630 with --force it will work anyway
636 631
637 632 $ hg push ../tmpc/ --force
638 633 pushing to ../tmpc/
639 634 searching for changes
640 635 adding changesets
641 636 adding manifests
642 637 adding file changes
643 638 added 2 changesets with 2 changes to 2 files
644 639 1 new obsolescence markers
645 640 1 new orphan changesets
646 641
647 642 if the orphan changeset is already on the server, pushing should work
648 643
649 644 $ hg push ../tmpc/
650 645 pushing to ../tmpc/
651 646 searching for changes
652 647 no changes found
653 648 [1]
654 649
655 pushing should work even if the outgoing changes contain an unrelated changeset
656 (neither obsolete nor unstable) (issue6372)
657
658 $ hg up 1 -q
659 $ hg branch new -q
660 $ mkcommit c
661
662 $ hg push ../tmpc/ --new-branch
663 pushing to ../tmpc/
664 searching for changes
665 adding changesets
666 adding manifests
667 adding file changes
668 added 1 changesets with 1 changes to 1 files (+1 heads)
669
670 make later tests work unmodified
671
672 $ hg --config extensions.strip= strip tip -q
673 $ hg up 5 -q
674
675 650 Test that extinct changesets are properly detected
676 651
677 652 $ hg log -r 'extinct()'
678 653
679 654 Don't try to push extinct changesets
680 655
681 656 $ hg init ../tmpf
682 657 $ hg out ../tmpf
683 658 comparing with ../tmpf
684 659 searching for changes
685 660 0:1f0dee641bb7 (public) [ ] add a
686 661 1:7c3bad9141dc (public) [ ] add b
687 662 2:245bde4270cd (public) [ ] add original_c
688 663 3:6f9641995072 (draft) [ ] add n3w_3_c
689 664 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
690 665 5:cda648ca50f5 (draft orphan) [tip ] add original_e
691 666 $ hg push ../tmpf -f # -f because we push unstable too
692 667 pushing to ../tmpf
693 668 searching for changes
694 669 adding changesets
695 670 adding manifests
696 671 adding file changes
697 672 added 6 changesets with 6 changes to 6 files (+1 heads)
698 673 7 new obsolescence markers
699 674 1 new orphan changesets
700 675
701 676 no warning displayed
702 677
703 678 $ hg push ../tmpf
704 679 pushing to ../tmpf
705 680 searching for changes
706 681 no changes found
707 682 [1]
708 683
709 684 Do not warn about new head when the new head is a successor of a remote one
710 685
711 686 $ hg log -G
712 687 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
713 688 |
714 689 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
715 690 |
716 691 o 3:6f9641995072 (draft) [ ] add n3w_3_c
717 692 |
718 693 | o 2:245bde4270cd (public) [ ] add original_c
719 694 |/
720 695 o 1:7c3bad9141dc (public) [ ] add b
721 696 |
722 697 o 0:1f0dee641bb7 (public) [ ] add a
723 698
724 699 $ hg up -q 'desc(n3w_3_c)'
725 700 $ mkcommit obsolete_e
726 701 created new head
727 702 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
728 703 > -u 'test <test@example.net>'
729 704 1 new obsolescence markers
730 705 obsoleted 1 changesets
731 706 $ hg outgoing ../tmpf # piggybacked 'hg outgoing' testing
732 707 comparing with ../tmpf
733 708 searching for changes
734 709 6:3de5eca88c00 (draft) [tip ] add obsolete_e
735 710 $ hg push ../tmpf
736 711 pushing to ../tmpf
737 712 searching for changes
738 713 adding changesets
739 714 adding manifests
740 715 adding file changes
741 716 added 1 changesets with 1 changes to 1 files (+1 heads)
742 717 1 new obsolescence markers
743 718 obsoleted 1 changesets
744 719
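Why no new-head warning fires above: a candidate new head whose predecessors include a head the remote already has is treated as a replacement, not an addition. A rough sketch of that test under the assumption of an in-process repo object (the helper name is ours; the real logic lives in mercurial/discovery.py):

  from mercurial import obsutil

  def isreplacementhead(repo, newhead, remoteheads):
      # walk every known predecessor of the candidate head; if one of
      # them is already a remote head, the push merely rewrites it
      remote = set(remoteheads)
      for prec in obsutil.allpredecessors(repo.obsstore, [newhead]):
          if prec in remote:
              return True
      return False
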
745 720 test relevance computation
746 721 ---------------------------------------
747 722
748 723 Checking the simple case of "marker relevance".
749 724
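The "relevant" markers for a set of revisions can also be queried programmatically; a small sketch, assuming an in-process repo object (obsstore.relevantmarkers is the underlying API, the wrapper is ours):

  def relevant(repo, revs):
      # markers whose predecessor chains touch the given revisions,
      # including the prune markers of their dead children
      nodes = [repo[r].node() for r in revs]
      return repo.obsstore.relevantmarkers(nodes)
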
750 725
751 726 Reminder of the repo situation
752 727
753 728 $ hg log --hidden --graph
754 729 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
755 730 |
756 731 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e [rewritten as 6:3de5eca88c00 by test <test@example.net>]
757 732 | |
758 733 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
759 734 |/
760 735 o 3:6f9641995072 (draft) [ ] add n3w_3_c
761 736 |
762 737 | o 2:245bde4270cd (public) [ ] add original_c
763 738 |/
764 739 o 1:7c3bad9141dc (public) [ ] add b
765 740 |
766 741 o 0:1f0dee641bb7 (public) [ ] add a
767 742
768 743
769 744 List of all markers
770 745
771 746 $ hg debugobsolete
772 747 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
773 748 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
774 749 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
775 750 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
776 751 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
777 752 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
778 753 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
779 754 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
780 755
781 756 List of changesets with no chain
782 757
783 758 $ hg debugobsolete --hidden --rev ::2
784 759
785 760 List of changesets that are included in a marker chain
786 761
787 762 $ hg debugobsolete --hidden --rev 6
788 763 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
789 764
790 765 List of changesets with a longer chain (including a pruned child)
791 766
792 767 $ hg debugobsolete --hidden --rev 3
793 768 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
794 769 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
795 770 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
796 771 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
797 772 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
798 773 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
799 774 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
800 775
801 776 List of both
802 777
803 778 $ hg debugobsolete --hidden --rev 3::6
804 779 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
805 780 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
806 781 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
807 782 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
808 783 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
809 784 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
810 785 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
811 786 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
812 787
813 788 List of all markers in JSON
814 789
815 790 $ hg debugobsolete -Tjson
816 791 [
817 792 {
818 793 "date": [1339, 0],
819 794 "flag": 0,
820 795 "metadata": {"user": "test"},
821 796 "prednode": "1339133913391339133913391339133913391339",
822 797 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
823 798 },
824 799 {
825 800 "date": [1339, 0],
826 801 "flag": 0,
827 802 "metadata": {"user": "test"},
828 803 "prednode": "1337133713371337133713371337133713371337",
829 804 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
830 805 },
831 806 {
832 807 "date": [121, 120],
833 808 "flag": 12,
834 809 "metadata": {"user": "test"},
835 810 "prednode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
836 811 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
837 812 },
838 813 {
839 814 "date": [1338, 0],
840 815 "flag": 1,
841 816 "metadata": {"user": "test"},
842 817 "prednode": "5601fb93a350734d935195fee37f4054c529ff39",
843 818 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
844 819 },
845 820 {
846 821 "date": [1338, 0],
847 822 "flag": 0,
848 823 "metadata": {"user": "test"},
849 824 "prednode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
850 825 "succnodes": ["1337133713371337133713371337133713371337"]
851 826 },
852 827 {
853 828 "date": [1337, 0],
854 829 "flag": 0,
855 830 "metadata": {"user": "test"},
856 831 "prednode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
857 832 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
858 833 },
859 834 {
860 835 "date": [0, 0],
861 836 "flag": 0,
862 837 "metadata": {"user": "test"},
863 838 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
864 839 "prednode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
865 840 "succnodes": []
866 841 },
867 842 {
868 843 "date": *, (glob)
869 844 "flag": 0,
870 845 "metadata": {"user": "test <test@example.net>"},
871 846 "prednode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
872 847 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
873 848 }
874 849 ]
875 850
876 851 Template keywords
877 852
878 853 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
879 854 3de5eca88c00 ????-??-?? (glob)
880 855 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
881 856 user=test <test@example.net>
882 857 $ hg debugobsolete -r6 -T '{metadata}\n{metadata}\n'
883 858 'user': 'test <test@example.net>'
884 859 'user': 'test <test@example.net>'
885 860 $ hg debugobsolete -r6 -T '{succnodes}\n{succnodes}\n'
886 861 3de5eca88c00aa039da7399a220f4a5221faa585
887 862 3de5eca88c00aa039da7399a220f4a5221faa585
888 863 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
889 864 0 test <test@example.net>
890 865
891 866 Test the debug output for exchange
892 867 ----------------------------------
893 868
894 869 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
895 870 pulling from ../tmpb
896 871 searching for changes
897 872 no changes found
898 873 obsmarker-exchange: 346 bytes received
899 874
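The same debug channel can be enabled persistently instead of per invocation; a sketch of the hgrc stanza, using the same knob as the --config above:

  [experimental]
  obsmarkers-exchange-debug = true
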
900 875 check hgweb does not explode
901 876 ====================================
902 877
903 878 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
904 879 adding changesets
905 880 adding manifests
906 881 adding file changes
907 882 added 62 changesets with 63 changes to 9 files (+60 heads)
908 883 new changesets 50c51b361e60:c15e9edfca13 (62 drafts)
909 884 (2 other changesets obsolete on arrival)
910 885 (run 'hg heads .' to see heads, 'hg merge' to merge)
911 886 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
912 887 > do
913 888 > hg debugobsolete $node
914 889 > done
915 890 1 new obsolescence markers
916 891 obsoleted 1 changesets
917 892 1 new obsolescence markers
918 893 obsoleted 1 changesets
919 894 1 new obsolescence markers
920 895 obsoleted 1 changesets
921 896 1 new obsolescence markers
922 897 obsoleted 1 changesets
923 898 1 new obsolescence markers
924 899 obsoleted 1 changesets
925 900 1 new obsolescence markers
926 901 obsoleted 1 changesets
927 902 1 new obsolescence markers
928 903 obsoleted 1 changesets
929 904 1 new obsolescence markers
930 905 obsoleted 1 changesets
931 906 1 new obsolescence markers
932 907 obsoleted 1 changesets
933 908 1 new obsolescence markers
934 909 obsoleted 1 changesets
935 910 1 new obsolescence markers
936 911 obsoleted 1 changesets
937 912 1 new obsolescence markers
938 913 obsoleted 1 changesets
939 914 1 new obsolescence markers
940 915 obsoleted 1 changesets
941 916 1 new obsolescence markers
942 917 obsoleted 1 changesets
943 918 1 new obsolescence markers
944 919 obsoleted 1 changesets
945 920 1 new obsolescence markers
946 921 obsoleted 1 changesets
947 922 1 new obsolescence markers
948 923 obsoleted 1 changesets
949 924 1 new obsolescence markers
950 925 obsoleted 1 changesets
951 926 1 new obsolescence markers
952 927 obsoleted 1 changesets
953 928 1 new obsolescence markers
954 929 obsoleted 1 changesets
955 930 1 new obsolescence markers
956 931 obsoleted 1 changesets
957 932 1 new obsolescence markers
958 933 obsoleted 1 changesets
959 934 1 new obsolescence markers
960 935 obsoleted 1 changesets
961 936 1 new obsolescence markers
962 937 obsoleted 1 changesets
963 938 1 new obsolescence markers
964 939 obsoleted 1 changesets
965 940 1 new obsolescence markers
966 941 obsoleted 1 changesets
967 942 1 new obsolescence markers
968 943 obsoleted 1 changesets
969 944 1 new obsolescence markers
970 945 obsoleted 1 changesets
971 946 1 new obsolescence markers
972 947 obsoleted 1 changesets
973 948 1 new obsolescence markers
974 949 obsoleted 1 changesets
975 950 1 new obsolescence markers
976 951 obsoleted 1 changesets
977 952 1 new obsolescence markers
978 953 obsoleted 1 changesets
979 954 1 new obsolescence markers
980 955 obsoleted 1 changesets
981 956 1 new obsolescence markers
982 957 obsoleted 1 changesets
983 958 1 new obsolescence markers
984 959 obsoleted 1 changesets
985 960 1 new obsolescence markers
986 961 obsoleted 1 changesets
987 962 1 new obsolescence markers
988 963 obsoleted 1 changesets
989 964 1 new obsolescence markers
990 965 obsoleted 1 changesets
991 966 1 new obsolescence markers
992 967 obsoleted 1 changesets
993 968 1 new obsolescence markers
994 969 obsoleted 1 changesets
995 970 1 new obsolescence markers
996 971 obsoleted 1 changesets
997 972 1 new obsolescence markers
998 973 obsoleted 1 changesets
999 974 1 new obsolescence markers
1000 975 obsoleted 1 changesets
1001 976 1 new obsolescence markers
1002 977 obsoleted 1 changesets
1003 978 1 new obsolescence markers
1004 979 obsoleted 1 changesets
1005 980 1 new obsolescence markers
1006 981 obsoleted 1 changesets
1007 982 1 new obsolescence markers
1008 983 obsoleted 1 changesets
1009 984 1 new obsolescence markers
1010 985 obsoleted 1 changesets
1011 986 1 new obsolescence markers
1012 987 obsoleted 1 changesets
1013 988 1 new obsolescence markers
1014 989 obsoleted 1 changesets
1015 990 1 new obsolescence markers
1016 991 obsoleted 1 changesets
1017 992 1 new obsolescence markers
1018 993 obsoleted 1 changesets
1019 994 1 new obsolescence markers
1020 995 obsoleted 1 changesets
1021 996 1 new obsolescence markers
1022 997 obsoleted 1 changesets
1023 998 1 new obsolescence markers
1024 999 obsoleted 1 changesets
1025 1000 1 new obsolescence markers
1026 1001 obsoleted 1 changesets
1027 1002 1 new obsolescence markers
1028 1003 obsoleted 1 changesets
1029 1004 1 new obsolescence markers
1030 1005 obsoleted 1 changesets
1031 1006 1 new obsolescence markers
1032 1007 obsoleted 1 changesets
1033 1008 1 new obsolescence markers
1034 1009 obsoleted 1 changesets
1035 1010 $ hg up tip
1036 1011 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1037 1012
1038 1013 #if serve
1039 1014
1040 1015 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1041 1016 $ cat hg.pid >> $DAEMON_PIDS
1042 1017
1043 1018 check changelog view
1044 1019
1045 1020 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
1046 1021 200 Script output follows
1047 1022
1048 1023 check graph view
1049 1024
1050 1025 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
1051 1026 200 Script output follows
1052 1027
1053 1028 check filelog view
1054 1029
1055 1030 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
1056 1031 200 Script output follows
1057 1032
1058 1033 check filelog view for hidden commits (obsolete ones are hidden here)
1059 1034
1060 1035 $ get-with-headers.py localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar' | grep obsolete
1061 1036 [1]
1062 1037
1063 1038 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
1064 1039 200 Script output follows
1065 1040 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1066 1041 404 Not Found
1067 1042 [1]
1068 1043
1069 1044 check the web.view config option:
1070 1045
1071 1046 $ killdaemons.py hg.pid
1072 1047 $ cat >> .hg/hgrc << EOF
1073 1048 > [web]
1074 1049 > view=all
1075 1050 > EOF
1076 1051 $ wait
1077 1052 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1078 1053 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1079 1054 200 Script output follows
1080 1055 $ killdaemons.py hg.pid
1081 1056
1082 1057 Checking the _enable=False warning when obsolete markers exist
1083 1058
1084 1059 $ echo '[experimental]' >> $HGRCPATH
1085 1060 $ echo "evolution=" >> $HGRCPATH
1086 1061 $ hg log -r tip
1087 1062 68:c15e9edfca13 (draft) [tip ] add celestine
1088 1063
1089 1064 reenable evolution for later tests
1090 1065
1091 1066 $ echo '[experimental]' >> $HGRCPATH
1092 1067 $ echo "evolution.exchange=True" >> $HGRCPATH
1093 1068 $ echo "evolution.createmarkers=True" >> $HGRCPATH
1094 1069
1095 1070 $ rm access.log errors.log
1096 1071 #endif
1097 1072
1098 1073 Several instabilities on the same changeset (create a changeset that is at once orphan, phase-divergent and content-divergent)
1099 1074
1100 1075 $ hg debugobsolete `getid obsolete_e`
1101 1076 1 new obsolescence markers
1102 1077 obsoleted 1 changesets
1103 1078 2 new orphan changesets
1104 1079 $ hg debugobsolete `getid original_c` `getid babar`
1105 1080 1 new obsolescence markers
1106 1081 1 new phase-divergent changesets
1107 1082 2 new content-divergent changesets
1108 1083 $ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan() and contentdivergent()'
1109 1084 changeset: 7:50c51b361e60
1110 1085 user: test
1111 1086 date: Thu Jan 01 00:00:00 1970 +0000
1112 1087 instability: orphan, phase-divergent, content-divergent
1113 1088 summary: add babar
1114 1089
1115 1090 test the "obsolete" templatekw
1116 1091
1117 1092 $ hg log -r 'obsolete()'
1118 1093 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e [pruned]
1119 1094
1120 1095 test the "troubles" templatekw
1121 1096
1122 1097 $ hg log -r 'phasedivergent() and orphan()'
1123 1098 7:50c51b361e60 (draft orphan phase-divergent content-divergent) [ ] add babar
1124 1099
1125 1100 test the default cmdline template
1126 1101
1127 1102 $ hg log -T default -r 'phasedivergent()'
1128 1103 changeset: 7:50c51b361e60
1129 1104 user: test
1130 1105 date: Thu Jan 01 00:00:00 1970 +0000
1131 1106 instability: orphan, phase-divergent, content-divergent
1132 1107 summary: add babar
1133 1108
1134 1109 $ hg log -T default -r 'obsolete()'
1135 1110 changeset: 6:3de5eca88c00
1136 1111 parent: 3:6f9641995072
1137 1112 user: test
1138 1113 date: Thu Jan 01 00:00:00 1970 +0000
1139 1114 obsolete: pruned
1140 1115 summary: add obsolete_e
1141 1116
1142 1117
1143 1118 test the obsolete labels
1144 1119
1145 1120 $ hg log --config ui.logtemplate= --color=debug -r 'phasedivergent()'
1146 1121 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1147 1122 [log.user|user: test]
1148 1123 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1149 1124 [log.instability|instability: orphan, phase-divergent, content-divergent]
1150 1125 [log.summary|summary: add babar]
1151 1126
1152 1127
1153 1128 $ hg log -T default -r 'phasedivergent()' --color=debug
1154 1129 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1155 1130 [log.user|user: test]
1156 1131 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1157 1132 [log.instability|instability: orphan, phase-divergent, content-divergent]
1158 1133 [log.summary|summary: add babar]
1159 1134
1160 1135
1161 1136 $ hg log --config ui.logtemplate= --color=debug -r "obsolete()"
1162 1137 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1163 1138 [log.parent changeset.draft|parent: 3:6f9641995072]
1164 1139 [log.user|user: test]
1165 1140 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1166 1141 [log.obsfate|obsolete: pruned]
1167 1142 [log.summary|summary: add obsolete_e]
1168 1143
1169 1144
1170 1145 $ hg log -T default -r 'obsolete()' --color=debug
1171 1146 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1172 1147 [log.parent changeset.draft|parent: 3:6f9641995072]
1173 1148 [log.user|user: test]
1174 1149 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1175 1150 [log.obsfate|obsolete: pruned]
1176 1151 [log.summary|summary: add obsolete_e]
1177 1152
1178 1153
1179 1154 test summary output
1180 1155
1181 1156 $ hg up -r 'phasedivergent() and orphan()'
1182 1157 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1183 1158 $ hg summary
1184 1159 parent: 7:50c51b361e60 (orphan, phase-divergent, content-divergent)
1185 1160 add babar
1186 1161 branch: default
1187 1162 commit: (clean)
1188 1163 update: 2 new changesets (update)
1189 1164 phases: 4 draft
1190 1165 orphan: 2 changesets
1191 1166 content-divergent: 2 changesets
1192 1167 phase-divergent: 1 changesets
1193 1168 $ hg up -r 'obsolete()'
1194 1169 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1195 1170 $ hg summary
1196 1171 parent: 6:3de5eca88c00 (obsolete)
1197 1172 add obsolete_e
1198 1173 branch: default
1199 1174 commit: (clean)
1200 1175 update: 3 new changesets (update)
1201 1176 phases: 4 draft
1202 1177 orphan: 2 changesets
1203 1178 content-divergent: 2 changesets
1204 1179 phase-divergent: 1 changesets
1205 1180
1206 1181 test debugwhyunstable output
1207 1182
1208 1183 $ hg debugwhyunstable 50c51b361e60
1209 1184 orphan: obsolete parent 3de5eca88c00aa039da7399a220f4a5221faa585
1210 1185 phase-divergent: immutable predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1211 1186 content-divergent: 6f96419950729f3671185b847352890f074f7557 (draft) predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1212 1187
1213 1188 test whyunstable template keyword
1214 1189
1215 1190 $ hg log -r 50c51b361e60 -T '{whyunstable}\n'
1216 1191 orphan: obsolete parent 3de5eca88c00
1217 1192 phase-divergent: immutable predecessor 245bde4270cd
1218 1193 content-divergent: 3:6f9641995072 (draft) predecessor 245bde4270cd
1219 1194 $ hg log -r 50c51b361e60 -T '{whyunstable % "{instability}: {reason} {node|shortest}\n"}'
1220 1195 orphan: obsolete parent 3de5
1221 1196 phase-divergent: immutable predecessor 245b
1222 1197 content-divergent: predecessor 245b
1223 1198
1224 $ hg push ../tmpf -r 50c51b361e60
1225 pushing to ../tmpf
1226 searching for changes
1227 abort: push includes unstable changesets:
1228 50c51b361e60 (orphan, phase-divergent, content-divergent)
1229 [255]
1230
1231
1232 1199 #if serve
1233 1200
1234 1201 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1235 1202 $ cat hg.pid >> $DAEMON_PIDS
1236 1203
1237 1204 check obsolete changeset
1238 1205
1239 1206 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=paper' | grep '<span class="obsolete">'
1240 1207 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1241 1208 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=coal' | grep '<span class="obsolete">'
1242 1209 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1243 1210 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=gitweb' | grep '<span class="logtags">'
1244 1211 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1245 1212 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=monoblue' | grep '<span class="logtags">'
1246 1213 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1247 1214 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=spartan' | grep 'class="obsolete"'
1248 1215 <th class="obsolete">obsolete:</th>
1249 1216 <td class="obsolete">pruned by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
1250 1217
1251 1218 check changeset with instabilities
1252 1219
1253 1220 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=paper' | grep '<span class="instability">'
1254 1221 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1255 1222 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=coal' | grep '<span class="instability">'
1256 1223 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1257 1224 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=gitweb' | grep '<span class="logtags">'
1258 1225 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1259 1226 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=monoblue' | grep '<span class="logtags">'
1260 1227 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1261 1228 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=spartan' | grep 'class="unstable"'
1262 1229 <th class="unstable">unstable:</th>
1263 1230 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1264 1231 <th class="unstable">unstable:</th>
1265 1232 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1266 1233 <th class="unstable">unstable:</th>
1267 1234 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1268 1235
1269 1236 check explanation for an orphan, phase-divergent and content-divergent changeset
1270 1237
1271 1238 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=paper' | egrep '(orphan|phase-divergent|content-divergent):'
1272 1239 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=paper">3de5eca88c00</a><br>
1273 1240 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a><br>
1274 1241 content-divergent: <a href="/rev/6f9641995072?style=paper">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a></td>
1275 1242 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=coal' | egrep '(orphan|phase-divergent|content-divergent):'
1276 1243 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=coal">3de5eca88c00</a><br>
1277 1244 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a><br>
1278 1245 content-divergent: <a href="/rev/6f9641995072?style=coal">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a></td>
1279 1246 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=gitweb' | egrep '(orphan|phase-divergent|content-divergent):'
1280 1247 <td>orphan: obsolete parent <a class="list" href="/rev/3de5eca88c00?style=gitweb">3de5eca88c00</a></td>
1281 1248 <td>phase-divergent: immutable predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1282 1249 <td>content-divergent: <a class="list" href="/rev/6f9641995072?style=gitweb">6f9641995072</a> (draft) predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1283 1250 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=monoblue' | egrep '(orphan|phase-divergent|content-divergent):'
1284 1251 <dd>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=monoblue">3de5eca88c00</a></dd>
1285 1252 <dd>phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1286 1253 <dd>content-divergent: <a href="/rev/6f9641995072?style=monoblue">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1287 1254 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=spartan' | egrep '(orphan|phase-divergent|content-divergent):'
1288 1255 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1289 1256 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1290 1257 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1291 1258
1292 1259 $ killdaemons.py
1293 1260
1294 1261 $ rm hg.pid access.log errors.log
1295 1262
1296 1263 #endif
1297 1264
1298 1265 Test incoming/outgoing with changesets obsoleted remotely, known locally
1299 1266 ===============================================================================
1300 1267
1301 1268 This tests issue 3805
1302 1269
1303 1270 $ hg init repo-issue3805
1304 1271 $ cd repo-issue3805
1305 1272 $ echo "base" > base
1306 1273 $ hg ci -Am "base"
1307 1274 adding base
1308 1275 $ echo "foo" > foo
1309 1276 $ hg ci -Am "A"
1310 1277 adding foo
1311 1278 $ hg clone . ../other-issue3805
1312 1279 updating to branch default
1313 1280 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1314 1281 $ echo "bar" >> foo
1315 1282 $ hg ci --amend
1316 1283 $ cd ../other-issue3805
1317 1284 $ hg log -G
1318 1285 @ 1:29f0c6921ddd (draft) [tip ] A
1319 1286 |
1320 1287 o 0:d20a80d4def3 (draft) [ ] base
1321 1288
1322 1289 $ hg log -G -R ../repo-issue3805
1323 1290 @ 2:323a9c3ddd91 (draft) [tip ] A
1324 1291 |
1325 1292 o 0:d20a80d4def3 (draft) [ ] base
1326 1293
1327 1294 $ hg incoming
1328 1295 comparing with $TESTTMP/tmpe/repo-issue3805
1329 1296 searching for changes
1330 1297 2:323a9c3ddd91 (draft) [tip ] A
1331 1298 $ hg incoming --bundle ../issue3805.hg
1332 1299 comparing with $TESTTMP/tmpe/repo-issue3805
1333 1300 searching for changes
1334 1301 2:323a9c3ddd91 (draft) [tip ] A
1335 1302 $ hg outgoing
1336 1303 comparing with $TESTTMP/tmpe/repo-issue3805
1337 1304 searching for changes
1338 1305 1:29f0c6921ddd (draft) [tip ] A
1339 1306
1340 1307 #if serve
1341 1308
1342 1309 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1343 1310 $ cat hg.pid >> $DAEMON_PIDS
1344 1311
1345 1312 $ hg incoming http://localhost:$HGPORT
1346 1313 comparing with http://localhost:$HGPORT/
1347 1314 searching for changes
1348 1315 2:323a9c3ddd91 (draft) [tip ] A
1349 1316 $ hg outgoing http://localhost:$HGPORT
1350 1317 comparing with http://localhost:$HGPORT/
1351 1318 searching for changes
1352 1319 1:29f0c6921ddd (draft) [tip ] A
1353 1320
1354 1321 $ killdaemons.py
1355 1322
1356 1323 #endif
1357 1324
1358 1325 This tests issue 3814
1359 1326
1360 1327 (nothing to push but locally hidden changeset)
1361 1328
1362 1329 $ cd ..
1363 1330 $ hg init repo-issue3814
1364 1331 $ cd repo-issue3805
1365 1332 $ hg push -r 323a9c3ddd91 ../repo-issue3814
1366 1333 pushing to ../repo-issue3814
1367 1334 searching for changes
1368 1335 adding changesets
1369 1336 adding manifests
1370 1337 adding file changes
1371 1338 added 2 changesets with 2 changes to 2 files
1372 1339 1 new obsolescence markers
1373 1340 $ hg out ../repo-issue3814
1374 1341 comparing with ../repo-issue3814
1375 1342 searching for changes
1376 1343 no changes found
1377 1344 [1]
1378 1345
1379 1346 Test that a local tag blocks a changeset from being hidden
1380 1347
1381 1348 $ hg tag -l visible -r 1 --hidden
1382 1349 $ hg log -G
1383 1350 @ 2:323a9c3ddd91 (draft) [tip ] A
1384 1351 |
1385 1352 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A [rewritten using amend as 2:323a9c3ddd91]
1386 1353 |/
1387 1354 o 0:d20a80d4def3 (draft) [ ] base
1388 1355
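The rule exercised here, roughly: a revision is hidden only when it is hidable (obsolete) and not pinned by a bookmark, a local tag, or a working-directory parent. A simplified sketch of that computation (the real one in mercurial/repoview.py additionally un-hides hidden ancestors of visible revisions):

  from mercurial import repoview

  def roughhidden(repo):
      hidable = repoview.hideablerevs(repo)  # the obsolete revisions
      pinned = repoview.pinnedrevs(repo)     # bookmarks, local tags, parents
      return hidable - pinned
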
1389 1356 Test that removing a local tag does not cause some commands to fail
1390 1357
1391 1358 $ hg tag -l -r tip tiptag
1392 1359 $ hg tags
1393 1360 tiptag 2:323a9c3ddd91
1394 1361 tip 2:323a9c3ddd91
1395 1362 visible 1:29f0c6921ddd
1396 1363 $ hg --config extensions.strip= strip -r tip --no-backup
1397 1364 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1398 1365 $ hg tags
1399 1366 visible 1:29f0c6921ddd
1400 1367 tip 1:29f0c6921ddd
1401 1368
1402 1369 Test bundle overlay onto a hidden revision
1403 1370
1404 1371 $ cd ..
1405 1372 $ hg init repo-bundleoverlay
1406 1373 $ cd repo-bundleoverlay
1407 1374 $ echo "A" > foo
1408 1375 $ hg ci -Am "A"
1409 1376 adding foo
1410 1377 $ echo "B" >> foo
1411 1378 $ hg ci -m "B"
1412 1379 $ hg up 0
1413 1380 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1414 1381 $ echo "C" >> foo
1415 1382 $ hg ci -m "C"
1416 1383 created new head
1417 1384 $ hg log -G
1418 1385 @ 2:c186d7714947 (draft) [tip ] C
1419 1386 |
1420 1387 | o 1:44526ebb0f98 (draft) [ ] B
1421 1388 |/
1422 1389 o 0:4b34ecfb0d56 (draft) [ ] A
1423 1390
1424 1391
1425 1392 $ hg clone -r1 . ../other-bundleoverlay
1426 1393 adding changesets
1427 1394 adding manifests
1428 1395 adding file changes
1429 1396 added 2 changesets with 2 changes to 1 files
1430 1397 new changesets 4b34ecfb0d56:44526ebb0f98 (2 drafts)
1431 1398 updating to branch default
1432 1399 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1433 1400 $ cd ../other-bundleoverlay
1434 1401 $ echo "B+" >> foo
1435 1402 $ hg ci --amend -m "B+"
1436 1403 $ hg log -G --hidden
1437 1404 @ 2:b7d587542d40 (draft) [tip ] B+
1438 1405 |
1439 1406 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B [rewritten using amend as 2:b7d587542d40]
1440 1407 |/
1441 1408 o 0:4b34ecfb0d56 (draft) [ ] A
1442 1409
1443 1410
1444 1411 #if repobundlerepo
1445 1412 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1446 1413 comparing with ../repo-bundleoverlay
1447 1414 searching for changes
1448 1415 1:44526ebb0f98 (draft) [ ] B
1449 1416 2:c186d7714947 (draft) [tip ] C
1450 1417 $ hg log -G -R ../bundleoverlay.hg
1451 1418 o 3:c186d7714947 (draft) [tip ] C
1452 1419 |
1453 1420 | @ 2:b7d587542d40 (draft) [ ] B+
1454 1421 |/
1455 1422 o 0:4b34ecfb0d56 (draft) [ ] A
1456 1423
1457 1424 #endif
1458 1425
1459 1426 #if serve
1460 1427
1461 1428 Test issue 4506
1462 1429
1463 1430 $ cd ..
1464 1431 $ hg init repo-issue4506
1465 1432 $ cd repo-issue4506
1466 1433 $ echo "0" > foo
1467 1434 $ hg add foo
1468 1435 $ hg ci -m "content-0"
1469 1436
1470 1437 $ hg up null
1471 1438 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1472 1439 $ echo "1" > bar
1473 1440 $ hg add bar
1474 1441 $ hg ci -m "content-1"
1475 1442 created new head
1476 1443 $ hg up 0
1477 1444 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1478 1445 $ hg graft 1
1479 1446 grafting 1:1c9eddb02162 "content-1" (tip)
1480 1447
1481 1448 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1482 1449 1 new obsolescence markers
1483 1450 obsoleted 1 changesets
1484 1451
1485 1452 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1486 1453 $ cat hg.pid >> $DAEMON_PIDS
1487 1454
1488 1455 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1489 1456 404 Not Found
1490 1457 [1]
1491 1458 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1492 1459 200 Script output follows
1493 1460 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1494 1461 200 Script output follows
1495 1462
1496 1463 $ killdaemons.py
1497 1464
1498 1465 #endif
1499 1466
1500 1467 Test heads computation on pending index changes with obsolescence markers
1501 1468 $ cd ..
1502 1469 $ cat >$TESTTMP/test_extension.py << EOF
1503 1470 > from __future__ import absolute_import
1504 1471 > from mercurial.i18n import _
1505 1472 > from mercurial import cmdutil, pycompat, registrar
1506 1473 > from mercurial.utils import stringutil
1507 1474 >
1508 1475 > cmdtable = {}
1509 1476 > command = registrar.command(cmdtable)
1510 1477 > @command(b"amendtransient",[], _(b'hg amendtransient [rev]'))
1511 1478 > def amend(ui, repo, *pats, **opts):
1512 1479 > opts = pycompat.byteskwargs(opts)
1513 1480 > opts[b'message'] = b'Test'
1514 1481 > opts[b'logfile'] = None
1515 1482 > cmdutil.amend(ui, repo, repo[b'.'], {}, pats, opts)
1516 1483 > ui.write(b'%s\n' % stringutil.pprint(repo.changelog.headrevs()))
1517 1484 > EOF
1518 1485 $ cat >> $HGRCPATH << EOF
1519 1486 > [extensions]
1520 1487 > testextension=$TESTTMP/test_extension.py
1521 1488 > EOF
1522 1489 $ hg init repo-issue-nativerevs-pending-changes
1523 1490 $ cd repo-issue-nativerevs-pending-changes
1524 1491 $ mkcommit a
1525 1492 $ mkcommit b
1526 1493 $ hg up ".^"
1527 1494 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1528 1495 $ echo aa > a
1529 1496 $ hg amendtransient
1530 1497 1 new orphan changesets
1531 1498 [1, 2]
1532 1499
1533 1500 Test cache consistency for the visible filter
1534 1501 1) We want to make sure that the cached filtered revs are invalidated when
1535 1502 bookmarks change
1536 1503 $ cd ..
1537 1504 $ cat >$TESTTMP/test_extension.py << EOF
1538 1505 > from __future__ import absolute_import, print_function
1539 1506 > import weakref
1540 1507 > from mercurial import (
1541 1508 > bookmarks,
1542 1509 > cmdutil,
1543 1510 > extensions,
1544 1511 > repoview,
1545 1512 > )
1546 1513 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1547 1514 > reporef = weakref.ref(bkmstoreinst._repo)
1548 1515 > def trhook(tr):
1549 1516 > repo = reporef()
1550 1517 > hidden1 = repoview.computehidden(repo)
1551 1518 > hidden = repoview.filterrevs(repo, b'visible')
1552 1519 > if sorted(hidden1) != sorted(hidden):
1553 1520 > print("cache inconsistency")
1554 1521 > bkmstoreinst._repo.currenttransaction().addpostclose(b'test_extension', trhook)
1555 1522 > orig(bkmstoreinst, *args, **kwargs)
1556 1523 > def extsetup(ui):
1557 1524 > extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
1558 1525 > _bookmarkchanged)
1559 1526 > EOF
1560 1527
1561 1528 $ hg init repo-cache-inconsistency
1562 1529 $ cd repo-issue-nativerevs-pending-changes
1563 1530 $ mkcommit a
1564 1531 a already tracked!
1565 1532 $ mkcommit b
1566 1533 $ hg id
1567 1534 13bedc178fce tip
1568 1535 $ echo "hello" > b
1569 1536 $ hg commit --amend -m "message"
1570 1537 $ hg book bookb -r 13bedc178fce --hidden
1571 1538 bookmarking hidden changeset 13bedc178fce
1572 1539 (hidden revision '13bedc178fce' was rewritten as: a9b1f8652753)
1573 1540 $ hg log -r 13bedc178fce
1574 1541 4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
1575 1542 $ hg book -d bookb
1576 1543 $ hg log -r 13bedc178fce
1577 1544 abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
1578 1545 (use --hidden to access hidden revisions)
1579 1546 [255]
1580 1547
1581 1548 Empty out the test extension, as it isn't compatible with later parts
1582 1549 of the test.
1583 1550 $ echo > $TESTTMP/test_extension.py
1584 1551
1585 1552 Test the ability to pull a changeset whose obsolescence markers already apply locally
1586 1553 (issue4945)
1587 1554
1588 1555 $ cd ..
1589 1556 $ hg init issue4845
1590 1557 $ cd issue4845
1591 1558
1592 1559 $ echo foo > f0
1593 1560 $ hg add f0
1594 1561 $ hg ci -m '0'
1595 1562 $ echo foo > f1
1596 1563 $ hg add f1
1597 1564 $ hg ci -m '1'
1598 1565 $ echo foo > f2
1599 1566 $ hg add f2
1600 1567 $ hg ci -m '2'
1601 1568
1602 1569 $ echo bar > f2
1603 1570 $ hg commit --amend --config experimental.evolution.createmarkers=True
1604 1571 $ hg log -G
1605 1572 @ 3:b0551702f918 (draft) [tip ] 2
1606 1573 |
1607 1574 o 1:e016b03fd86f (draft) [ ] 1
1608 1575 |
1609 1576 o 0:a78f55e5508c (draft) [ ] 0
1610 1577
1611 1578 $ hg log -G --hidden
1612 1579 @ 3:b0551702f918 (draft) [tip ] 2
1613 1580 |
1614 1581 | x 2:e008cf283490 (draft *obsolete*) [ ] 2 [rewritten using amend as 3:b0551702f918]
1615 1582 |/
1616 1583 o 1:e016b03fd86f (draft) [ ] 1
1617 1584 |
1618 1585 o 0:a78f55e5508c (draft) [ ] 0
1619 1586
1620 1587
1621 1588 $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
1622 1589 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg
1623 1590 $ hg debugobsolete
1624 1591 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1625 1592 $ hg log -G
1626 1593 @ 2:b0551702f918 (draft) [tip ] 2
1627 1594 |
1628 1595 o 1:e016b03fd86f (draft) [ ] 1
1629 1596 |
1630 1597 o 0:a78f55e5508c (draft) [ ] 0
1631 1598
1632 1599 $ hg log -G --hidden
1633 1600 @ 2:b0551702f918 (draft) [tip ] 2
1634 1601 |
1635 1602 o 1:e016b03fd86f (draft) [ ] 1
1636 1603 |
1637 1604 o 0:a78f55e5508c (draft) [ ] 0
1638 1605
1639 1606 $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
1640 1607 Stream params: {Compression: BZ}
1641 1608 changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
1642 1609 e008cf2834908e5d6b0f792a9d4b0e2272260fb8
1643 1610 cache:rev-branch-cache -- {} (mandatory: False)
1644 1611 phase-heads -- {} (mandatory: True)
1645 1612 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 draft
1646 1613
1647 1614 #if repobundlerepo
1648 1615 $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
1649 1616 pulling from .hg/strip-backup/e008cf283490-ede36964-backup.hg
1650 1617 searching for changes
1651 1618 no changes found
1652 1619 #endif
1653 1620 $ hg debugobsolete
1654 1621 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1655 1622 $ hg log -G
1656 1623 @ 2:b0551702f918 (draft) [tip ] 2
1657 1624 |
1658 1625 o 1:e016b03fd86f (draft) [ ] 1
1659 1626 |
1660 1627 o 0:a78f55e5508c (draft) [ ] 0
1661 1628
1662 1629 $ hg log -G --hidden
1663 1630 @ 2:b0551702f918 (draft) [tip ] 2
1664 1631 |
1665 1632 o 1:e016b03fd86f (draft) [ ] 1
1666 1633 |
1667 1634 o 0:a78f55e5508c (draft) [ ] 0
1668 1635
1669 1636
1670 1637 Testing that strip removes markers:
1671 1638
1672 1639 $ hg strip -r 1 --config extensions.strip=
1673 1640 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1674 1641 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg
1675 1642 $ hg debugobsolete
1676 1643 $ hg log -G
1677 1644 @ 0:a78f55e5508c (draft) [tip ] 0
1678 1645
1679 1646 $ hg log -G --hidden
1680 1647 @ 0:a78f55e5508c (draft) [tip ] 0
1681 1648
1682 1649 $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1683 1650 Stream params: {Compression: BZ}
1684 1651 changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
1685 1652 e016b03fd86fcccc54817d120b90b751aaf367d6
1686 1653 b0551702f918510f01ae838ab03a463054c67b46
1687 1654 cache:rev-branch-cache -- {} (mandatory: False)
1688 1655 obsmarkers -- {} (mandatory: True)
1689 1656 version: 1 (92 bytes)
1690 1657 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1691 1658 phase-heads -- {} (mandatory: True)
1692 1659 b0551702f918510f01ae838ab03a463054c67b46 draft
1693 1660
1694 1661 $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1695 1662 adding changesets
1696 1663 adding manifests
1697 1664 adding file changes
1698 1665 added 2 changesets with 2 changes to 2 files
1699 1666 1 new obsolescence markers
1700 1667 new changesets e016b03fd86f:b0551702f918 (2 drafts)
1701 1668 (run 'hg update' to get a working copy)
1702 1669 $ hg debugobsolete | sort
1703 1670 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1704 1671 $ hg log -G
1705 1672 o 2:b0551702f918 (draft) [tip ] 2
1706 1673 |
1707 1674 o 1:e016b03fd86f (draft) [ ] 1
1708 1675 |
1709 1676 @ 0:a78f55e5508c (draft) [ ] 0
1710 1677
1711 1678 $ hg log -G --hidden
1712 1679 o 2:b0551702f918 (draft) [tip ] 2
1713 1680 |
1714 1681 o 1:e016b03fd86f (draft) [ ] 1
1715 1682 |
1716 1683 @ 0:a78f55e5508c (draft) [ ] 0
1717 1684
1718 1685 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1719 1686 only a subset of them is displayed (because of the --rev option)
1720 1687 $ hg init doindexrev
1721 1688 $ cd doindexrev
1722 1689 $ echo a > a
1723 1690 $ hg ci -Am a
1724 1691 adding a
1725 1692 $ hg ci --amend -m aa
1726 1693 $ echo b > b
1727 1694 $ hg ci -Am b
1728 1695 adding b
1729 1696 $ hg ci --amend -m bb
1730 1697 $ echo c > c
1731 1698 $ hg ci -Am c
1732 1699 adding c
1733 1700 $ hg ci --amend -m cc
1734 1701 $ echo d > d
1735 1702 $ hg ci -Am d
1736 1703 adding d
1737 1704 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1738 1705 $ hg debugobsolete --index --rev "3+7"
1739 1706 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1740 1707 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1741 1708 $ hg debugobsolete --index --rev "3+7" -Tjson
1742 1709 [
1743 1710 {
1744 1711 "date": [0, 0],
1745 1712 "flag": 0,
1746 1713 "index": 1,
1747 1714 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1748 1715 "prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1749 1716 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1750 1717 },
1751 1718 {
1752 1719 "date": [0, 0],
1753 1720 "flag": 0,
1754 1721 "index": 3,
1755 1722 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1756 1723 "prednode": "4715cf767440ed891755448016c2b8cf70760c30",
1757 1724 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1758 1725 }
1759 1726 ]
1760 1727
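The index printed above is simply a marker's position in the obsstore's ordering, so it stays stable however the listing is filtered. A hedged sketch (the generator is ours, but iterating the obsstore does yield markers in that order):

  def markerindices(repo, wanted):
      # yield (position, marker) pairs for the markers we care about
      for i, marker in enumerate(repo.obsstore):
          if marker in wanted:
              yield i, marker
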
1761 1728 Test the --delete option of the debugobsolete command
1762 1729 $ hg debugobsolete --index
1763 1730 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1764 1731 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1765 1732 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1766 1733 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1767 1734 $ hg debugobsolete --delete 1 --delete 3
1768 1735 deleted 2 obsolescence markers
1769 1736 $ hg debugobsolete
1770 1737 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1771 1738 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1772 1739
1773 1740 Test adding a changeset after obsmarkers affecting it
1774 1741 (e.g. during pull or unbundle)
1775 1742
1776 1743 $ mkcommit e
1777 1744 $ hg bundle -r . --base .~1 ../bundle-2.hg
1778 1745 1 changesets found
1779 1746 $ getid .
1780 1747 $ hg --config extensions.strip= strip -r .
1781 1748 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1782 1749 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg
1783 1750 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1784 1751 1 new obsolescence markers
1785 1752 $ hg unbundle ../bundle-2.hg
1786 1753 adding changesets
1787 1754 adding manifests
1788 1755 adding file changes
1789 1756 added 1 changesets with 1 changes to 1 files
1790 1757 (1 other changesets obsolete on arrival)
1791 1758 (run 'hg update' to get a working copy)
1792 1759 $ hg log -G
1793 1760 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1794 1761 |
1795 1762 | o 6:4715cf767440 (draft) [ ] d
1796 1763 |/
1797 1764 o 5:29346082e4a9 (draft) [ ] cc
1798 1765 |
1799 1766 o 3:d27fb9b06607 (draft) [ ] bb
1800 1767 |
1801 1768 | o 2:6fdef60fcbab (draft) [ ] b
1802 1769 |/
1803 1770 o 1:f9bd49731b0b (draft) [ ] aa
1804 1771
1805 1772
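"Obsolete on arrival" above means the incoming changeset was already the predecessor in a stored marker, so it lands hidden. A sketch of that detection, assuming an in-process repo (obsstore.successors maps a predecessor node to its markers; the helper is ours):

  def obsoletedonarrival(repo, addednodes):
      # nodes that stored markers already declare as rewritten or pruned
      known = repo.obsstore.successors
      return [n for n in addednodes if n in known]
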
1806 1773 $ cd ..
1807 1774
1808 1775 Test issue 5783
1809 1776
1810 1777 $ hg init issue-5783 --config format.obsstore-version=0
1811 1778 $ cd issue-5783
1812 1779 $ touch a.cpp
1813 1780 $ hg add a.cpp
1814 1781 $ hg commit -m 'Add a.cpp'
1815 1782 $ echo 'Hello' > a.cpp
1816 1783 $ hg amend -n 'Testing::Obsstore' --config format.obsstore-version=0 --config extensions.amend=
1817 1784 $ touch b.cpp
1818 1785 $ hg add b.cpp
1819 1786 $ hg commit -m 'Add b.cpp'
1820 1787 $ echo 'Hello' > b.cpp
1821 1788 $ hg amend -n 'Testing::Obsstore2' --config extensions.amend=
1822 1789 $ hg debugobsolete
1823 1790 d1b09fe3ad2b2a03e23a72f0c582e29a49570145 1a1a11184d2588af24e767e5335d5d9d07e8c550 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore', 'operation': 'amend', 'user': 'test'}
1824 1791 1bfd8e3868f641e048b6667cd672c68932f26d00 79959ca316d5b27ac6be1dd0cfd0843a5b5412eb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore2', 'operation': 'amend', 'user': 'test'}
1825 1792 $ cd ..