exchange: check actually missing revs for obsolete / unstable revs (issue6372)...
Manuel Jacob
r46101:c26335fa default
@@ -1,3157 +1,3162 b''
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import weakref
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 nullrev,
18 18 )
19 19 from .thirdparty import attr
20 20 from . import (
21 21 bookmarks as bookmod,
22 22 bundle2,
23 23 changegroup,
24 24 discovery,
25 25 error,
26 26 exchangev2,
27 27 lock as lockmod,
28 28 logexchange,
29 29 narrowspec,
30 30 obsolete,
31 31 obsutil,
32 32 phases,
33 33 pushkey,
34 34 pycompat,
35 35 scmutil,
36 36 sslutil,
37 37 streamclone,
38 38 url as urlmod,
39 39 util,
40 40 wireprototypes,
41 41 )
42 42 from .interfaces import repository
43 43 from .utils import (
44 44 hashutil,
45 45 stringutil,
46 46 )
47 47
48 48 urlerr = util.urlerr
49 49 urlreq = util.urlreq
50 50
51 51 _NARROWACL_SECTION = b'narrowacl'
52 52
53 53 # Maps bundle version human names to changegroup versions.
54 54 _bundlespeccgversions = {
55 55 b'v1': b'01',
56 56 b'v2': b'02',
57 57 b'packed1': b's1',
58 58 b'bundle2': b'02', # legacy
59 59 }
60 60
61 61 # Maps bundle version with content opts to choose which part to bundle
62 62 _bundlespeccontentopts = {
63 63 b'v1': {
64 64 b'changegroup': True,
65 65 b'cg.version': b'01',
66 66 b'obsolescence': False,
67 67 b'phases': False,
68 68 b'tagsfnodescache': False,
69 69 b'revbranchcache': False,
70 70 },
71 71 b'v2': {
72 72 b'changegroup': True,
73 73 b'cg.version': b'02',
74 74 b'obsolescence': False,
75 75 b'phases': False,
76 76 b'tagsfnodescache': True,
77 77 b'revbranchcache': True,
78 78 },
79 79 b'packed1': {b'cg.version': b's1'},
80 80 }
81 81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
82 82
83 83 _bundlespecvariants = {
84 84 b"streamv2": {
85 85 b"changegroup": False,
86 86 b"streamv2": True,
87 87 b"tagsfnodescache": False,
88 88 b"revbranchcache": False,
89 89 }
90 90 }
91 91
92 92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
93 93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94 94
95 95
96 96 @attr.s
97 97 class bundlespec(object):
98 98 compression = attr.ib()
99 99 wirecompression = attr.ib()
100 100 version = attr.ib()
101 101 wireversion = attr.ib()
102 102 params = attr.ib()
103 103 contentopts = attr.ib()
104 104
105 105
106 106 def parsebundlespec(repo, spec, strict=True):
107 107 """Parse a bundle string specification into parts.
108 108
109 109 Bundle specifications denote a well-defined bundle/exchange format.
110 110 The content of a given specification should not change over time in
111 111 order to ensure that bundles produced by a newer version of Mercurial are
112 112 readable from an older version.
113 113
114 114 The string currently has the form:
115 115
116 116 <compression>-<type>[;<parameter0>[;<parameter1>]]
117 117
118 118 Where <compression> is one of the supported compression formats
119 119 and <type> is (currently) a version string. A ";" can follow the type and
120 120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
121 121 pairs.
122 122
123 123 If ``strict`` is True (the default) <compression> is required. Otherwise,
124 124 it is optional.
125 125
126 126 Returns a bundlespec object of (compression, version, parameters).
127 127 Compression will be ``None`` if not in strict mode and a compression isn't
128 128 defined.
129 129
130 130 An ``InvalidBundleSpecification`` is raised when the specification is
131 131 not syntactically well formed.
132 132
133 133 An ``UnsupportedBundleSpecification`` is raised when the compression or
134 134 bundle type/version is not recognized.
135 135
136 136 Note: this function will likely eventually return a more complex data
137 137 structure, including bundle2 part information.
138 138 """
139 139
140 140 def parseparams(s):
141 141 if b';' not in s:
142 142 return s, {}
143 143
144 144 params = {}
145 145 version, paramstr = s.split(b';', 1)
146 146
147 147 for p in paramstr.split(b';'):
148 148 if b'=' not in p:
149 149 raise error.InvalidBundleSpecification(
150 150 _(
151 151 b'invalid bundle specification: '
152 152 b'missing "=" in parameter: %s'
153 153 )
154 154 % p
155 155 )
156 156
157 157 key, value = p.split(b'=', 1)
158 158 key = urlreq.unquote(key)
159 159 value = urlreq.unquote(value)
160 160 params[key] = value
161 161
162 162 return version, params
163 163
164 164 if strict and b'-' not in spec:
165 165 raise error.InvalidBundleSpecification(
166 166 _(
167 167 b'invalid bundle specification; '
168 168 b'must be prefixed with compression: %s'
169 169 )
170 170 % spec
171 171 )
172 172
173 173 if b'-' in spec:
174 174 compression, version = spec.split(b'-', 1)
175 175
176 176 if compression not in util.compengines.supportedbundlenames:
177 177 raise error.UnsupportedBundleSpecification(
178 178 _(b'%s compression is not supported') % compression
179 179 )
180 180
181 181 version, params = parseparams(version)
182 182
183 183 if version not in _bundlespeccgversions:
184 184 raise error.UnsupportedBundleSpecification(
185 185 _(b'%s is not a recognized bundle version') % version
186 186 )
187 187 else:
188 188 # Value could be just the compression or just the version, in which
189 189 # case some defaults are assumed (but only when not in strict mode).
190 190 assert not strict
191 191
192 192 spec, params = parseparams(spec)
193 193
194 194 if spec in util.compengines.supportedbundlenames:
195 195 compression = spec
196 196 version = b'v1'
197 197 # Generaldelta repos require v2.
198 198 if b'generaldelta' in repo.requirements:
199 199 version = b'v2'
200 200 # Modern compression engines require v2.
201 201 if compression not in _bundlespecv1compengines:
202 202 version = b'v2'
203 203 elif spec in _bundlespeccgversions:
204 204 if spec == b'packed1':
205 205 compression = b'none'
206 206 else:
207 207 compression = b'bzip2'
208 208 version = spec
209 209 else:
210 210 raise error.UnsupportedBundleSpecification(
211 211 _(b'%s is not a recognized bundle specification') % spec
212 212 )
213 213
214 214 # Bundle version 1 only supports a known set of compression engines.
215 215 if version == b'v1' and compression not in _bundlespecv1compengines:
216 216 raise error.UnsupportedBundleSpecification(
217 217 _(b'compression engine %s is not supported on v1 bundles')
218 218 % compression
219 219 )
220 220
221 221 # The specification for packed1 can optionally declare the data formats
222 222 # required to apply it. If we see this metadata, compare against what the
223 223 # repo supports and error if the bundle isn't compatible.
224 224 if version == b'packed1' and b'requirements' in params:
225 225 requirements = set(params[b'requirements'].split(b','))
226 226 missingreqs = requirements - repo.supportedformats
227 227 if missingreqs:
228 228 raise error.UnsupportedBundleSpecification(
229 229 _(b'missing support for repository features: %s')
230 230 % b', '.join(sorted(missingreqs))
231 231 )
232 232
233 233 # Compute contentopts based on the version
234 234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
235 235
236 236 # Process the variants
237 237 if b"stream" in params and params[b"stream"] == b"v2":
238 238 variant = _bundlespecvariants[b"streamv2"]
239 239 contentopts.update(variant)
240 240
241 241 engine = util.compengines.forbundlename(compression)
242 242 compression, wirecompression = engine.bundletype()
243 243 wireversion = _bundlespeccgversions[version]
244 244
245 245 return bundlespec(
246 246 compression, wirecompression, version, wireversion, params, contentopts
247 247 )
248 248
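# Illustrative sketch (editor's addition, not part of this commit): per the
# grammar documented above, a spec such as b'zstd-v2;obsolescence=true'
# would parse as
#     compression = b'zstd', version = b'v2',
#     params = {b'obsolescence': b'true'}
# assuming b'zstd' is listed in util.compengines.supportedbundlenames.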
249 249
250 250 def readbundle(ui, fh, fname, vfs=None):
251 251 header = changegroup.readexactly(fh, 4)
252 252
253 253 alg = None
254 254 if not fname:
255 255 fname = b"stream"
256 256 if not header.startswith(b'HG') and header.startswith(b'\0'):
257 257 fh = changegroup.headerlessfixup(fh, header)
258 258 header = b"HG10"
259 259 alg = b'UN'
260 260 elif vfs:
261 261 fname = vfs.join(fname)
262 262
263 263 magic, version = header[0:2], header[2:4]
264 264
265 265 if magic != b'HG':
266 266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
267 267 if version == b'10':
268 268 if alg is None:
269 269 alg = changegroup.readexactly(fh, 2)
270 270 return changegroup.cg1unpacker(fh, alg)
271 271 elif version.startswith(b'2'):
272 272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
273 273 elif version == b'S1':
274 274 return streamclone.streamcloneapplier(fh)
275 275 else:
276 276 raise error.Abort(
277 277 _(b'%s: unknown bundle version %s') % (fname, version)
278 278 )
279 279
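# Usage sketch (editor's addition): readbundle dispatches on the 4-byte
# header -- b'HG10' plus a 2-byte compression code goes to cg1unpacker,
# a b'HG2'-family header (e.g. b'HG20') to bundle2, and b'HGS1' to the
# stream clone applier -- so a caller only needs:
#     with open(bundlepath, 'rb') as fh:   # 'bundlepath' is hypothetical
#         gen = readbundle(ui, fh, bundlepath)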
280 280
281 281 def getbundlespec(ui, fh):
282 282 """Infer the bundlespec from a bundle file handle.
283 283
284 284 The input file handle is seeked and the original seek position is not
285 285 restored.
286 286 """
287 287
288 288 def speccompression(alg):
289 289 try:
290 290 return util.compengines.forbundletype(alg).bundletype()[0]
291 291 except KeyError:
292 292 return None
293 293
294 294 b = readbundle(ui, fh, None)
295 295 if isinstance(b, changegroup.cg1unpacker):
296 296 alg = b._type
297 297 if alg == b'_truncatedBZ':
298 298 alg = b'BZ'
299 299 comp = speccompression(alg)
300 300 if not comp:
301 301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
302 302 return b'%s-v1' % comp
303 303 elif isinstance(b, bundle2.unbundle20):
304 304 if b'Compression' in b.params:
305 305 comp = speccompression(b.params[b'Compression'])
306 306 if not comp:
307 307 raise error.Abort(
308 308 _(b'unknown compression algorithm: %s') % comp
309 309 )
310 310 else:
311 311 comp = b'none'
312 312
313 313 version = None
314 314 for part in b.iterparts():
315 315 if part.type == b'changegroup':
316 316 version = part.params[b'version']
317 317 if version in (b'01', b'02'):
318 318 version = b'v2'
319 319 else:
320 320 raise error.Abort(
321 321 _(
322 322 b'changegroup version %s does not have '
323 323 b'a known bundlespec'
324 324 )
325 325 % version,
326 326 hint=_(b'try upgrading your Mercurial client'),
327 327 )
328 328 elif part.type == b'stream2' and version is None:
329 329 # A stream2 part requires to be part of a v2 bundle
330 330 requirements = urlreq.unquote(part.params[b'requirements'])
331 331 splitted = requirements.split()
332 332 params = bundle2._formatrequirementsparams(splitted)
333 333 return b'none-v2;stream=v2;%s' % params
334 334
335 335 if not version:
336 336 raise error.Abort(
337 337 _(b'could not identify changegroup version in bundle')
338 338 )
339 339
340 340 return b'%s-%s' % (comp, version)
341 341 elif isinstance(b, streamclone.streamcloneapplier):
342 342 requirements = streamclone.readbundle1header(fh)[2]
343 343 formatted = bundle2._formatrequirementsparams(requirements)
344 344 return b'none-packed1;%s' % formatted
345 345 else:
346 346 raise error.Abort(_(b'unknown bundle type: %s') % b)
347 347
348 348
349 349 def _computeoutgoing(repo, heads, common):
350 350 """Computes which revs are outgoing given a set of common
351 351 and a set of heads.
352 352
353 353 This is a separate function so extensions can have access to
354 354 the logic.
355 355
356 356 Returns a discovery.outgoing object.
357 357 """
358 358 cl = repo.changelog
359 359 if common:
360 360 hasnode = cl.hasnode
361 361 common = [n for n in common if hasnode(n)]
362 362 else:
363 363 common = [nullid]
364 364 if not heads:
365 365 heads = cl.heads()
366 366 return discovery.outgoing(repo, common, heads)
367 367
368 368
369 369 def _checkpublish(pushop):
370 370 repo = pushop.repo
371 371 ui = repo.ui
372 372 behavior = ui.config(b'experimental', b'auto-publish')
373 373 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
374 374 return
375 375 remotephases = listkeys(pushop.remote, b'phases')
376 376 if not remotephases.get(b'publishing', False):
377 377 return
378 378
379 379 if pushop.revs is None:
380 380 published = repo.filtered(b'served').revs(b'not public()')
381 381 else:
382 382 published = repo.revs(b'::%ln - public()', pushop.revs)
383 383 if published:
384 384 if behavior == b'warn':
385 385 ui.warn(
386 386 _(b'%i changesets about to be published\n') % len(published)
387 387 )
388 388 elif behavior == b'confirm':
389 389 if ui.promptchoice(
390 390 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
391 391 % len(published)
392 392 ):
393 393 raise error.Abort(_(b'user quit'))
394 394 elif behavior == b'abort':
395 395 msg = _(b'push would publish %i changesets') % len(published)
396 396 hint = _(
397 397 b"use --publish or adjust 'experimental.auto-publish'"
398 398 b" config"
399 399 )
400 400 raise error.Abort(msg, hint=hint)
401 401
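# Configuration sketch (editor's addition): the check above is driven by
# the 'experimental.auto-publish' setting, e.g. in an hgrc:
#     [experimental]
#     auto-publish = confirm    # other recognized values: warn, abort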
402 402
403 403 def _forcebundle1(op):
404 404 """return true if a pull/push must use bundle1
405 405
406 406 This function is used to allow testing of the older bundle version"""
407 407 ui = op.repo.ui
408 408 # The goal of this config is to allow developers to choose the bundle
409 409 # version used during exchange. This is especially handy during tests.
410 410 # Value is a list of bundle versions to pick from; the highest available
411 411 # version should be used.
412 412 #
413 413 # developer config: devel.legacy.exchange
414 414 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 416 return forcebundle1 or not op.remote.capable(b'bundle2')
417 417
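# Developer-config sketch (editor's addition): tests can force the legacy
# exchange format through the config read above:
#     [devel]
#     legacy.exchange = bundle1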
418 418
419 419 class pushoperation(object):
420 420 """A object that represent a single push operation
421 421
422 422 Its purpose is to carry push related state and very common operations.
423 423
424 424 A new pushoperation should be created at the beginning of each push and
425 425 discarded afterward.
426 426 """
427 427
428 428 def __init__(
429 429 self,
430 430 repo,
431 431 remote,
432 432 force=False,
433 433 revs=None,
434 434 newbranch=False,
435 435 bookmarks=(),
436 436 publish=False,
437 437 pushvars=None,
438 438 ):
439 439 # repo we push from
440 440 self.repo = repo
441 441 self.ui = repo.ui
442 442 # repo we push to
443 443 self.remote = remote
444 444 # force option provided
445 445 self.force = force
446 446 # revs to be pushed (None is "all")
447 447 self.revs = revs
448 448 # bookmark explicitly pushed
449 449 self.bookmarks = bookmarks
450 450 # allow push of new branch
451 451 self.newbranch = newbranch
452 452 # steps already performed
453 453 # (used to check what steps have already been performed through bundle2)
454 454 self.stepsdone = set()
455 455 # Integer version of the changegroup push result
456 456 # - None means nothing to push
457 457 # - 0 means HTTP error
458 458 # - 1 means we pushed and remote head count is unchanged *or*
459 459 # we have outgoing changesets but refused to push
460 460 # - other values as described by addchangegroup()
461 461 self.cgresult = None
462 462 # Boolean value for the bookmark push
463 463 self.bkresult = None
464 464 # discover.outgoing object (contains common and outgoing data)
465 465 self.outgoing = None
466 466 # all remote topological heads before the push
467 467 self.remoteheads = None
468 468 # Details of the remote branch pre and post push
469 469 #
470 470 # mapping: {'branch': ([remoteheads],
471 471 # [newheads],
472 472 # [unsyncedheads],
473 473 # [discardedheads])}
474 474 # - branch: the branch name
475 475 # - remoteheads: the list of remote heads known locally
476 476 # None if the branch is new
477 477 # - newheads: the new remote heads (known locally) with outgoing pushed
478 478 # - unsyncedheads: the list of remote heads unknown locally.
479 479 # - discardedheads: the list of remote heads made obsolete by the push
480 480 self.pushbranchmap = None
481 481 # testable as a boolean indicating if any nodes are missing locally.
482 482 self.incoming = None
483 483 # summary of the remote phase situation
484 484 self.remotephases = None
485 485 # phase changes that must be pushed alongside the changesets
486 486 self.outdatedphases = None
487 487 # phase changes that must be pushed if the changeset push fails
488 488 self.fallbackoutdatedphases = None
489 489 # outgoing obsmarkers
490 490 self.outobsmarkers = set()
491 491 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
492 492 self.outbookmarks = []
493 493 # transaction manager
494 494 self.trmanager = None
495 495 # map { pushkey partid -> callback handling failure}
496 496 # used to handle exception from mandatory pushkey part failure
497 497 self.pkfailcb = {}
498 498 # an iterable of pushvars or None
499 499 self.pushvars = pushvars
500 500 # publish pushed changesets
501 501 self.publish = publish
502 502
503 503 @util.propertycache
504 504 def futureheads(self):
505 505 """future remote heads if the changeset push succeeds"""
506 506 return self.outgoing.ancestorsof
507 507
508 508 @util.propertycache
509 509 def fallbackheads(self):
510 510 """future remote heads if the changeset push fails"""
511 511 if self.revs is None:
512 512 # no target to push, all common heads are relevant
513 513 return self.outgoing.commonheads
514 514 unfi = self.repo.unfiltered()
515 515 # I want cheads = heads(::ancestorsof and ::commonheads)
516 516 # (ancestorsof is revs with secret changeset filtered out)
517 517 #
518 518 # This can be expressed as:
519 519 # cheads = ( (ancestorsof and ::commonheads)
520 520 # + (commonheads and ::ancestorsof)
521 521 # )
522 522 #
523 523 # while trying to push we already computed the following:
524 524 # common = (::commonheads)
525 525 # missing = ((commonheads::ancestorsof) - commonheads)
526 526 #
527 527 # We can pick:
528 528 # * ancestorsof part of common (::commonheads)
529 529 common = self.outgoing.common
530 530 rev = self.repo.changelog.index.rev
531 531 cheads = [node for node in self.revs if rev(node) in common]
532 532 # and
533 533 # * commonheads parents on missing
534 534 revset = unfi.set(
535 535 b'%ln and parents(roots(%ln))',
536 536 self.outgoing.commonheads,
537 537 self.outgoing.missing,
538 538 )
539 539 cheads.extend(c.node() for c in revset)
540 540 return cheads
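# Worked sketch (editor's addition, hypothetical history): if the remote
# has A-B (commonheads == [B]), the local repo adds C on top of B, and we
# push -r C, then C is not common, so the first step yields nothing;
# roots(missing) is {C} and parents(C) == {B}, which is a commonhead, so
# fallbackheads == [B]: a failed push leaves the remote heads unchanged.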
541 541
542 542 @property
543 543 def commonheads(self):
544 544 """set of all common heads after changeset bundle push"""
545 545 if self.cgresult:
546 546 return self.futureheads
547 547 else:
548 548 return self.fallbackheads
549 549
550 550
551 551 # mapping of messages used when pushing bookmarks
552 552 bookmsgmap = {
553 553 b'update': (
554 554 _(b"updating bookmark %s\n"),
555 555 _(b'updating bookmark %s failed!\n'),
556 556 ),
557 557 b'export': (
558 558 _(b"exporting bookmark %s\n"),
559 559 _(b'exporting bookmark %s failed!\n'),
560 560 ),
561 561 b'delete': (
562 562 _(b"deleting remote bookmark %s\n"),
563 563 _(b'deleting remote bookmark %s failed!\n'),
564 564 ),
565 565 }
566 566
567 567
568 568 def push(
569 569 repo,
570 570 remote,
571 571 force=False,
572 572 revs=None,
573 573 newbranch=False,
574 574 bookmarks=(),
575 575 publish=False,
576 576 opargs=None,
577 577 ):
578 578 '''Push outgoing changesets (limited by revs) from a local
579 579 repository to remote. Return an integer:
580 580 - None means nothing to push
581 581 - 0 means HTTP error
582 582 - 1 means we pushed and remote head count is unchanged *or*
583 583 we have outgoing changesets but refused to push
584 584 - other values as described by addchangegroup()
585 585 '''
586 586 if opargs is None:
587 587 opargs = {}
588 588 pushop = pushoperation(
589 589 repo,
590 590 remote,
591 591 force,
592 592 revs,
593 593 newbranch,
594 594 bookmarks,
595 595 publish,
596 596 **pycompat.strkwargs(opargs)
597 597 )
598 598 if pushop.remote.local():
599 599 missing = (
600 600 set(pushop.repo.requirements) - pushop.remote.local().supported
601 601 )
602 602 if missing:
603 603 msg = _(
604 604 b"required features are not"
605 605 b" supported in the destination:"
606 606 b" %s"
607 607 ) % (b', '.join(sorted(missing)))
608 608 raise error.Abort(msg)
609 609
610 610 if not pushop.remote.canpush():
611 611 raise error.Abort(_(b"destination does not support push"))
612 612
613 613 if not pushop.remote.capable(b'unbundle'):
614 614 raise error.Abort(
615 615 _(
616 616 b'cannot push: destination does not support the '
617 617 b'unbundle wire protocol command'
618 618 )
619 619 )
620 620
621 621 # get lock as we might write phase data
622 622 wlock = lock = None
623 623 try:
624 624 # bundle2 push may receive a reply bundle touching bookmarks
625 625 # requiring the wlock. Take it now to ensure proper ordering.
626 626 maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
627 627 if (
628 628 (not _forcebundle1(pushop))
629 629 and maypushback
630 630 and not bookmod.bookmarksinstore(repo)
631 631 ):
632 632 wlock = pushop.repo.wlock()
633 633 lock = pushop.repo.lock()
634 634 pushop.trmanager = transactionmanager(
635 635 pushop.repo, b'push-response', pushop.remote.url()
636 636 )
637 637 except error.LockUnavailable as err:
638 638 # source repo cannot be locked.
639 639 # We do not abort the push, but just disable the local phase
640 640 # synchronisation.
641 641 msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
642 642 err
643 643 )
644 644 pushop.ui.debug(msg)
645 645
646 646 with wlock or util.nullcontextmanager():
647 647 with lock or util.nullcontextmanager():
648 648 with pushop.trmanager or util.nullcontextmanager():
649 649 pushop.repo.checkpush(pushop)
650 650 _checkpublish(pushop)
651 651 _pushdiscovery(pushop)
652 652 if not pushop.force:
653 653 _checksubrepostate(pushop)
654 654 if not _forcebundle1(pushop):
655 655 _pushbundle2(pushop)
656 656 _pushchangeset(pushop)
657 657 _pushsyncphase(pushop)
658 658 _pushobsolete(pushop)
659 659 _pushbookmark(pushop)
660 660
661 661 if repo.ui.configbool(b'experimental', b'remotenames'):
662 662 logexchange.pullremotenames(repo, remote)
663 663
664 664 return pushop
665 665
666 666
667 667 # list of steps to perform discovery before push
668 668 pushdiscoveryorder = []
669 669
670 670 # Mapping between step name and function
671 671 #
672 672 # This exists to help extensions wrap steps if necessary
673 673 pushdiscoverymapping = {}
674 674
675 675
676 676 def pushdiscovery(stepname):
677 677 """decorator for function performing discovery before push
678 678
679 679 The function is added to the step -> function mapping and appended to the
680 680 list of steps. Beware that decorated functions will be added in order (this
681 681 may matter).
682 682
683 683 You can only use this decorator for a new step; if you want to wrap a step
684 684 from an extension, change the pushdiscoverymapping dictionary directly."""
685 685
686 686 def dec(func):
687 687 assert stepname not in pushdiscoverymapping
688 688 pushdiscoverymapping[stepname] = func
689 689 pushdiscoveryorder.append(stepname)
690 690 return func
691 691
692 692 return dec
693 693
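# Usage sketch (editor's addition): registering a hypothetical discovery
# step, mirroring the built-in steps declared below:
#
#     @pushdiscovery(b'my-step')          # b'my-step' is made up
#     def _pushdiscoverymystep(pushop):
#         ...                             # inspect or augment pushop state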
694 694
695 695 def _pushdiscovery(pushop):
696 696 """Run all discovery steps"""
697 697 for stepname in pushdiscoveryorder:
698 698 step = pushdiscoverymapping[stepname]
699 699 step(pushop)
700 700
701 701
702 702 def _checksubrepostate(pushop):
703 703 """Ensure all outgoing referenced subrepo revisions are present locally"""
704 704 for n in pushop.outgoing.missing:
705 705 ctx = pushop.repo[n]
706 706
707 707 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
708 708 for subpath in sorted(ctx.substate):
709 709 sub = ctx.sub(subpath)
710 710 sub.verify(onpush=True)
711 711
712 712
713 713 @pushdiscovery(b'changeset')
714 714 def _pushdiscoverychangeset(pushop):
715 715 """discover the changeset that need to be pushed"""
716 716 fci = discovery.findcommonincoming
717 717 if pushop.revs:
718 718 commoninc = fci(
719 719 pushop.repo,
720 720 pushop.remote,
721 721 force=pushop.force,
722 722 ancestorsof=pushop.revs,
723 723 )
724 724 else:
725 725 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
726 726 common, inc, remoteheads = commoninc
727 727 fco = discovery.findcommonoutgoing
728 728 outgoing = fco(
729 729 pushop.repo,
730 730 pushop.remote,
731 731 onlyheads=pushop.revs,
732 732 commoninc=commoninc,
733 733 force=pushop.force,
734 734 )
735 735 pushop.outgoing = outgoing
736 736 pushop.remoteheads = remoteheads
737 737 pushop.incoming = inc
738 738
739 739
740 740 @pushdiscovery(b'phase')
741 741 def _pushdiscoveryphase(pushop):
742 742 """discover the phase that needs to be pushed
743 743
744 744 (computed for both success and failure case for changesets push)"""
745 745 outgoing = pushop.outgoing
746 746 unfi = pushop.repo.unfiltered()
747 747 remotephases = listkeys(pushop.remote, b'phases')
748 748
749 749 if (
750 750 pushop.ui.configbool(b'ui', b'_usedassubrepo')
751 751 and remotephases # server supports phases
752 752 and not pushop.outgoing.missing # no changesets to be pushed
753 753 and remotephases.get(b'publishing', False)
754 754 ):
755 755 # When:
756 756 # - this is a subrepo push
757 757 # - and the remote supports phases
758 758 # - and no changesets are to be pushed
759 759 # - and remote is publishing
760 760 # We may be in issue 3781 case!
761 761 # We drop the phase synchronisation that is normally done as a
762 762 # courtesy; it would publish changesets that are possibly still
763 763 # draft locally but already present on the publishing remote.
764 764 pushop.outdatedphases = []
765 765 pushop.fallbackoutdatedphases = []
766 766 return
767 767
768 768 pushop.remotephases = phases.remotephasessummary(
769 769 pushop.repo, pushop.fallbackheads, remotephases
770 770 )
771 771 droots = pushop.remotephases.draftroots
772 772
773 773 extracond = b''
774 774 if not pushop.remotephases.publishing:
775 775 extracond = b' and public()'
776 776 revset = b'heads((%%ln::%%ln) %s)' % extracond
777 777 # Get the list of all revs that are draft on the remote but public here.
778 778 # XXX Beware that the revset breaks if droots is not strictly a set of
779 779 # XXX roots; we may want to ensure it is, but that is costly
780 780 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
781 781 if not pushop.remotephases.publishing and pushop.publish:
782 782 future = list(
783 783 unfi.set(
784 784 b'%ln and (not public() or %ln::)', pushop.futureheads, droots
785 785 )
786 786 )
787 787 elif not outgoing.missing:
788 788 future = fallback
789 789 else:
790 790 # add changesets we are going to push as draft
791 791 #
792 792 # should not be necessary for a publishing server, but because of an
793 793 # issue fixed in xxxxx we have to do it anyway.
794 794 fdroots = list(
795 795 unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
796 796 )
797 797 fdroots = [f.node() for f in fdroots]
798 798 future = list(unfi.set(revset, fdroots, pushop.futureheads))
799 799 pushop.outdatedphases = future
800 800 pushop.fallbackoutdatedphases = fallback
801 801
802 802
803 803 @pushdiscovery(b'obsmarker')
804 804 def _pushdiscoveryobsmarkers(pushop):
805 805 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
806 806 return
807 807
808 808 if not pushop.repo.obsstore:
809 809 return
810 810
811 811 if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
812 812 return
813 813
814 814 repo = pushop.repo
815 815 # very naive computation that can be quite expensive on big repos;
816 816 # however, evolution is currently slow on them anyway.
817 817 nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
818 818 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
819 819
820 820
821 821 @pushdiscovery(b'bookmarks')
822 822 def _pushdiscoverybookmarks(pushop):
823 823 ui = pushop.ui
824 824 repo = pushop.repo.unfiltered()
825 825 remote = pushop.remote
826 826 ui.debug(b"checking for updated bookmarks\n")
827 827 ancestors = ()
828 828 if pushop.revs:
829 829 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
830 830 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
831 831
832 832 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
833 833
834 834 explicit = {
835 835 repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
836 836 }
837 837
838 838 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
839 839 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
840 840
841 841
842 842 def _processcompared(pushop, pushed, explicit, remotebms, comp):
843 843 """take decision on bookmarks to push to the remote repo
844 844
845 845 Exists to help extensions alter this behavior.
846 846 """
847 847 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
848 848
849 849 repo = pushop.repo
850 850
851 851 for b, scid, dcid in advsrc:
852 852 if b in explicit:
853 853 explicit.remove(b)
854 854 if not pushed or repo[scid].rev() in pushed:
855 855 pushop.outbookmarks.append((b, dcid, scid))
856 856 # search for added bookmarks
857 857 for b, scid, dcid in addsrc:
858 858 if b in explicit:
859 859 explicit.remove(b)
860 860 if bookmod.isdivergent(b):
861 861 pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
862 862 pushop.bkresult = 2
863 863 else:
864 864 pushop.outbookmarks.append((b, b'', scid))
865 865 # search for overwritten bookmarks
866 866 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
867 867 if b in explicit:
868 868 explicit.remove(b)
869 869 pushop.outbookmarks.append((b, dcid, scid))
870 870 # search for bookmarks to delete
871 871 for b, scid, dcid in adddst:
872 872 if b in explicit:
873 873 explicit.remove(b)
874 874 # treat as "deleted locally"
875 875 pushop.outbookmarks.append((b, dcid, b''))
876 876 # identical bookmarks shouldn't get reported
877 877 for b, scid, dcid in same:
878 878 if b in explicit:
879 879 explicit.remove(b)
880 880
881 881 if explicit:
882 882 explicit = sorted(explicit)
883 883 # we should probably list all of them
884 884 pushop.ui.warn(
885 885 _(
886 886 b'bookmark %s does not exist on the local '
887 887 b'or remote repository!\n'
888 888 )
889 889 % explicit[0]
890 890 )
891 891 pushop.bkresult = 2
892 892
893 893 pushop.outbookmarks.sort()
894 894
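# Worked sketch (editor's addition, hypothetical bookmark states): a
# bookmark advanced locally (advsrc) is queued as (name, dcid, scid); one
# existing only remotely (adddst) but explicitly pushed is queued with new
# node b'', i.e. a deletion; identical bookmarks (same) are only removed
# from the explicit set so they are not reported.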
895 895
896 896 def _pushcheckoutgoing(pushop):
897 897 outgoing = pushop.outgoing
898 898 unfi = pushop.repo.unfiltered()
899 899 if not outgoing.missing:
900 900 # nothing to push
901 901 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
902 902 return False
903 903 # something to push
904 904 if not pushop.force:
905 905 # if repo.obsstore is empty --> no obsolete markers;
906 906 # skip the iteration in that case
907 907 if unfi.obsstore:
908 # this message are here for 80 char limit reason
909 mso = _(b"push includes obsolete changeset: %s!")
910 mspd = _(b"push includes phase-divergent changeset: %s!")
911 mscd = _(b"push includes content-divergent changeset: %s!")
912 mst = {
913 b"orphan": _(b"push includes orphan changeset: %s!"),
914 b"phase-divergent": mspd,
915 b"content-divergent": mscd,
916 }
917 # If we are to push if there is at least one
918 # obsolete or unstable changeset in missing, at
919 # least one of the missinghead will be obsolete or
920 # unstable. So checking heads only is ok
921 for node in outgoing.ancestorsof:
908 obsoletes = []
909 unstables = []
910 for node in outgoing.missing:
922 911 ctx = unfi[node]
923 912 if ctx.obsolete():
924 raise error.Abort(mso % ctx)
913 obsoletes.append(ctx)
925 914 elif ctx.isunstable():
926 # TODO print more than one instability in the abort
927 # message
928 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
915 unstables.append(ctx)
916 if obsoletes or unstables:
917 msg = b""
918 if obsoletes:
919 msg += _(b"push includes obsolete changesets:\n")
920 msg += b"\n".join(b' %s' % ctx for ctx in obsoletes)
921 if unstables:
922 if msg:
923 msg += b"\n"
924 msg += _(b"push includes unstable changesets:\n")
925 msg += b"\n".join(
926 b' %s (%s)'
927 % (
928 ctx,
929 b", ".join(_(ins) for ins in ctx.instabilities()),
930 )
931 for ctx in unstables
932 )
933 raise error.Abort(msg)
929 934
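# Behavior sketch (editor's addition, hypothetical hashes): unlike the
# head-only check it replaces, the loop above collects every offending
# missing changeset and reports them all at once, e.g.:
#     abort: push includes obsolete changesets:
#      4a3f12ab9d21
#     push includes unstable changesets:
#      9c1e77f00d42 (orphan)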
930 935 discovery.checkheads(pushop)
931 936 return True
932 937
933 938
934 939 # List of names of steps to perform for an outgoing bundle2, order matters.
935 940 b2partsgenorder = []
936 941
937 942 # Mapping between step name and function
938 943 #
939 944 # This exists to help extensions wrap steps if necessary
940 945 b2partsgenmapping = {}
941 946
942 947
943 948 def b2partsgenerator(stepname, idx=None):
944 949 """decorator for function generating bundle2 part
945 950
946 951 The function is added to the step -> function mapping and appended to the
947 952 list of steps. Beware that decorated functions will be added in order
948 953 (this may matter).
949 954
950 955 You can only use this decorator for new steps; if you want to wrap a step
951 956 from an extension, change the b2partsgenmapping dictionary directly."""
952 957
953 958 def dec(func):
954 959 assert stepname not in b2partsgenmapping
955 960 b2partsgenmapping[stepname] = func
956 961 if idx is None:
957 962 b2partsgenorder.append(stepname)
958 963 else:
959 964 b2partsgenorder.insert(idx, stepname)
960 965 return func
961 966
962 967 return dec
963 968
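# Usage sketch (editor's addition): parts are generated in b2partsgenorder;
# passing idx lets a generator run early, as the built-in 'pushvars' part
# further below does with idx=0:
#
#     @b2partsgenerator(b'my-part', idx=0)    # b'my-part' is made up
#     def _pushb2mypart(pushop, bundler):
#         ...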
964 969
965 970 def _pushb2ctxcheckheads(pushop, bundler):
966 971 """Generate race condition checking parts
967 972
968 973 Exists as an independent function to aid extensions
969 974 """
970 975 # * 'force' does not check for push races,
971 976 # * if we don't push anything, there is nothing to check.
972 977 if not pushop.force and pushop.outgoing.ancestorsof:
973 978 allowunrelated = b'related' in bundler.capabilities.get(
974 979 b'checkheads', ()
975 980 )
976 981 emptyremote = pushop.pushbranchmap is None
977 982 if not allowunrelated or emptyremote:
978 983 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
979 984 else:
980 985 affected = set()
981 986 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
982 987 remoteheads, newheads, unsyncedheads, discardedheads = heads
983 988 if remoteheads is not None:
984 989 remote = set(remoteheads)
985 990 affected |= set(discardedheads) & remote
986 991 affected |= remote - set(newheads)
987 992 if affected:
988 993 data = iter(sorted(affected))
989 994 bundler.newpart(b'check:updated-heads', data=data)
990 995
991 996
992 997 def _pushing(pushop):
993 998 """return True if we are pushing anything"""
994 999 return bool(
995 1000 pushop.outgoing.missing
996 1001 or pushop.outdatedphases
997 1002 or pushop.outobsmarkers
998 1003 or pushop.outbookmarks
999 1004 )
1000 1005
1001 1006
1002 1007 @b2partsgenerator(b'check-bookmarks')
1003 1008 def _pushb2checkbookmarks(pushop, bundler):
1004 1009 """insert bookmark move checking"""
1005 1010 if not _pushing(pushop) or pushop.force:
1006 1011 return
1007 1012 b2caps = bundle2.bundle2caps(pushop.remote)
1008 1013 hasbookmarkcheck = b'bookmarks' in b2caps
1009 1014 if not (pushop.outbookmarks and hasbookmarkcheck):
1010 1015 return
1011 1016 data = []
1012 1017 for book, old, new in pushop.outbookmarks:
1013 1018 data.append((book, old))
1014 1019 checkdata = bookmod.binaryencode(data)
1015 1020 bundler.newpart(b'check:bookmarks', data=checkdata)
1016 1021
1017 1022
1018 1023 @b2partsgenerator(b'check-phases')
1019 1024 def _pushb2checkphases(pushop, bundler):
1020 1025 """insert phase move checking"""
1021 1026 if not _pushing(pushop) or pushop.force:
1022 1027 return
1023 1028 b2caps = bundle2.bundle2caps(pushop.remote)
1024 1029 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1025 1030 if pushop.remotephases is not None and hasphaseheads:
1026 1031 # check that the remote phase has not changed
1027 1032 checks = {p: [] for p in phases.allphases}
1028 1033 checks[phases.public].extend(pushop.remotephases.publicheads)
1029 1034 checks[phases.draft].extend(pushop.remotephases.draftroots)
1030 1035 if any(pycompat.itervalues(checks)):
1031 1036 for phase in checks:
1032 1037 checks[phase].sort()
1033 1038 checkdata = phases.binaryencode(checks)
1034 1039 bundler.newpart(b'check:phases', data=checkdata)
1035 1040
1036 1041
1037 1042 @b2partsgenerator(b'changeset')
1038 1043 def _pushb2ctx(pushop, bundler):
1039 1044 """handle changegroup push through bundle2
1040 1045
1041 1046 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
1042 1047 """
1043 1048 if b'changesets' in pushop.stepsdone:
1044 1049 return
1045 1050 pushop.stepsdone.add(b'changesets')
1046 1051 # Send known heads to the server for race detection.
1047 1052 if not _pushcheckoutgoing(pushop):
1048 1053 return
1049 1054 pushop.repo.prepushoutgoinghooks(pushop)
1050 1055
1051 1056 _pushb2ctxcheckheads(pushop, bundler)
1052 1057
1053 1058 b2caps = bundle2.bundle2caps(pushop.remote)
1054 1059 version = b'01'
1055 1060 cgversions = b2caps.get(b'changegroup')
1056 1061 if cgversions: # 3.1 and 3.2 ship with an empty value
1057 1062 cgversions = [
1058 1063 v
1059 1064 for v in cgversions
1060 1065 if v in changegroup.supportedoutgoingversions(pushop.repo)
1061 1066 ]
1062 1067 if not cgversions:
1063 1068 raise error.Abort(_(b'no common changegroup version'))
1064 1069 version = max(cgversions)
1065 1070 cgstream = changegroup.makestream(
1066 1071 pushop.repo, pushop.outgoing, version, b'push'
1067 1072 )
1068 1073 cgpart = bundler.newpart(b'changegroup', data=cgstream)
1069 1074 if cgversions:
1070 1075 cgpart.addparam(b'version', version)
1071 1076 if b'treemanifest' in pushop.repo.requirements:
1072 1077 cgpart.addparam(b'treemanifest', b'1')
1073 1078 if b'exp-sidedata-flag' in pushop.repo.requirements:
1074 1079 cgpart.addparam(b'exp-sidedata', b'1')
1075 1080
1076 1081 def handlereply(op):
1077 1082 """extract addchangegroup returns from server reply"""
1078 1083 cgreplies = op.records.getreplies(cgpart.id)
1079 1084 assert len(cgreplies[b'changegroup']) == 1
1080 1085 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
1081 1086
1082 1087 return handlereply
1083 1088
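# Negotiation sketch (editor's addition): _pushb2ctx above picks the
# highest changegroup version supported on both sides, e.g. local
# {b'01', b'02', b'03'} against remote {b'01', b'02'} yields b'02';
# the empty advertisement from 3.1/3.2 servers falls back to b'01'.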
1084 1089
1085 1090 @b2partsgenerator(b'phase')
1086 1091 def _pushb2phases(pushop, bundler):
1087 1092 """handle phase push through bundle2"""
1088 1093 if b'phases' in pushop.stepsdone:
1089 1094 return
1090 1095 b2caps = bundle2.bundle2caps(pushop.remote)
1091 1096 ui = pushop.repo.ui
1092 1097
1093 1098 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1094 1099 haspushkey = b'pushkey' in b2caps
1095 1100 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1096 1101
1097 1102 if hasphaseheads and not legacyphase:
1098 1103 return _pushb2phaseheads(pushop, bundler)
1099 1104 elif haspushkey:
1100 1105 return _pushb2phasespushkey(pushop, bundler)
1101 1106
1102 1107
1103 1108 def _pushb2phaseheads(pushop, bundler):
1104 1109 """push phase information through a bundle2 - binary part"""
1105 1110 pushop.stepsdone.add(b'phases')
1106 1111 if pushop.outdatedphases:
1107 1112 updates = {p: [] for p in phases.allphases}
1108 1113 updates[0].extend(h.node() for h in pushop.outdatedphases)
1109 1114 phasedata = phases.binaryencode(updates)
1110 1115 bundler.newpart(b'phase-heads', data=phasedata)
1111 1116
1112 1117
1113 1118 def _pushb2phasespushkey(pushop, bundler):
1114 1119 """push phase information through a bundle2 - pushkey part"""
1115 1120 pushop.stepsdone.add(b'phases')
1116 1121 part2node = []
1117 1122
1118 1123 def handlefailure(pushop, exc):
1119 1124 targetid = int(exc.partid)
1120 1125 for partid, node in part2node:
1121 1126 if partid == targetid:
1122 1127 raise error.Abort(_(b'updating %s to public failed') % node)
1123 1128
1124 1129 enc = pushkey.encode
1125 1130 for newremotehead in pushop.outdatedphases:
1126 1131 part = bundler.newpart(b'pushkey')
1127 1132 part.addparam(b'namespace', enc(b'phases'))
1128 1133 part.addparam(b'key', enc(newremotehead.hex()))
1129 1134 part.addparam(b'old', enc(b'%d' % phases.draft))
1130 1135 part.addparam(b'new', enc(b'%d' % phases.public))
1131 1136 part2node.append((part.id, newremotehead))
1132 1137 pushop.pkfailcb[part.id] = handlefailure
1133 1138
1134 1139 def handlereply(op):
1135 1140 for partid, node in part2node:
1136 1141 partrep = op.records.getreplies(partid)
1137 1142 results = partrep[b'pushkey']
1138 1143 assert len(results) <= 1
1139 1144 msg = None
1140 1145 if not results:
1141 1146 msg = _(b'server ignored update of %s to public!\n') % node
1142 1147 elif not int(results[0][b'return']):
1143 1148 msg = _(b'updating %s to public failed!\n') % node
1144 1149 if msg is not None:
1145 1150 pushop.ui.warn(msg)
1146 1151
1147 1152 return handlereply
1148 1153
1149 1154
1150 1155 @b2partsgenerator(b'obsmarkers')
1151 1156 def _pushb2obsmarkers(pushop, bundler):
1152 1157 if b'obsmarkers' in pushop.stepsdone:
1153 1158 return
1154 1159 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
1155 1160 if obsolete.commonversion(remoteversions) is None:
1156 1161 return
1157 1162 pushop.stepsdone.add(b'obsmarkers')
1158 1163 if pushop.outobsmarkers:
1159 1164 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1160 1165 bundle2.buildobsmarkerspart(bundler, markers)
1161 1166
1162 1167
1163 1168 @b2partsgenerator(b'bookmarks')
1164 1169 def _pushb2bookmarks(pushop, bundler):
1165 1170 """handle bookmark push through bundle2"""
1166 1171 if b'bookmarks' in pushop.stepsdone:
1167 1172 return
1168 1173 b2caps = bundle2.bundle2caps(pushop.remote)
1169 1174
1170 1175 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
1171 1176 legacybooks = b'bookmarks' in legacy
1172 1177
1173 1178 if not legacybooks and b'bookmarks' in b2caps:
1174 1179 return _pushb2bookmarkspart(pushop, bundler)
1175 1180 elif b'pushkey' in b2caps:
1176 1181 return _pushb2bookmarkspushkey(pushop, bundler)
1177 1182
1178 1183
1179 1184 def _bmaction(old, new):
1180 1185 """small utility for bookmark pushing"""
1181 1186 if not old:
1182 1187 return b'export'
1183 1188 elif not new:
1184 1189 return b'delete'
1185 1190 return b'update'
1186 1191
1187 1192
1188 1193 def _abortonsecretctx(pushop, node, b):
1189 1194 """abort if a given bookmark points to a secret changeset"""
1190 1195 if node and pushop.repo[node].phase() == phases.secret:
1191 1196 raise error.Abort(
1192 1197 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1193 1198 )
1194 1199
1195 1200
1196 1201 def _pushb2bookmarkspart(pushop, bundler):
1197 1202 pushop.stepsdone.add(b'bookmarks')
1198 1203 if not pushop.outbookmarks:
1199 1204 return
1200 1205
1201 1206 allactions = []
1202 1207 data = []
1203 1208 for book, old, new in pushop.outbookmarks:
1204 1209 _abortonsecretctx(pushop, new, book)
1205 1210 data.append((book, new))
1206 1211 allactions.append((book, _bmaction(old, new)))
1207 1212 checkdata = bookmod.binaryencode(data)
1208 1213 bundler.newpart(b'bookmarks', data=checkdata)
1209 1214
1210 1215 def handlereply(op):
1211 1216 ui = pushop.ui
1212 1217 # if success
1213 1218 for book, action in allactions:
1214 1219 ui.status(bookmsgmap[action][0] % book)
1215 1220
1216 1221 return handlereply
1217 1222
1218 1223
1219 1224 def _pushb2bookmarkspushkey(pushop, bundler):
1220 1225 pushop.stepsdone.add(b'bookmarks')
1221 1226 part2book = []
1222 1227 enc = pushkey.encode
1223 1228
1224 1229 def handlefailure(pushop, exc):
1225 1230 targetid = int(exc.partid)
1226 1231 for partid, book, action in part2book:
1227 1232 if partid == targetid:
1228 1233 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1229 1234 # we should not be called for parts we did not generate
1230 1235 assert False
1231 1236
1232 1237 for book, old, new in pushop.outbookmarks:
1233 1238 _abortonsecretctx(pushop, new, book)
1234 1239 part = bundler.newpart(b'pushkey')
1235 1240 part.addparam(b'namespace', enc(b'bookmarks'))
1236 1241 part.addparam(b'key', enc(book))
1237 1242 part.addparam(b'old', enc(hex(old)))
1238 1243 part.addparam(b'new', enc(hex(new)))
1239 1244 action = b'update'
1240 1245 if not old:
1241 1246 action = b'export'
1242 1247 elif not new:
1243 1248 action = b'delete'
1244 1249 part2book.append((part.id, book, action))
1245 1250 pushop.pkfailcb[part.id] = handlefailure
1246 1251
1247 1252 def handlereply(op):
1248 1253 ui = pushop.ui
1249 1254 for partid, book, action in part2book:
1250 1255 partrep = op.records.getreplies(partid)
1251 1256 results = partrep[b'pushkey']
1252 1257 assert len(results) <= 1
1253 1258 if not results:
1254 1259 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1255 1260 else:
1256 1261 ret = int(results[0][b'return'])
1257 1262 if ret:
1258 1263 ui.status(bookmsgmap[action][0] % book)
1259 1264 else:
1260 1265 ui.warn(bookmsgmap[action][1] % book)
1261 1266 if pushop.bkresult is not None:
1262 1267 pushop.bkresult = 1
1263 1268
1264 1269 return handlereply
1265 1270
1266 1271
1267 1272 @b2partsgenerator(b'pushvars', idx=0)
1268 1273 def _getbundlesendvars(pushop, bundler):
1269 1274 '''send shellvars via bundle2'''
1270 1275 pushvars = pushop.pushvars
1271 1276 if pushvars:
1272 1277 shellvars = {}
1273 1278 for raw in pushvars:
1274 1279 if b'=' not in raw:
1275 1280 msg = (
1276 1281 b"unable to parse variable '%s', should follow "
1277 1282 b"'KEY=VALUE' or 'KEY=' format"
1278 1283 )
1279 1284 raise error.Abort(msg % raw)
1280 1285 k, v = raw.split(b'=', 1)
1281 1286 shellvars[k] = v
1282 1287
1283 1288 part = bundler.newpart(b'pushvars')
1284 1289
1285 1290 for key, value in pycompat.iteritems(shellvars):
1286 1291 part.addparam(key, value, mandatory=False)
1287 1292
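# Usage sketch (editor's addition): pushvars typically arrive from the
# command line and must match the KEY=VALUE or KEY= format checked above:
#     hg push --pushvars "DEBUG=1" --pushvars "BYPASS_REVIEW="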
1288 1293
1289 1294 def _pushbundle2(pushop):
1290 1295 """push data to the remote using bundle2
1291 1296
1292 1297 The only currently supported type of data is changegroup but this will
1293 1298 evolve in the future."""
1294 1299 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1295 1300 pushback = pushop.trmanager and pushop.ui.configbool(
1296 1301 b'experimental', b'bundle2.pushback'
1297 1302 )
1298 1303
1299 1304 # create reply capability
1300 1305 capsblob = bundle2.encodecaps(
1301 1306 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1302 1307 )
1303 1308 bundler.newpart(b'replycaps', data=capsblob)
1304 1309 replyhandlers = []
1305 1310 for partgenname in b2partsgenorder:
1306 1311 partgen = b2partsgenmapping[partgenname]
1307 1312 ret = partgen(pushop, bundler)
1308 1313 if callable(ret):
1309 1314 replyhandlers.append(ret)
1310 1315 # do not push if nothing to push
1311 1316 if bundler.nbparts <= 1:
1312 1317 return
1313 1318 stream = util.chunkbuffer(bundler.getchunks())
1314 1319 try:
1315 1320 try:
1316 1321 with pushop.remote.commandexecutor() as e:
1317 1322 reply = e.callcommand(
1318 1323 b'unbundle',
1319 1324 {
1320 1325 b'bundle': stream,
1321 1326 b'heads': [b'force'],
1322 1327 b'url': pushop.remote.url(),
1323 1328 },
1324 1329 ).result()
1325 1330 except error.BundleValueError as exc:
1326 1331 raise error.Abort(_(b'missing support for %s') % exc)
1327 1332 try:
1328 1333 trgetter = None
1329 1334 if pushback:
1330 1335 trgetter = pushop.trmanager.transaction
1331 1336 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1332 1337 except error.BundleValueError as exc:
1333 1338 raise error.Abort(_(b'missing support for %s') % exc)
1334 1339 except bundle2.AbortFromPart as exc:
1335 1340 pushop.ui.status(_(b'remote: %s\n') % exc)
1336 1341 if exc.hint is not None:
1337 1342 pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1338 1343 raise error.Abort(_(b'push failed on remote'))
1339 1344 except error.PushkeyFailed as exc:
1340 1345 partid = int(exc.partid)
1341 1346 if partid not in pushop.pkfailcb:
1342 1347 raise
1343 1348 pushop.pkfailcb[partid](pushop, exc)
1344 1349 for rephand in replyhandlers:
1345 1350 rephand(op)
1346 1351
1347 1352
1348 1353 def _pushchangeset(pushop):
1349 1354 """Make the actual push of changeset bundle to remote repo"""
1350 1355 if b'changesets' in pushop.stepsdone:
1351 1356 return
1352 1357 pushop.stepsdone.add(b'changesets')
1353 1358 if not _pushcheckoutgoing(pushop):
1354 1359 return
1355 1360
1356 1361 # Should have verified this in push().
1357 1362 assert pushop.remote.capable(b'unbundle')
1358 1363
1359 1364 pushop.repo.prepushoutgoinghooks(pushop)
1360 1365 outgoing = pushop.outgoing
1361 1366 # TODO: get bundlecaps from remote
1362 1367 bundlecaps = None
1363 1368 # create a changegroup from local
1364 1369 if pushop.revs is None and not (
1365 1370 outgoing.excluded or pushop.repo.changelog.filteredrevs
1366 1371 ):
1367 1372 # push everything,
1368 1373 # use the fast path, no race possible on push
1369 1374 cg = changegroup.makechangegroup(
1370 1375 pushop.repo,
1371 1376 outgoing,
1372 1377 b'01',
1373 1378 b'push',
1374 1379 fastpath=True,
1375 1380 bundlecaps=bundlecaps,
1376 1381 )
1377 1382 else:
1378 1383 cg = changegroup.makechangegroup(
1379 1384 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1380 1385 )
1381 1386
1382 1387 # apply changegroup to remote
1383 1388 # local repo finds heads on server, finds out what
1384 1389 # revs it must push. once revs transferred, if server
1385 1390 # finds it has different heads (someone else won
1386 1391 # commit/push race), server aborts.
1387 1392 if pushop.force:
1388 1393 remoteheads = [b'force']
1389 1394 else:
1390 1395 remoteheads = pushop.remoteheads
1391 1396 # ssh: return remote's addchangegroup()
1392 1397 # http: return remote's addchangegroup() or 0 for error
1393 1398 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1394 1399
1395 1400
1396 1401 def _pushsyncphase(pushop):
1397 1402 """synchronise phase information locally and remotely"""
1398 1403 cheads = pushop.commonheads
1399 1404 # even when we don't push, exchanging phase data is useful
1400 1405 remotephases = listkeys(pushop.remote, b'phases')
1401 1406 if (
1402 1407 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1403 1408 and remotephases # server supports phases
1404 1409 and pushop.cgresult is None # nothing was pushed
1405 1410 and remotephases.get(b'publishing', False)
1406 1411 ):
1407 1412 # When:
1408 1413 # - this is a subrepo push
1409 1414 # - and remote support phase
1410 1415 # - and no changeset was pushed
1411 1416 # - and remote is publishing
1412 1417 # We may be in issue 3871 case!
1413 1418 # We drop the phase synchronisation that is normally done as a
1414 1419 # courtesy; it would publish changesets that are possibly still
1415 1420 # draft locally but already present on the publishing remote.
1416 1421 remotephases = {b'publishing': b'True'}
1417 1422 if not remotephases: # old server or public only reply from non-publishing
1418 1423 _localphasemove(pushop, cheads)
1419 1424 # don't push any phase data as there is nothing to push
1420 1425 else:
1421 1426 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1422 1427 pheads, droots = ana
1423 1428 ### Apply remote phase on local
1424 1429 if remotephases.get(b'publishing', False):
1425 1430 _localphasemove(pushop, cheads)
1426 1431 else: # publish = False
1427 1432 _localphasemove(pushop, pheads)
1428 1433 _localphasemove(pushop, cheads, phases.draft)
1429 1434 ### Apply local phase on remote
1430 1435
1431 1436 if pushop.cgresult:
1432 1437 if b'phases' in pushop.stepsdone:
1433 1438 # phases already pushed through bundle2
1434 1439 return
1435 1440 outdated = pushop.outdatedphases
1436 1441 else:
1437 1442 outdated = pushop.fallbackoutdatedphases
1438 1443
1439 1444 pushop.stepsdone.add(b'phases')
1440 1445
1441 1446 # filter heads already turned public by the push
1442 1447 outdated = [c for c in outdated if c.node() not in pheads]
1443 1448 # fallback to independent pushkey command
1444 1449 for newremotehead in outdated:
1445 1450 with pushop.remote.commandexecutor() as e:
1446 1451 r = e.callcommand(
1447 1452 b'pushkey',
1448 1453 {
1449 1454 b'namespace': b'phases',
1450 1455 b'key': newremotehead.hex(),
1451 1456 b'old': b'%d' % phases.draft,
1452 1457 b'new': b'%d' % phases.public,
1453 1458 },
1454 1459 ).result()
1455 1460
1456 1461 if not r:
1457 1462 pushop.ui.warn(
1458 1463 _(b'updating %s to public failed!\n') % newremotehead
1459 1464 )
1460 1465
1461 1466
1462 1467 def _localphasemove(pushop, nodes, phase=phases.public):
1463 1468 """move <nodes> to <phase> in the local source repo"""
1464 1469 if pushop.trmanager:
1465 1470 phases.advanceboundary(
1466 1471 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1467 1472 )
1468 1473 else:
1469 1474 # repo is not locked, do not change any phases!
1470 1475 # Informs the user that phases should have been moved when
1471 1476 # applicable.
1472 1477 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1473 1478 phasestr = phases.phasenames[phase]
1474 1479 if actualmoves:
1475 1480 pushop.ui.status(
1476 1481 _(
1477 1482 b'cannot lock source repo, skipping '
1478 1483 b'local %s phase update\n'
1479 1484 )
1480 1485 % phasestr
1481 1486 )
1482 1487
1483 1488
1484 1489 def _pushobsolete(pushop):
1485 1490 """utility function to push obsolete markers to a remote"""
1486 1491 if b'obsmarkers' in pushop.stepsdone:
1487 1492 return
1488 1493 repo = pushop.repo
1489 1494 remote = pushop.remote
1490 1495 pushop.stepsdone.add(b'obsmarkers')
1491 1496 if pushop.outobsmarkers:
1492 1497 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1493 1498 rslts = []
1494 1499 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1495 1500 remotedata = obsolete._pushkeyescape(markers)
1496 1501 for key in sorted(remotedata, reverse=True):
1497 1502 # reverse sort to ensure we end with dump0
1498 1503 data = remotedata[key]
1499 1504 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1500 1505 if [r for r in rslts if not r]:
1501 1506 msg = _(b'failed to push some obsolete markers!\n')
1502 1507 repo.ui.warn(msg)
1503 1508
1504 1509
1505 1510 def _pushbookmark(pushop):
1506 1511 """Update bookmark position on remote"""
1507 1512 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1508 1513 return
1509 1514 pushop.stepsdone.add(b'bookmarks')
1510 1515 ui = pushop.ui
1511 1516 remote = pushop.remote
1512 1517
1513 1518 for b, old, new in pushop.outbookmarks:
1514 1519 action = b'update'
1515 1520 if not old:
1516 1521 action = b'export'
1517 1522 elif not new:
1518 1523 action = b'delete'
1519 1524
1520 1525 with remote.commandexecutor() as e:
1521 1526 r = e.callcommand(
1522 1527 b'pushkey',
1523 1528 {
1524 1529 b'namespace': b'bookmarks',
1525 1530 b'key': b,
1526 1531 b'old': hex(old),
1527 1532 b'new': hex(new),
1528 1533 },
1529 1534 ).result()
1530 1535
1531 1536 if r:
1532 1537 ui.status(bookmsgmap[action][0] % b)
1533 1538 else:
1534 1539 ui.warn(bookmsgmap[action][1] % b)
1535 1540 # discovery can have set the value from an invalid entry
1536 1541 if pushop.bkresult is not None:
1537 1542 pushop.bkresult = 1
1538 1543
1539 1544
1540 1545 class pulloperation(object):
1541 1546 """A object that represent a single pull operation
1542 1547
1543 1548 It purpose is to carry pull related state and very common operation.
1544 1549
1545 1550 A new should be created at the beginning of each pull and discarded
1546 1551 afterward.
1547 1552 """
1548 1553
1549 1554 def __init__(
1550 1555 self,
1551 1556 repo,
1552 1557 remote,
1553 1558 heads=None,
1554 1559 force=False,
1555 1560 bookmarks=(),
1556 1561 remotebookmarks=None,
1557 1562 streamclonerequested=None,
1558 1563 includepats=None,
1559 1564 excludepats=None,
1560 1565 depth=None,
1561 1566 ):
1562 1567 # repo we pull into
1563 1568 self.repo = repo
1564 1569 # repo we pull from
1565 1570 self.remote = remote
1566 1571 # revision we try to pull (None is "all")
1567 1572 self.heads = heads
1568 1573 # bookmark pulled explicitly
1569 1574 self.explicitbookmarks = [
1570 1575 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1571 1576 ]
1572 1577 # do we force pull?
1573 1578 self.force = force
1574 1579 # whether a streaming clone was requested
1575 1580 self.streamclonerequested = streamclonerequested
1576 1581 # transaction manager
1577 1582 self.trmanager = None
1578 1583 # set of common changesets between local and remote before pull
1579 1584 self.common = None
1580 1585 # set of pulled heads
1581 1586 self.rheads = None
1582 1587 # list of missing changesets to fetch remotely
1583 1588 self.fetch = None
1584 1589 # remote bookmarks data
1585 1590 self.remotebookmarks = remotebookmarks
1586 1591 # result of changegroup pulling (used as return code by pull)
1587 1592 self.cgresult = None
1588 1593 # list of steps already done
1589 1594 self.stepsdone = set()
1590 1595 # Whether we attempted a clone from pre-generated bundles.
1591 1596 self.clonebundleattempted = False
1592 1597 # Set of file patterns to include.
1593 1598 self.includepats = includepats
1594 1599 # Set of file patterns to exclude.
1595 1600 self.excludepats = excludepats
1596 1601 # Number of ancestor changesets to pull from each pulled head.
1597 1602 self.depth = depth
1598 1603
1599 1604 @util.propertycache
1600 1605 def pulledsubset(self):
1601 1606 """heads of the set of changeset target by the pull"""
1602 1607 # compute target subset
1603 1608 if self.heads is None:
1604 1609 # We pulled everything possible
1605 1610 # sync on everything common
1606 1611 c = set(self.common)
1607 1612 ret = list(self.common)
1608 1613 for n in self.rheads:
1609 1614 if n not in c:
1610 1615 ret.append(n)
1611 1616 return ret
1612 1617 else:
1613 1618 # We pulled a specific subset
1614 1619 # sync on this subset
1615 1620 return self.heads
1616 1621
1617 1622 @util.propertycache
1618 1623 def canusebundle2(self):
1619 1624 return not _forcebundle1(self)
1620 1625
1621 1626 @util.propertycache
1622 1627 def remotebundle2caps(self):
1623 1628 return bundle2.bundle2caps(self.remote)
1624 1629
1625 1630 def gettransaction(self):
1626 1631 # deprecated; talk to trmanager directly
1627 1632 return self.trmanager.transaction()
1628 1633
1629 1634
1630 1635 class transactionmanager(util.transactional):
1631 1636 """An object to manage the life cycle of a transaction
1632 1637
1633 1638 It creates the transaction on demand and calls the appropriate hooks when
1634 1639 closing the transaction."""
1635 1640
1636 1641 def __init__(self, repo, source, url):
1637 1642 self.repo = repo
1638 1643 self.source = source
1639 1644 self.url = url
1640 1645 self._tr = None
1641 1646
1642 1647 def transaction(self):
1643 1648 """Return an open transaction object, constructing if necessary"""
1644 1649 if not self._tr:
1645 1650 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1646 1651 self._tr = self.repo.transaction(trname)
1647 1652 self._tr.hookargs[b'source'] = self.source
1648 1653 self._tr.hookargs[b'url'] = self.url
1649 1654 return self._tr
1650 1655
1651 1656 def close(self):
1652 1657 """close transaction if created"""
1653 1658 if self._tr is not None:
1654 1659 self._tr.close()
1655 1660
1656 1661 def release(self):
1657 1662 """release transaction if created"""
1658 1663 if self._tr is not None:
1659 1664 self._tr.release()
1660 1665
1661 1666
1662 1667 def listkeys(remote, namespace):
1663 1668 with remote.commandexecutor() as e:
1664 1669 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1665 1670
1666 1671
1667 1672 def _fullpullbundle2(repo, pullop):
1668 1673 # The server may send a partial reply, i.e. when inlining
1669 1674 # pre-computed bundles. In that case, update the common
1670 1675 # set based on the results and pull another bundle.
1671 1676 #
1672 1677 # There are two indicators that the process is finished:
1673 1678 # - no changeset has been added, or
1674 1679 # - all remote heads are known locally.
1675 1680 # The head check must use the unfiltered view as obsolescence
1676 1681 # markers can hide heads.
1677 1682 unfi = repo.unfiltered()
1678 1683 unficl = unfi.changelog
1679 1684
1680 1685 def headsofdiff(h1, h2):
1681 1686 """Returns heads(h1 % h2)"""
1682 1687 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1683 1688 return {ctx.node() for ctx in res}
1684 1689
1685 1690 def headsofunion(h1, h2):
1686 1691 """Returns heads((h1 + h2) - null)"""
1687 1692 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1688 1693 return {ctx.node() for ctx in res}
1689 1694
1690 1695 while True:
1691 1696 old_heads = unficl.heads()
1692 1697 clstart = len(unficl)
1693 1698 _pullbundle2(pullop)
1694 1699 if repository.NARROW_REQUIREMENT in repo.requirements:
1695 1700 # XXX narrow clones filter the heads on the server side during
1696 1701 # XXX getbundle and result in partial replies as well.
1697 1702 # XXX Disable pull bundles in this case as band aid to avoid
1698 1703 # XXX extra round trips.
1699 1704 break
1700 1705 if clstart == len(unficl):
1701 1706 break
1702 1707 if all(unficl.hasnode(n) for n in pullop.rheads):
1703 1708 break
1704 1709 new_heads = headsofdiff(unficl.heads(), old_heads)
1705 1710 pullop.common = headsofunion(new_heads, pullop.common)
1706 1711 pullop.rheads = set(pullop.rheads) - pullop.common
1707 1712
1708 1713
1709 1714 def add_confirm_callback(repo, pullop):
1710 1715 """ adds a finalize callback to transaction which can be used to show stats
1711 1716 to user and confirm the pull before committing transaction """
1712 1717
1713 1718 tr = pullop.trmanager.transaction()
1714 1719 scmutil.registersummarycallback(
1715 1720 repo, tr, txnname=b'pull', as_validator=True
1716 1721 )
1717 1722 reporef = weakref.ref(repo.unfiltered())
1718 1723
1719 1724 def prompt(tr):
1720 1725 repo = reporef()
1721 1726 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1722 1727 if repo.ui.promptchoice(cm):
1723 1728 raise error.Abort("user aborted")
1724 1729
1725 1730 tr.addvalidator(b'900-pull-prompt', prompt)
1726 1731
1727 1732
1728 1733 def pull(
1729 1734 repo,
1730 1735 remote,
1731 1736 heads=None,
1732 1737 force=False,
1733 1738 bookmarks=(),
1734 1739 opargs=None,
1735 1740 streamclonerequested=None,
1736 1741 includepats=None,
1737 1742 excludepats=None,
1738 1743 depth=None,
1739 1744 confirm=None,
1740 1745 ):
1741 1746 """Fetch repository data from a remote.
1742 1747
1743 1748 This is the main function used to retrieve data from a remote repository.
1744 1749
1745 1750 ``repo`` is the local repository to clone into.
1746 1751 ``remote`` is a peer instance.
1747 1752 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1748 1753 default) means to pull everything from the remote.
1749 1754 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1750 1755 default, all remote bookmarks are pulled.
1751 1756 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1752 1757 initialization.
1753 1758 ``streamclonerequested`` is a boolean indicating whether a "streaming
1754 1759 clone" is requested. A "streaming clone" is essentially a raw file copy
1755 1760 of revlogs from the server. This only works when the local repository is
1756 1761 empty. The default value of ``None`` means to respect the server
1757 1762 configuration for preferring stream clones.
1758 1763 ``includepats`` and ``excludepats`` define explicit file patterns to
1759 1764 include and exclude in storage, respectively. If not defined, narrow
1760 1765 patterns from the repo instance are used, if available.
1761 1766 ``depth`` is an integer indicating the DAG depth of history we're
1762 1767 interested in. If defined, for each revision specified in ``heads``, we
1763 1768 will fetch up to this many of its ancestors and data associated with them.
1764 1769 ``confirm`` is a boolean indicating whether the pull should be confirmed
1765 1770 before committing the transaction. This overrides HGPLAIN.
1766 1771
1767 1772 Returns the ``pulloperation`` created for this pull.
1768 1773 """
1769 1774 if opargs is None:
1770 1775 opargs = {}
1771 1776
1772 1777 # We allow the narrow patterns to be passed in explicitly to provide more
1773 1778 # flexibility for API consumers.
1774 1779 if includepats or excludepats:
1775 1780 includepats = includepats or set()
1776 1781 excludepats = excludepats or set()
1777 1782 else:
1778 1783 includepats, excludepats = repo.narrowpats
1779 1784
1780 1785 narrowspec.validatepatterns(includepats)
1781 1786 narrowspec.validatepatterns(excludepats)
1782 1787
1783 1788 pullop = pulloperation(
1784 1789 repo,
1785 1790 remote,
1786 1791 heads,
1787 1792 force,
1788 1793 bookmarks=bookmarks,
1789 1794 streamclonerequested=streamclonerequested,
1790 1795 includepats=includepats,
1791 1796 excludepats=excludepats,
1792 1797 depth=depth,
1793 1798 **pycompat.strkwargs(opargs)
1794 1799 )
1795 1800
1796 1801 peerlocal = pullop.remote.local()
1797 1802 if peerlocal:
1798 1803 missing = set(peerlocal.requirements) - pullop.repo.supported
1799 1804 if missing:
1800 1805 msg = _(
1801 1806 b"required features are not"
1802 1807 b" supported in the destination:"
1803 1808 b" %s"
1804 1809 ) % (b', '.join(sorted(missing)))
1805 1810 raise error.Abort(msg)
1806 1811
1807 1812 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
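# Bookmarks normally live outside the store, so updating them requires
# the working-copy lock as well; with bookmarks-in-store the store lock
# taken below is sufficient.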
1808 1813 wlock = util.nullcontextmanager()
1809 1814 if not bookmod.bookmarksinstore(repo):
1810 1815 wlock = repo.wlock()
1811 1816 with wlock, repo.lock(), pullop.trmanager:
1812 1817 if confirm or (
1813 1818 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1814 1819 ):
1815 1820 add_confirm_callback(repo, pullop)
1816 1821
1817 1822 # Use the modern wire protocol, if available.
1818 1823 if remote.capable(b'command-changesetdata'):
1819 1824 exchangev2.pull(pullop)
1820 1825 else:
1821 1826 # This should ideally be in _pullbundle2(). However, it needs to run
1822 1827 # before discovery to avoid extra work.
1823 1828 _maybeapplyclonebundle(pullop)
1824 1829 streamclone.maybeperformlegacystreamclone(pullop)
1825 1830 _pulldiscovery(pullop)
1826 1831 if pullop.canusebundle2:
1827 1832 _fullpullbundle2(repo, pullop)
1828 1833 _pullchangeset(pullop)
1829 1834 _pullphase(pullop)
1830 1835 _pullbookmarks(pullop)
1831 1836 _pullobsolete(pullop)
1832 1837
1833 1838 # storing remotenames
1834 1839 if repo.ui.configbool(b'experimental', b'remotenames'):
1835 1840 logexchange.pullremotenames(repo, remote)
1836 1841
1837 1842 return pullop
1838 1843
1839 1844
1840 1845 # list of steps to perform discovery before pull
1841 1846 pulldiscoveryorder = []
1842 1847
1843 1848 # Mapping between step name and function
1844 1849 #
1845 1850 # This exists to help extensions wrap steps if necessary
1846 1851 pulldiscoverymapping = {}
1847 1852
1848 1853
1849 1854 def pulldiscovery(stepname):
1850 1855 """decorator for function performing discovery before pull
1851 1856
1852 1857 The function is added to the step -> function mapping and appended to the
1853 1858 list of steps. Beware that decorated functions will be added in order (this
1854 1859 may matter).
1855 1860
1856 1861 You can only use this decorator for a new step; if you want to wrap a step
1857 1862 from an extension, change the pulldiscoverymapping dictionary directly."""
1858 1863
1859 1864 def dec(func):
1860 1865 assert stepname not in pulldiscoverymapping
1861 1866 pulldiscoverymapping[stepname] = func
1862 1867 pulldiscoveryorder.append(stepname)
1863 1868 return func
1864 1869
1865 1870 return dec
1866 1871
1867 1872
1868 1873 def _pulldiscovery(pullop):
1869 1874 """Run all discovery steps"""
1870 1875 for stepname in pulldiscoveryorder:
1871 1876 step = pulldiscoverymapping[stepname]
1872 1877 step(pullop)
1873 1878
1874 1879
1875 1880 @pulldiscovery(b'b1:bookmarks')
1876 1881 def _pullbookmarkbundle1(pullop):
1877 1882 """fetch bookmark data in bundle1 case
1878 1883
1879 1884 If not using bundle2, we have to fetch bookmarks before changeset
1880 1885 discovery to reduce the chance and impact of race conditions."""
1881 1886 if pullop.remotebookmarks is not None:
1882 1887 return
1883 1888 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1884 1889 # all known bundle2 servers now support listkeys, but let's be nice with
1885 1890 # new implementations.
1886 1891 return
1887 1892 books = listkeys(pullop.remote, b'bookmarks')
1888 1893 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1889 1894
1890 1895
1891 1896 @pulldiscovery(b'changegroup')
1892 1897 def _pulldiscoverychangegroup(pullop):
1893 1898 """discovery phase for the pull
1894 1899
1895 1900 Currently handles changeset discovery only; will change to handle all
1896 1901 discovery at some point."""
1897 1902 tmp = discovery.findcommonincoming(
1898 1903 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1899 1904 )
1900 1905 common, fetch, rheads = tmp
1901 1906 has_node = pullop.repo.unfiltered().changelog.index.has_node
1902 1907 if fetch and rheads:
1903 1908 # If a remote head is filtered locally, put it back in common.
1904 1909 #
1905 1910 # This is a hackish solution to catch most of the "common but locally
1906 1911 # hidden" situations. We do not perform discovery on the unfiltered
1907 1912 # repository because it ends up doing a pathological number of round
1908 1913 # trips for a huge amount of changesets we do not care about.
1909 1914 #
1910 1915 # If a set of such "common but filtered" changesets exists on the
1911 1916 # server but does not include a remote head, we will not detect it.
1912 1917 scommon = set(common)
1913 1918 for n in rheads:
1914 1919 if has_node(n):
1915 1920 if n not in scommon:
1916 1921 common.append(n)
1917 1922 if set(rheads).issubset(set(common)):
1918 1923 fetch = []
1919 1924 pullop.common = common
1920 1925 pullop.fetch = fetch
1921 1926 pullop.rheads = rheads
1922 1927
1923 1928
1924 1929 def _pullbundle2(pullop):
1925 1930 """pull data using bundle2
1926 1931
1927 1932 For now, the only supported data is the changegroup."""
1928 1933 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1929 1934
1930 1935 # make ui easier to access
1931 1936 ui = pullop.repo.ui
1932 1937
1933 1938 # At the moment we don't do stream clones over bundle2. If that is
1934 1939 # implemented then here's where the check for that will go.
1935 1940 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1936 1941
1937 1942 # declare pull perimeters
1938 1943 kwargs[b'common'] = pullop.common
1939 1944 kwargs[b'heads'] = pullop.heads or pullop.rheads
1940 1945
1941 1946 # check that the server supports narrow, then add includepats and excludepats
1942 1947 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1943 1948 if servernarrow and pullop.includepats:
1944 1949 kwargs[b'includepats'] = pullop.includepats
1945 1950 if servernarrow and pullop.excludepats:
1946 1951 kwargs[b'excludepats'] = pullop.excludepats
1947 1952
1948 1953 if streaming:
1949 1954 kwargs[b'cg'] = False
1950 1955 kwargs[b'stream'] = True
1951 1956 pullop.stepsdone.add(b'changegroup')
1952 1957 pullop.stepsdone.add(b'phases')
1953 1958
1954 1959 else:
1955 1960 # pulling changegroup
1956 1961 pullop.stepsdone.add(b'changegroup')
1957 1962
1958 1963 kwargs[b'cg'] = pullop.fetch
1959 1964
1960 1965 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1961 1966 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1962 1967 if not legacyphase and hasbinaryphase:
1963 1968 kwargs[b'phases'] = True
1964 1969 pullop.stepsdone.add(b'phases')
1965 1970
1966 1971 if b'listkeys' in pullop.remotebundle2caps:
1967 1972 if b'phases' not in pullop.stepsdone:
1968 1973 kwargs[b'listkeys'] = [b'phases']
1969 1974
1970 1975 bookmarksrequested = False
1971 1976 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1972 1977 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1973 1978
1974 1979 if pullop.remotebookmarks is not None:
1975 1980 pullop.stepsdone.add(b'request-bookmarks')
1976 1981
1977 1982 if (
1978 1983 b'request-bookmarks' not in pullop.stepsdone
1979 1984 and pullop.remotebookmarks is None
1980 1985 and not legacybookmark
1981 1986 and hasbinarybook
1982 1987 ):
1983 1988 kwargs[b'bookmarks'] = True
1984 1989 bookmarksrequested = True
1985 1990
1986 1991 if b'listkeys' in pullop.remotebundle2caps:
1987 1992 if b'request-bookmarks' not in pullop.stepsdone:
1988 1993 # make sure to always include bookmark data when migrating
1989 1994 # `hg incoming --bundle` to using this function.
1990 1995 pullop.stepsdone.add(b'request-bookmarks')
1991 1996 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1992 1997
1993 1998 # If this is a full pull / clone and the server supports the clone bundles
1994 1999 # feature, tell the server whether we attempted a clone bundle. The
1995 2000 # presence of this flag indicates the client supports clone bundles. This
1996 2001 # will enable the server to treat clients that support clone bundles
1997 2002 # differently from those that don't.
1998 2003 if (
1999 2004 pullop.remote.capable(b'clonebundles')
2000 2005 and pullop.heads is None
2001 2006 and list(pullop.common) == [nullid]
2002 2007 ):
2003 2008 kwargs[b'cbattempted'] = pullop.clonebundleattempted
2004 2009
2005 2010 if streaming:
2006 2011 pullop.repo.ui.status(_(b'streaming all changes\n'))
2007 2012 elif not pullop.fetch:
2008 2013 pullop.repo.ui.status(_(b"no changes found\n"))
2009 2014 pullop.cgresult = 0
2010 2015 else:
2011 2016 if pullop.heads is None and list(pullop.common) == [nullid]:
2012 2017 pullop.repo.ui.status(_(b"requesting all changes\n"))
2013 2018 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2014 2019 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
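# only request obsmarkers in the bundle when both sides support a
# common marker format version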
2015 2020 if obsolete.commonversion(remoteversions) is not None:
2016 2021 kwargs[b'obsmarkers'] = True
2017 2022 pullop.stepsdone.add(b'obsmarkers')
2018 2023 _pullbundle2extraprepare(pullop, kwargs)
2019 2024
2020 2025 with pullop.remote.commandexecutor() as e:
2021 2026 args = dict(kwargs)
2022 2027 args[b'source'] = b'pull'
2023 2028 bundle = e.callcommand(b'getbundle', args).result()
2024 2029
2025 2030 try:
2026 2031 op = bundle2.bundleoperation(
2027 2032 pullop.repo, pullop.gettransaction, source=b'pull'
2028 2033 )
2029 2034 op.modes[b'bookmarks'] = b'records'
2030 2035 bundle2.processbundle(pullop.repo, bundle, op=op)
2031 2036 except bundle2.AbortFromPart as exc:
2032 2037 pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
2033 2038 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
2034 2039 except error.BundleValueError as exc:
2035 2040 raise error.Abort(_(b'missing support for %s') % exc)
2036 2041
2037 2042 if pullop.fetch:
2038 2043 pullop.cgresult = bundle2.combinechangegroupresults(op)
2039 2044
2040 2045 # processing phases change
2041 2046 for namespace, value in op.records[b'listkeys']:
2042 2047 if namespace == b'phases':
2043 2048 _pullapplyphases(pullop, value)
2044 2049
2045 2050 # processing bookmark update
2046 2051 if bookmarksrequested:
2047 2052 books = {}
2048 2053 for record in op.records[b'bookmarks']:
2049 2054 books[record[b'bookmark']] = record[b"node"]
2050 2055 pullop.remotebookmarks = books
2051 2056 else:
2052 2057 for namespace, value in op.records[b'listkeys']:
2053 2058 if namespace == b'bookmarks':
2054 2059 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
2055 2060
2056 2061 # bookmark data were either already there or pulled in the bundle
2057 2062 if pullop.remotebookmarks is not None:
2058 2063 _pullbookmarks(pullop)
2059 2064
2060 2065
2061 2066 def _pullbundle2extraprepare(pullop, kwargs):
2062 2067 """hook function so that extensions can extend the getbundle call"""
2063 2068
2064 2069
2065 2070 def _pullchangeset(pullop):
2066 2071 """pull changeset from unbundle into the local repo"""
2067 2072 # We delay opening the transaction as late as possible so we don't
2068 2073 # open a transaction for nothing and don't break a future useful
2069 2074 # rollback call
2070 2075 if b'changegroup' in pullop.stepsdone:
2071 2076 return
2072 2077 pullop.stepsdone.add(b'changegroup')
2073 2078 if not pullop.fetch:
2074 2079 pullop.repo.ui.status(_(b"no changes found\n"))
2075 2080 pullop.cgresult = 0
2076 2081 return
2077 2082 tr = pullop.gettransaction()
2078 2083 if pullop.heads is None and list(pullop.common) == [nullid]:
2079 2084 pullop.repo.ui.status(_(b"requesting all changes\n"))
2080 2085 elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
2081 2086 # issue1320, avoid a race if remote changed after discovery
2082 2087 pullop.heads = pullop.rheads
2083 2088
2084 2089 if pullop.remote.capable(b'getbundle'):
2085 2090 # TODO: get bundlecaps from remote
2086 2091 cg = pullop.remote.getbundle(
2087 2092 b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
2088 2093 )
2089 2094 elif pullop.heads is None:
2090 2095 with pullop.remote.commandexecutor() as e:
2091 2096 cg = e.callcommand(
2092 2097 b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
2093 2098 ).result()
2094 2099
2095 2100 elif not pullop.remote.capable(b'changegroupsubset'):
2096 2101 raise error.Abort(
2097 2102 _(
2098 2103 b"partial pull cannot be done because "
2099 2104 b"other repository doesn't support "
2100 2105 b"changegroupsubset."
2101 2106 )
2102 2107 )
2103 2108 else:
2104 2109 with pullop.remote.commandexecutor() as e:
2105 2110 cg = e.callcommand(
2106 2111 b'changegroupsubset',
2107 2112 {
2108 2113 b'bases': pullop.fetch,
2109 2114 b'heads': pullop.heads,
2110 2115 b'source': b'pull',
2111 2116 },
2112 2117 ).result()
2113 2118
2114 2119 bundleop = bundle2.applybundle(
2115 2120 pullop.repo, cg, tr, b'pull', pullop.remote.url()
2116 2121 )
2117 2122 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2118 2123
2119 2124
2120 2125 def _pullphase(pullop):
2121 2126 # Get remote phases data from remote
2122 2127 if b'phases' in pullop.stepsdone:
2123 2128 return
2124 2129 remotephases = listkeys(pullop.remote, b'phases')
2125 2130 _pullapplyphases(pullop, remotephases)
2126 2131
2127 2132
2128 2133 def _pullapplyphases(pullop, remotephases):
2129 2134 """apply phase movement from observed remote state"""
2130 2135 if b'phases' in pullop.stepsdone:
2131 2136 return
2132 2137 pullop.stepsdone.add(b'phases')
2133 2138 publishing = bool(remotephases.get(b'publishing', False))
2134 2139 if remotephases and not publishing:
2135 2140 # remote is new and non-publishing
2136 2141 pheads, _dr = phases.analyzeremotephases(
2137 2142 pullop.repo, pullop.pulledsubset, remotephases
2138 2143 )
2139 2144 dheads = pullop.pulledsubset
2140 2145 else:
2141 2146 # Remote is old or publishing; all common changesets
2142 2147 # should be seen as public
2143 2148 pheads = pullop.pulledsubset
2144 2149 dheads = []
2145 2150 unfi = pullop.repo.unfiltered()
2146 2151 phase = unfi._phasecache.phase
2147 2152 rev = unfi.changelog.index.get_rev
2148 2153 public = phases.public
2149 2154 draft = phases.draft
2150 2155
2151 2156 # exclude changesets already public locally and update the others
2152 2157 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2153 2158 if pheads:
2154 2159 tr = pullop.gettransaction()
2155 2160 phases.advanceboundary(pullop.repo, tr, public, pheads)
2156 2161
2157 2162 # exclude changesets already draft locally and update the others
2158 2163 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2159 2164 if dheads:
2160 2165 tr = pullop.gettransaction()
2161 2166 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2162 2167
2163 2168
2164 2169 def _pullbookmarks(pullop):
2165 2170 """process the remote bookmark information to update the local one"""
2166 2171 if b'bookmarks' in pullop.stepsdone:
2167 2172 return
2168 2173 pullop.stepsdone.add(b'bookmarks')
2169 2174 repo = pullop.repo
2170 2175 remotebookmarks = pullop.remotebookmarks
2171 2176 bookmod.updatefromremote(
2172 2177 repo.ui,
2173 2178 repo,
2174 2179 remotebookmarks,
2175 2180 pullop.remote.url(),
2176 2181 pullop.gettransaction,
2177 2182 explicit=pullop.explicitbookmarks,
2178 2183 )
2179 2184
2180 2185
2181 2186 def _pullobsolete(pullop):
2182 2187 """utility function to pull obsolete markers from a remote
2183 2188
2184 2189 `gettransaction` is a function that returns the pull transaction, creating
2185 2190 one if necessary. We return the transaction to inform the calling code that
2186 2191 a new transaction has been created (when applicable).
2187 2192
2188 2193 Exists mostly to allow overriding for experimentation purposes"""
2189 2194 if b'obsmarkers' in pullop.stepsdone:
2190 2195 return
2191 2196 pullop.stepsdone.add(b'obsmarkers')
2192 2197 tr = None
2193 2198 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2194 2199 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2195 2200 remoteobs = listkeys(pullop.remote, b'obsolete')
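# Markers arrive pushkey-escaped as chunked values keyed dump0..dumpN
# (the counterpart of obsolete._pushkeyescape on the push side); dump0
# is present whenever the remote has any markers at all.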
2196 2201 if b'dump0' in remoteobs:
2197 2202 tr = pullop.gettransaction()
2198 2203 markers = []
2199 2204 for key in sorted(remoteobs, reverse=True):
2200 2205 if key.startswith(b'dump'):
2201 2206 data = util.b85decode(remoteobs[key])
2202 2207 version, newmarks = obsolete._readmarkers(data)
2203 2208 markers += newmarks
2204 2209 if markers:
2205 2210 pullop.repo.obsstore.add(tr, markers)
2206 2211 pullop.repo.invalidatevolatilesets()
2207 2212 return tr
2208 2213
2209 2214
2210 2215 def applynarrowacl(repo, kwargs):
2211 2216 """Apply narrow fetch access control.
2212 2217
2213 2218 This massages the named arguments for getbundle wire protocol commands
2214 2219 so requested data is filtered through access control rules.
2215 2220 """
2216 2221 ui = repo.ui
2217 2222 # TODO this assumes existence of HTTP and is a layering violation.
2218 2223 username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
2219 2224 user_includes = ui.configlist(
2220 2225 _NARROWACL_SECTION,
2221 2226 username + b'.includes',
2222 2227 ui.configlist(_NARROWACL_SECTION, b'default.includes'),
2223 2228 )
2224 2229 user_excludes = ui.configlist(
2225 2230 _NARROWACL_SECTION,
2226 2231 username + b'.excludes',
2227 2232 ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
2228 2233 )
2229 2234 if not user_includes:
2230 2235 raise error.Abort(
2231 2236 _(b"%s configuration for user %s is empty")
2232 2237 % (_NARROWACL_SECTION, username)
2233 2238 )
2234 2239
2235 2240 user_includes = [
2236 2241 b'path:.' if p == b'*' else b'path:' + p for p in user_includes
2237 2242 ]
2238 2243 user_excludes = [
2239 2244 b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
2240 2245 ]
2241 2246
2242 2247 req_includes = set(kwargs.get('includepats', []))
2243 2248 req_excludes = set(kwargs.get('excludepats', []))
2244 2249
2245 2250 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
2246 2251 req_includes, req_excludes, user_includes, user_excludes
2247 2252 )
2248 2253
2249 2254 if invalid_includes:
2250 2255 raise error.Abort(
2251 2256 _(b"The following includes are not accessible for %s: %s")
2252 2257 % (username, stringutil.pprint(invalid_includes))
2253 2258 )
2254 2259
2255 2260 new_args = {}
2256 2261 new_args.update(kwargs)
2257 2262 new_args['narrow'] = True
2258 2263 new_args['narrow_acl'] = True
2259 2264 new_args['includepats'] = req_includes
2260 2265 if req_excludes:
2261 2266 new_args['excludepats'] = req_excludes
2262 2267
2263 2268 return new_args
2264 2269
2265 2270
2266 2271 def _computeellipsis(repo, common, heads, known, match, depth=None):
2267 2272 """Compute the shape of a narrowed DAG.
2268 2273
2269 2274 Args:
2270 2275 repo: The repository we're transferring.
2271 2276 common: The roots of the DAG range we're transferring.
2272 2277 May be just [nullid], which means all ancestors of heads.
2273 2278 heads: The heads of the DAG range we're transferring.
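known: Additional revs that are treated as required, like the heads,
when computing ellipsis roots.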
2274 2279 match: The narrowmatcher that allows us to identify relevant changes.
2275 2280 depth: If not None, only consider nodes to be full nodes if they are at
2276 2281 most depth changesets away from one of heads.
2277 2282
2278 2283 Returns:
2279 2284 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2280 2285
2281 2286 visitnodes: The list of nodes (either full or ellipsis) which
2282 2287 need to be sent to the client.
2283 2288 relevant_nodes: The set of changelog nodes which change a file inside
2284 2289 the narrowspec. The client needs these as non-ellipsis nodes.
2285 2290 ellipsisroots: A dict of {rev: parents} that is used in
2286 2291 narrowchangegroup to produce ellipsis nodes with the
2287 2292 correct parents.
2288 2293 """
2289 2294 cl = repo.changelog
2290 2295 mfl = repo.manifestlog
2291 2296
2292 2297 clrev = cl.rev
2293 2298
2294 2299 commonrevs = {clrev(n) for n in common} | {nullrev}
2295 2300 headsrevs = {clrev(n) for n in heads}
2296 2301
2297 2302 if depth:
2298 2303 revdepth = {h: 0 for h in headsrevs}
2299 2304
2300 2305 ellipsisheads = collections.defaultdict(set)
2301 2306 ellipsisroots = collections.defaultdict(set)
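# ellipsisheads maps a rev to the ellipsis heads it has been found to
# contribute history to; ellipsisroots maps an ellipsis head to the
# roots chosen for it (kept to at most two by addroot/splithead below).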
2302 2307
2303 2308 def addroot(head, curchange):
2304 2309 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2305 2310 ellipsisroots[head].add(curchange)
2306 2311 # Recursively split ellipsis heads with 3 roots by finding the
2307 2312 # roots' youngest common descendant which is an elided merge commit.
2308 2313 # That descendant takes 2 of the 3 roots as its own, and becomes a
2309 2314 # root of the head.
2310 2315 while len(ellipsisroots[head]) > 2:
2311 2316 child, roots = splithead(head)
2312 2317 splitroots(head, child, roots)
2313 2318 head = child # Recurse in case we just added a 3rd root
2314 2319
2315 2320 def splitroots(head, child, roots):
2316 2321 ellipsisroots[head].difference_update(roots)
2317 2322 ellipsisroots[head].add(child)
2318 2323 ellipsisroots[child].update(roots)
2319 2324 ellipsisroots[child].discard(child)
2320 2325
2321 2326 def splithead(head):
2322 2327 r1, r2, r3 = sorted(ellipsisroots[head])
2323 2328 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2324 2329 mid = repo.revs(
2325 2330 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2326 2331 )
2327 2332 for j in mid:
2328 2333 if j == nr2:
2329 2334 return nr2, (nr1, nr2)
2330 2335 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2331 2336 return j, (nr1, nr2)
2332 2337 raise error.Abort(
2333 2338 _(
2334 2339 b'Failed to split up ellipsis node! head: %d, '
2335 2340 b'roots: %d %d %d'
2336 2341 )
2337 2342 % (head, r1, r2, r3)
2338 2343 )
2339 2344
2340 2345 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2341 2346 visit = reversed(missing)
2342 2347 relevant_nodes = set()
2343 2348 visitnodes = [cl.node(m) for m in missing]
2344 2349 required = set(headsrevs) | known
2345 2350 for rev in visit:
2346 2351 clrev = cl.changelogrevision(rev)
2347 2352 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2348 2353 if depth is not None:
2349 2354 curdepth = revdepth[rev]
2350 2355 for p in ps:
2351 2356 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2352 2357 needed = False
2353 2358 shallow_enough = depth is None or revdepth[rev] <= depth
2354 2359 if shallow_enough:
2355 2360 curmf = mfl[clrev.manifest].read()
2356 2361 if ps:
2357 2362 # We choose to not trust the changed files list in
2358 2363 # changesets because it's not always correct. TODO: could
2359 2364 # we trust it for the non-merge case?
2360 2365 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2361 2366 needed = bool(curmf.diff(p1mf, match))
2362 2367 if not needed and len(ps) > 1:
2363 2368 # For merge changes, the list of changed files is not
2364 2369 # helpful, since we need to emit the merge if a file
2365 2370 # in the narrow spec has changed on either side of the
2366 2371 # merge. As a result, we do a manifest diff to check.
2367 2372 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2368 2373 needed = bool(curmf.diff(p2mf, match))
2369 2374 else:
2370 2375 # For a root node, we need to include the node if any
2371 2376 # files in the node match the narrowspec.
2372 2377 needed = any(curmf.walk(match))
2373 2378
2374 2379 if needed:
2375 2380 for head in ellipsisheads[rev]:
2376 2381 addroot(head, rev)
2377 2382 for p in ps:
2378 2383 required.add(p)
2379 2384 relevant_nodes.add(cl.node(rev))
2380 2385 else:
2381 2386 if not ps:
2382 2387 ps = [nullrev]
2383 2388 if rev in required:
2384 2389 for head in ellipsisheads[rev]:
2385 2390 addroot(head, rev)
2386 2391 for p in ps:
2387 2392 ellipsisheads[p].add(rev)
2388 2393 else:
2389 2394 for p in ps:
2390 2395 ellipsisheads[p] |= ellipsisheads[rev]
2391 2396
2392 2397 # add common changesets as roots of their reachable ellipsis heads
2393 2398 for c in commonrevs:
2394 2399 for head in ellipsisheads[c]:
2395 2400 addroot(head, c)
2396 2401 return visitnodes, relevant_nodes, ellipsisroots
2397 2402
2398 2403
2399 2404 def caps20to10(repo, role):
2400 2405 """return a set with appropriate options to use bundle20 during getbundle"""
2401 2406 caps = {b'HG20'}
2402 2407 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2403 2408 caps.add(b'bundle2=' + urlreq.quote(capsblob))
2404 2409 return caps
2405 2410
2406 2411
2407 2412 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2408 2413 getbundle2partsorder = []
2409 2414
2410 2415 # Mapping between step name and function
2411 2416 #
2412 2417 # This exists to help extensions wrap steps if necessary
2413 2418 getbundle2partsmapping = {}
2414 2419
2415 2420
2416 2421 def getbundle2partsgenerator(stepname, idx=None):
2417 2422 """decorator for function generating bundle2 part for getbundle
2418 2423
2419 2424 The function is added to the step -> function mapping and appended to the
2420 2425 list of steps. Beware that decorated functions will be added in order
2421 2426 (this may matter).
2422 2427
2423 2428 You can only use this decorator for new steps; if you want to wrap a step
2424 2429 from an extension, modify the getbundle2partsmapping dictionary directly."""
2425 2430
2426 2431 def dec(func):
2427 2432 assert stepname not in getbundle2partsmapping
2428 2433 getbundle2partsmapping[stepname] = func
2429 2434 if idx is None:
2430 2435 getbundle2partsorder.append(stepname)
2431 2436 else:
2432 2437 getbundle2partsorder.insert(idx, stepname)
2433 2438 return func
2434 2439
2435 2440 return dec
2436 2441
2437 2442
2438 2443 def bundle2requested(bundlecaps):
2439 2444 if bundlecaps is not None:
2440 2445 return any(cap.startswith(b'HG2') for cap in bundlecaps)
2441 2446 return False
2442 2447
2443 2448
2444 2449 def getbundlechunks(
2445 2450 repo, source, heads=None, common=None, bundlecaps=None, **kwargs
2446 2451 ):
2447 2452 """Return chunks constituting a bundle's raw data.
2448 2453
2449 2454 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2450 2455 passed.
2451 2456
2452 2457 Returns a 2-tuple of a dict with metadata about the generated bundle
2453 2458 and an iterator over raw chunks (of varying sizes).
2454 2459 """
2455 2460 kwargs = pycompat.byteskwargs(kwargs)
2456 2461 info = {}
2457 2462 usebundle2 = bundle2requested(bundlecaps)
2458 2463 # bundle10 case
2459 2464 if not usebundle2:
2460 2465 if bundlecaps and not kwargs.get(b'cg', True):
2461 2466 raise ValueError(
2462 2467 _(b'request for bundle10 must include changegroup')
2463 2468 )
2464 2469
2465 2470 if kwargs:
2466 2471 raise ValueError(
2467 2472 _(b'unsupported getbundle arguments: %s')
2468 2473 % b', '.join(sorted(kwargs.keys()))
2469 2474 )
2470 2475 outgoing = _computeoutgoing(repo, heads, common)
2471 2476 info[b'bundleversion'] = 1
2472 2477 return (
2473 2478 info,
2474 2479 changegroup.makestream(
2475 2480 repo, outgoing, b'01', source, bundlecaps=bundlecaps
2476 2481 ),
2477 2482 )
2478 2483
2479 2484 # bundle20 case
2480 2485 info[b'bundleversion'] = 2
2481 2486 b2caps = {}
2482 2487 for bcaps in bundlecaps:
2483 2488 if bcaps.startswith(b'bundle2='):
2484 2489 blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
2485 2490 b2caps.update(bundle2.decodecaps(blob))
2486 2491 bundler = bundle2.bundle20(repo.ui, b2caps)
2487 2492
2488 2493 kwargs[b'heads'] = heads
2489 2494 kwargs[b'common'] = common
2490 2495
2491 2496 for name in getbundle2partsorder:
2492 2497 func = getbundle2partsmapping[name]
2493 2498 func(
2494 2499 bundler,
2495 2500 repo,
2496 2501 source,
2497 2502 bundlecaps=bundlecaps,
2498 2503 b2caps=b2caps,
2499 2504 **pycompat.strkwargs(kwargs)
2500 2505 )
2501 2506
2502 2507 info[b'prefercompressed'] = bundler.prefercompressed
2503 2508
2504 2509 return info, bundler.getchunks()
2505 2510
2506 2511
2507 2512 @getbundle2partsgenerator(b'stream2')
2508 2513 def _getbundlestream2(bundler, repo, *args, **kwargs):
2509 2514 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2510 2515
2511 2516
2512 2517 @getbundle2partsgenerator(b'changegroup')
2513 2518 def _getbundlechangegrouppart(
2514 2519 bundler,
2515 2520 repo,
2516 2521 source,
2517 2522 bundlecaps=None,
2518 2523 b2caps=None,
2519 2524 heads=None,
2520 2525 common=None,
2521 2526 **kwargs
2522 2527 ):
2523 2528 """add a changegroup part to the requested bundle"""
2524 2529 if not kwargs.get('cg', True) or not b2caps:
2525 2530 return
2526 2531
2527 2532 version = b'01'
2528 2533 cgversions = b2caps.get(b'changegroup')
2529 2534 if cgversions: # 3.1 and 3.2 ship with an empty value
2530 2535 cgversions = [
2531 2536 v
2532 2537 for v in cgversions
2533 2538 if v in changegroup.supportedoutgoingversions(repo)
2534 2539 ]
2535 2540 if not cgversions:
2536 2541 raise error.Abort(_(b'no common changegroup version'))
2537 2542 version = max(cgversions)
2538 2543
2539 2544 outgoing = _computeoutgoing(repo, heads, common)
2540 2545 if not outgoing.missing:
2541 2546 return
2542 2547
2543 2548 if kwargs.get('narrow', False):
2544 2549 include = sorted(filter(bool, kwargs.get('includepats', [])))
2545 2550 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2546 2551 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2547 2552 else:
2548 2553 matcher = None
2549 2554
2550 2555 cgstream = changegroup.makestream(
2551 2556 repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
2552 2557 )
2553 2558
2554 2559 part = bundler.newpart(b'changegroup', data=cgstream)
2555 2560 if cgversions:
2556 2561 part.addparam(b'version', version)
2557 2562
2558 2563 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2559 2564
2560 2565 if b'treemanifest' in repo.requirements:
2561 2566 part.addparam(b'treemanifest', b'1')
2562 2567
2563 2568 if b'exp-sidedata-flag' in repo.requirements:
2564 2569 part.addparam(b'exp-sidedata', b'1')
2565 2570
2566 2571 if (
2567 2572 kwargs.get('narrow', False)
2568 2573 and kwargs.get('narrow_acl', False)
2569 2574 and (include or exclude)
2570 2575 ):
2571 2576 # this is mandatory because otherwise ACL clients won't work
2572 2577 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2573 2578 narrowspecpart.data = b'%s\0%s' % (
2574 2579 b'\n'.join(include),
2575 2580 b'\n'.join(exclude),
2576 2581 )
2577 2582
2578 2583
2579 2584 @getbundle2partsgenerator(b'bookmarks')
2580 2585 def _getbundlebookmarkpart(
2581 2586 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2582 2587 ):
2583 2588 """add a bookmark part to the requested bundle"""
2584 2589 if not kwargs.get('bookmarks', False):
2585 2590 return
2586 2591 if not b2caps or b'bookmarks' not in b2caps:
2587 2592 raise error.Abort(_(b'no common bookmarks exchange method'))
2588 2593 books = bookmod.listbinbookmarks(repo)
2589 2594 data = bookmod.binaryencode(books)
2590 2595 if data:
2591 2596 bundler.newpart(b'bookmarks', data=data)
2592 2597
2593 2598
2594 2599 @getbundle2partsgenerator(b'listkeys')
2595 2600 def _getbundlelistkeysparts(
2596 2601 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2597 2602 ):
2598 2603 """add parts containing listkeys namespaces to the requested bundle"""
2599 2604 listkeys = kwargs.get('listkeys', ())
2600 2605 for namespace in listkeys:
2601 2606 part = bundler.newpart(b'listkeys')
2602 2607 part.addparam(b'namespace', namespace)
2603 2608 keys = repo.listkeys(namespace).items()
2604 2609 part.data = pushkey.encodekeys(keys)
2605 2610
2606 2611
2607 2612 @getbundle2partsgenerator(b'obsmarkers')
2608 2613 def _getbundleobsmarkerpart(
2609 2614 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2610 2615 ):
2611 2616 """add an obsolescence markers part to the requested bundle"""
2612 2617 if kwargs.get('obsmarkers', False):
2613 2618 if heads is None:
2614 2619 heads = repo.heads()
2615 2620 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2616 2621 markers = repo.obsstore.relevantmarkers(subset)
2617 2622 markers = obsutil.sortedmarkers(markers)
2618 2623 bundle2.buildobsmarkerspart(bundler, markers)
2619 2624
2620 2625
2621 2626 @getbundle2partsgenerator(b'phases')
2622 2627 def _getbundlephasespart(
2623 2628 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2624 2629 ):
2625 2630 """add phase heads part to the requested bundle"""
2626 2631 if kwargs.get('phases', False):
2627 2632 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2628 2633 raise error.Abort(_(b'no common phases exchange method'))
2629 2634 if heads is None:
2630 2635 heads = repo.heads()
2631 2636
2632 2637 headsbyphase = collections.defaultdict(set)
2633 2638 if repo.publishing():
2634 2639 headsbyphase[phases.public] = heads
2635 2640 else:
2636 2641 # find the appropriate heads to move
2637 2642
2638 2643 phase = repo._phasecache.phase
2639 2644 node = repo.changelog.node
2640 2645 rev = repo.changelog.rev
2641 2646 for h in heads:
2642 2647 headsbyphase[phase(repo, rev(h))].add(h)
2643 2648 seenphases = list(headsbyphase.keys())
2644 2649
2645 2650 # We do not handle anything but public and draft phases for now
2646 2651 if seenphases:
2647 2652 assert max(seenphases) <= phases.draft
2648 2653
2649 2654 # if client is pulling non-public changesets, we need to find
2650 2655 # intermediate public heads.
2651 2656 draftheads = headsbyphase.get(phases.draft, set())
2652 2657 if draftheads:
2653 2658 publicheads = headsbyphase.get(phases.public, set())
2654 2659
2655 2660 revset = b'heads(only(%ln, %ln) and public())'
2656 2661 extraheads = repo.revs(revset, draftheads, publicheads)
2657 2662 for r in extraheads:
2658 2663 headsbyphase[phases.public].add(node(r))
2659 2664
2660 2665 # transform data in a format used by the encoding function
2661 2666 phasemapping = {
2662 2667 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2663 2668 }
2664 2669
2665 2670 # generate the actual part
2666 2671 phasedata = phases.binaryencode(phasemapping)
2667 2672 bundler.newpart(b'phase-heads', data=phasedata)
2668 2673
2669 2674
2670 2675 @getbundle2partsgenerator(b'hgtagsfnodes')
2671 2676 def _getbundletagsfnodes(
2672 2677 bundler,
2673 2678 repo,
2674 2679 source,
2675 2680 bundlecaps=None,
2676 2681 b2caps=None,
2677 2682 heads=None,
2678 2683 common=None,
2679 2684 **kwargs
2680 2685 ):
2681 2686 """Transfer the .hgtags filenodes mapping.
2682 2687
2683 2688 Only values for heads in this bundle will be transferred.
2684 2689
2685 2690 The part data consists of pairs of 20 byte changeset node and .hgtags
2686 2691 filenodes raw values.
2687 2692 """
2688 2693 # Don't send unless:
2689 2694 # - changesets are being exchanged,
2690 2695 # - the client supports it.
2691 2696 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2692 2697 return
2693 2698
2694 2699 outgoing = _computeoutgoing(repo, heads, common)
2695 2700 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2696 2701
2697 2702
2698 2703 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2699 2704 def _getbundlerevbranchcache(
2700 2705 bundler,
2701 2706 repo,
2702 2707 source,
2703 2708 bundlecaps=None,
2704 2709 b2caps=None,
2705 2710 heads=None,
2706 2711 common=None,
2707 2712 **kwargs
2708 2713 ):
2709 2714 """Transfer the rev-branch-cache mapping
2710 2715
2711 2716 The payload is a series of data related to each branch
2712 2717
2713 2718 1) branch name length
2714 2719 2) number of open heads
2715 2720 3) number of closed heads
2716 2721 4) open heads nodes
2717 2722 5) closed heads nodes
2718 2723 """
2719 2724 # Don't send unless:
2720 2725 # - changesets are being exchanged,
2721 2726 # - the client supports it.
2722 2727 # - narrow bundle isn't in play (not currently compatible).
2723 2728 if (
2724 2729 not kwargs.get('cg', True)
2725 2730 or not b2caps
2726 2731 or b'rev-branch-cache' not in b2caps
2727 2732 or kwargs.get('narrow', False)
2728 2733 or repo.ui.has_section(_NARROWACL_SECTION)
2729 2734 ):
2730 2735 return
2731 2736
2732 2737 outgoing = _computeoutgoing(repo, heads, common)
2733 2738 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2734 2739
2735 2740
2736 2741 def check_heads(repo, their_heads, context):
2737 2742 """check if the heads of a repo have been modified
2738 2743
2739 2744 Used by peer for unbundling.
2740 2745 """
2741 2746 heads = repo.heads()
2742 2747 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2743 2748 if not (
2744 2749 their_heads == [b'force']
2745 2750 or their_heads == heads
2746 2751 or their_heads == [b'hashed', heads_hash]
2747 2752 ):
2748 2753 # someone else committed/pushed/unbundled while we
2749 2754 # were transferring data
2750 2755 raise error.PushRaced(
2751 2756 b'repository changed while %s - please try again' % context
2752 2757 )
2753 2758
2754 2759
2755 2760 def unbundle(repo, cg, heads, source, url):
2756 2761 """Apply a bundle to a repo.
2757 2762
2758 2763 This function makes sure the repo is locked during the application and has
2759 2764 a mechanism to check that no push race occurred between the creation of the
2760 2765 bundle and its application.
2761 2766
2762 2767 If the push was raced, a PushRaced exception is raised."""
2763 2768 r = 0
2764 2769 # need a transaction when processing a bundle2 stream
2765 2770 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2766 2771 lockandtr = [None, None, None]
2767 2772 recordout = None
2768 2773 # quick fix for output mismatch with bundle2 in 3.4
2769 2774 captureoutput = repo.ui.configbool(
2770 2775 b'experimental', b'bundle2-output-capture'
2771 2776 )
2772 2777 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2773 2778 captureoutput = True
2774 2779 try:
2775 2780 # note: outside bundle1, 'heads' is expected to be empty and this
2776 2781 # 'check_heads' call will be a no-op
2777 2782 check_heads(repo, heads, b'uploading changes')
2778 2783 # push can proceed
2779 2784 if not isinstance(cg, bundle2.unbundle20):
2780 2785 # legacy case: bundle1 (changegroup 01)
2781 2786 txnname = b"\n".join([source, util.hidepassword(url)])
2782 2787 with repo.lock(), repo.transaction(txnname) as tr:
2783 2788 op = bundle2.applybundle(repo, cg, tr, source, url)
2784 2789 r = bundle2.combinechangegroupresults(op)
2785 2790 else:
2786 2791 r = None
2787 2792 try:
2788 2793
2789 2794 def gettransaction():
2790 2795 if not lockandtr[2]:
2791 2796 if not bookmod.bookmarksinstore(repo):
2792 2797 lockandtr[0] = repo.wlock()
2793 2798 lockandtr[1] = repo.lock()
2794 2799 lockandtr[2] = repo.transaction(source)
2795 2800 lockandtr[2].hookargs[b'source'] = source
2796 2801 lockandtr[2].hookargs[b'url'] = url
2797 2802 lockandtr[2].hookargs[b'bundle2'] = b'1'
2798 2803 return lockandtr[2]
2799 2804
2800 2805 # Do greedy locking by default until we're satisfied with lazy
2801 2806 # locking.
2802 2807 if not repo.ui.configbool(
2803 2808 b'experimental', b'bundle2lazylocking'
2804 2809 ):
2805 2810 gettransaction()
2806 2811
2807 2812 op = bundle2.bundleoperation(
2808 2813 repo,
2809 2814 gettransaction,
2810 2815 captureoutput=captureoutput,
2811 2816 source=b'push',
2812 2817 )
2813 2818 try:
2814 2819 op = bundle2.processbundle(repo, cg, op=op)
2815 2820 finally:
2816 2821 r = op.reply
2817 2822 if captureoutput and r is not None:
2818 2823 repo.ui.pushbuffer(error=True, subproc=True)
2819 2824
2820 2825 def recordout(output):
2821 2826 r.newpart(b'output', data=output, mandatory=False)
2822 2827
2823 2828 if lockandtr[2] is not None:
2824 2829 lockandtr[2].close()
2825 2830 except BaseException as exc:
2826 2831 exc.duringunbundle2 = True
2827 2832 if captureoutput and r is not None:
2828 2833 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2829 2834
2830 2835 def recordout(output):
2831 2836 part = bundle2.bundlepart(
2832 2837 b'output', data=output, mandatory=False
2833 2838 )
2834 2839 parts.append(part)
2835 2840
2836 2841 raise
2837 2842 finally:
2838 2843 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2839 2844 if recordout is not None:
2840 2845 recordout(repo.ui.popbuffer())
2841 2846 return r
2842 2847
2843 2848
2844 2849 def _maybeapplyclonebundle(pullop):
2845 2850 """Apply a clone bundle from a remote, if possible."""
2846 2851
2847 2852 repo = pullop.repo
2848 2853 remote = pullop.remote
2849 2854
2850 2855 if not repo.ui.configbool(b'ui', b'clonebundles'):
2851 2856 return
2852 2857
2853 2858 # Only run if local repo is empty.
2854 2859 if len(repo):
2855 2860 return
2856 2861
2857 2862 if pullop.heads:
2858 2863 return
2859 2864
2860 2865 if not remote.capable(b'clonebundles'):
2861 2866 return
2862 2867
2863 2868 with remote.commandexecutor() as e:
2864 2869 res = e.callcommand(b'clonebundles', {}).result()
2865 2870
2866 2871 # If we call the wire protocol command, that's good enough to record the
2867 2872 # attempt.
2868 2873 pullop.clonebundleattempted = True
2869 2874
2870 2875 entries = parseclonebundlesmanifest(repo, res)
2871 2876 if not entries:
2872 2877 repo.ui.note(
2873 2878 _(
2874 2879 b'no clone bundles available on remote; '
2875 2880 b'falling back to regular clone\n'
2876 2881 )
2877 2882 )
2878 2883 return
2879 2884
2880 2885 entries = filterclonebundleentries(
2881 2886 repo, entries, streamclonerequested=pullop.streamclonerequested
2882 2887 )
2883 2888
2884 2889 if not entries:
2885 2890 # There is a thundering herd concern here. However, if a server
2886 2891 # operator doesn't advertise bundles appropriate for its clients,
2887 2892 # they deserve what's coming. Furthermore, from a client's
2888 2893 # perspective, no automatic fallback would mean not being able to
2889 2894 # clone!
2890 2895 repo.ui.warn(
2891 2896 _(
2892 2897 b'no compatible clone bundles available on server; '
2893 2898 b'falling back to regular clone\n'
2894 2899 )
2895 2900 )
2896 2901 repo.ui.warn(
2897 2902 _(b'(you may want to report this to the server operator)\n')
2898 2903 )
2899 2904 return
2900 2905
2901 2906 entries = sortclonebundleentries(repo.ui, entries)
2902 2907
2903 2908 url = entries[0][b'URL']
2904 2909 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2905 2910 if trypullbundlefromurl(repo.ui, repo, url):
2906 2911 repo.ui.status(_(b'finished applying clone bundle\n'))
2907 2912 # Bundle failed.
2908 2913 #
2909 2914 # We abort by default to avoid the thundering herd of
2910 2915 # clients flooding a server that was expecting expensive
2911 2916 # clone load to be offloaded.
2912 2917 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2913 2918 repo.ui.warn(_(b'falling back to normal clone\n'))
2914 2919 else:
2915 2920 raise error.Abort(
2916 2921 _(b'error applying bundle'),
2917 2922 hint=_(
2918 2923 b'if this error persists, consider contacting '
2919 2924 b'the server operator or disable clone '
2920 2925 b'bundles via '
2921 2926 b'"--config ui.clonebundles=false"'
2922 2927 ),
2923 2928 )
2924 2929
2925 2930
2926 2931 def parseclonebundlesmanifest(repo, s):
2927 2932 """Parses the raw text of a clone bundles manifest.
2928 2933
2929 2934 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2930 2935 to the URL and other keys are the attributes for the entry.
2931 2936 """
2932 2937 m = []
2933 2938 for line in s.splitlines():
2934 2939 fields = line.split()
2935 2940 if not fields:
2936 2941 continue
2937 2942 attrs = {b'URL': fields[0]}
2938 2943 for rawattr in fields[1:]:
2939 2944 key, value = rawattr.split(b'=', 1)
2940 2945 key = urlreq.unquote(key)
2941 2946 value = urlreq.unquote(value)
2942 2947 attrs[key] = value
2943 2948
2944 2949 # Parse BUNDLESPEC into components. This makes client-side
2945 2950 # preferences easier to specify since you can prefer a single
2946 2951 # component of the BUNDLESPEC.
2947 2952 if key == b'BUNDLESPEC':
2948 2953 try:
2949 2954 bundlespec = parsebundlespec(repo, value)
2950 2955 attrs[b'COMPRESSION'] = bundlespec.compression
2951 2956 attrs[b'VERSION'] = bundlespec.version
2952 2957 except error.InvalidBundleSpecification:
2953 2958 pass
2954 2959 except error.UnsupportedBundleSpecification:
2955 2960 pass
2956 2961
2957 2962 m.append(attrs)
2958 2963
2959 2964 return m
2960 2965
2961 2966
2962 2967 def isstreamclonespec(bundlespec):
2963 2968 # Stream clone v1
2964 2969 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
2965 2970 return True
2966 2971
2967 2972 # Stream clone v2
2968 2973 if (
2969 2974 bundlespec.wirecompression == b'UN'
2970 2975 and bundlespec.wireversion == b'02'
2971 2976 and bundlespec.contentopts.get(b'streamv2')
2972 2977 ):
2973 2978 return True
2974 2979
2975 2980 return False
2976 2981
2977 2982
2978 2983 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2979 2984 """Remove incompatible clone bundle manifest entries.
2980 2985
2981 2986 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2982 2987 and returns a new list consisting of only the entries that this client
2983 2988 should be able to apply.
2984 2989
2985 2990 There is no guarantee we'll be able to apply all returned entries because
2986 2991 the metadata we use to filter on may be missing or wrong.
2987 2992 """
2988 2993 newentries = []
2989 2994 for entry in entries:
2990 2995 spec = entry.get(b'BUNDLESPEC')
2991 2996 if spec:
2992 2997 try:
2993 2998 bundlespec = parsebundlespec(repo, spec, strict=True)
2994 2999
2995 3000 # If a stream clone was requested, filter out non-streamclone
2996 3001 # entries.
2997 3002 if streamclonerequested and not isstreamclonespec(bundlespec):
2998 3003 repo.ui.debug(
2999 3004 b'filtering %s because not a stream clone\n'
3000 3005 % entry[b'URL']
3001 3006 )
3002 3007 continue
3003 3008
3004 3009 except error.InvalidBundleSpecification as e:
3005 3010 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3006 3011 continue
3007 3012 except error.UnsupportedBundleSpecification as e:
3008 3013 repo.ui.debug(
3009 3014 b'filtering %s because unsupported bundle '
3010 3015 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3011 3016 )
3012 3017 continue
3013 3018 # If we don't have a spec and requested a stream clone, we don't know
3014 3019 # what the entry is so don't attempt to apply it.
3015 3020 elif streamclonerequested:
3016 3021 repo.ui.debug(
3017 3022 b'filtering %s because cannot determine if a stream '
3018 3023 b'clone bundle\n' % entry[b'URL']
3019 3024 )
3020 3025 continue
3021 3026
3022 3027 if b'REQUIRESNI' in entry and not sslutil.hassni:
3023 3028 repo.ui.debug(
3024 3029 b'filtering %s because SNI not supported\n' % entry[b'URL']
3025 3030 )
3026 3031 continue
3027 3032
3028 3033 if b'REQUIREDRAM' in entry:
3029 3034 try:
3030 3035 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3031 3036 except error.ParseError:
3032 3037 repo.ui.debug(
3033 3038 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3034 3039 % entry[b'URL']
3035 3040 )
3036 3041 continue
3037 3042 actualram = repo.ui.estimatememory()
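# Be conservative: only keep the entry if its advertised requirement
# fits within roughly two thirds of the detected system memory.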
3038 3043 if actualram is not None and actualram * 0.66 < requiredram:
3039 3044 repo.ui.debug(
3040 3045 b'filtering %s as it needs more than 2/3 of system memory\n'
3041 3046 % entry[b'URL']
3042 3047 )
3043 3048 continue
3044 3049
3045 3050 newentries.append(entry)
3046 3051
3047 3052 return newentries
3048 3053
3049 3054
3050 3055 class clonebundleentry(object):
3051 3056 """Represents an item in a clone bundles manifest.
3052 3057
3053 3058 This rich class is needed to support sorting since sorted() in Python 3
3054 3059 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3055 3060 won't work.
3056 3061 """
3057 3062
3058 3063 def __init__(self, value, prefers):
3059 3064 self.value = value
3060 3065 self.prefers = prefers
3061 3066
3062 3067 def _cmp(self, other):
3063 3068 for prefkey, prefvalue in self.prefers:
3064 3069 avalue = self.value.get(prefkey)
3065 3070 bvalue = other.value.get(prefkey)
3066 3071
3067 3072 # Special case: b is missing the attribute and a matches exactly.
3068 3073 if avalue is not None and bvalue is None and avalue == prefvalue:
3069 3074 return -1
3070 3075
3071 3076 # Special case: a is missing the attribute and b matches exactly.
3072 3077 if bvalue is not None and avalue is None and bvalue == prefvalue:
3073 3078 return 1
3074 3079
3075 3080 # We can't compare unless attribute present on both.
3076 3081 if avalue is None or bvalue is None:
3077 3082 continue
3078 3083
3079 3084 # Same values should fall back to next attribute.
3080 3085 if avalue == bvalue:
3081 3086 continue
3082 3087
3083 3088 # Exact matches come first.
3084 3089 if avalue == prefvalue:
3085 3090 return -1
3086 3091 if bvalue == prefvalue:
3087 3092 return 1
3088 3093
3089 3094 # Fall back to next attribute.
3090 3095 continue
3091 3096
3092 3097 # If we got here we couldn't sort by attributes and prefers. Fall
3093 3098 # back to index order.
3094 3099 return 0
3095 3100
3096 3101 def __lt__(self, other):
3097 3102 return self._cmp(other) < 0
3098 3103
3099 3104 def __gt__(self, other):
3100 3105 return self._cmp(other) > 0
3101 3106
3102 3107 def __eq__(self, other):
3103 3108 return self._cmp(other) == 0
3104 3109
3105 3110 def __le__(self, other):
3106 3111 return self._cmp(other) <= 0
3107 3112
3108 3113 def __ge__(self, other):
3109 3114 return self._cmp(other) >= 0
3110 3115
3111 3116 def __ne__(self, other):
3112 3117 return self._cmp(other) != 0
3113 3118
3114 3119
3115 3120 def sortclonebundleentries(ui, entries):
3116 3121 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3117 3122 if not prefers:
3118 3123 return list(entries)
3119 3124
3120 3125 def _split(p):
3121 3126 if b'=' not in p:
3122 3127 hint = _(b"each comma separated item should be key=value pairs")
3123 3128 raise error.Abort(
3124 3129 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3125 3130 )
3126 3131 return p.split(b'=', 1)
3127 3132
3128 3133 prefers = [_split(p) for p in prefers]
3129 3134
3130 3135 items = sorted(clonebundleentry(v, prefers) for v in entries)
3131 3136 return [i.value for i in items]
3132 3137
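For illustration, the cascading comparison above can be exercised over plain dicts via ``functools.cmp_to_key``; a simplified sketch, not mercurial's API:

    import functools

    def prefcmp(prefers, a, b):
        # Simplified clonebundleentry._cmp over plain dicts: the first
        # preference key where exactly one side matches decides the order.
        for key, value in prefers:
            av, bv = a.get(key), b.get(key)
            if av == bv:
                continue
            if av == value:
                return -1
            if bv == value:
                return 1
        return 0  # undecided: sorted() is stable, manifest order wins

    prefers = [('COMPRESSION', 'zstd'), ('VERSION', 'v2')]
    entries = [{'COMPRESSION': 'gzip'}, {'COMPRESSION': 'zstd'}]
    entries.sort(key=functools.cmp_to_key(
        lambda a, b: prefcmp(prefers, a, b)))
    # the zstd entry now sorts first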
3133 3138
3134 3139 def trypullbundlefromurl(ui, repo, url):
3135 3140 """Attempt to apply a bundle from a URL."""
3136 3141 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3137 3142 try:
3138 3143 fh = urlmod.open(ui, url)
3139 3144 cg = readbundle(ui, fh, b'stream')
3140 3145
3141 3146 if isinstance(cg, streamclone.streamcloneapplier):
3142 3147 cg.apply(repo)
3143 3148 else:
3144 3149 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3145 3150 return True
3146 3151 except urlerr.httperror as e:
3147 3152 ui.warn(
3148 3153 _(b'HTTP error fetching bundle: %s\n')
3149 3154 % stringutil.forcebytestr(e)
3150 3155 )
3151 3156 except urlerr.urlerror as e:
3152 3157 ui.warn(
3153 3158 _(b'error fetching bundle: %s\n')
3154 3159 % stringutil.forcebytestr(e.reason)
3155 3160 )
3156 3161
3157 3162 return False
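A hypothetical caller sketch (illustrative only; ``pull`` is this module's regular pull entry point) showing the boolean contract, falling back to a normal pull when the advertised bundle cannot be fetched:

    # Hedged sketch: trypullbundlefromurl returns False after warning on
    # HTTP/URL errors, so callers can fall back to a regular exchange.
    if not trypullbundlefromurl(ui, repo, url):
        ui.warn(b'falling back to a regular pull\n')
        pull(repo, remote)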
@@ -1,798 +1,800 b''
1 1 Test file dedicated to testing the divergence troubles caused by obsolete changesets.
2 2
3 3 These are by far the most complex troubles, so we isolate them in a dedicated
4 4 file.
5 5
6 6 Enable obsolete
7 7
8 8 $ cat >> $HGRCPATH << EOF
9 9 > [ui]
10 10 > logtemplate = {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
11 11 > [experimental]
12 12 > evolution.createmarkers=True
13 13 > [extensions]
14 14 > drawdag=$TESTDIR/drawdag.py
15 15 > [alias]
16 16 > debugobsolete = debugobsolete -d '0 0'
17 17 > [phases]
18 18 > publish=False
19 19 > [templates]
20 20 > wuentryshort = '{instability}:{if(divergentnodes, " ")}{divergentnodes} {reason} {node|shortest}\n'
21 21 > whyunstableshort = '{whyunstable % wuentryshort}'
22 22 > wuentryshorter = '{instability}:{divergentnodes % " {node|shortest} ({phase})"} {reason} {node|shortest}\n'
23 23 > whyunstableshorter = '{whyunstable % wuentryshorter}'
24 24 > EOF
25 25
26 26
27 27 $ mkcommit() {
28 28 > echo "$1" > "$1"
29 29 > hg add "$1"
30 30 > hg ci -m "$1"
31 31 > }
32 32 $ getid() {
33 33 > hg log --hidden -r "desc('$1')" -T '{node}\n'
34 34 > }
35 35
36 36 setup repo
37 37
38 38 $ hg init reference
39 39 $ cd reference
40 40 $ mkcommit base
41 41 $ mkcommit A_0
42 42 $ hg up 0
43 43 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
44 44 $ mkcommit A_1
45 45 created new head
46 46 $ hg up 0
47 47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 48 $ mkcommit A_2
49 49 created new head
50 50 $ hg up 0
51 51 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
52 52 $ cd ..
53 53
54 54
55 55 $ newcase() {
56 56 > hg clone -u 0 -q reference $1
57 57 > cd $1
58 58 > }
59 59
60 60 direct divergence
61 61 -----------------
62 62
63 63 A_0 has two direct and divergent successors: A_1 and A_2
64 64
65 65 $ newcase direct
66 66 $ hg debugobsolete `getid A_0` `getid A_1`
67 67 1 new obsolescence markers
68 68 obsoleted 1 changesets
69 69 $ hg debugobsolete `getid A_0` `getid A_2`
70 70 1 new obsolescence markers
71 71 2 new content-divergent changesets
72 72 $ hg log -G --hidden
73 73 * 3:392fd25390da A_2
74 74 |
75 75 | * 2:82623d38b9ba A_1
76 76 |/
77 77 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
78 78 |/
79 79 @ 0:d20a80d4def3 base
80 80
81 81 $ hg debugsuccessorssets --hidden 'all()'
82 82 d20a80d4def3
83 83 d20a80d4def3
84 84 007dc284c1f8
85 85 82623d38b9ba
86 86 392fd25390da
87 87 82623d38b9ba
88 88 82623d38b9ba
89 89 392fd25390da
90 90 392fd25390da
91 91 $ hg log -r 'contentdivergent()'
92 92 2:82623d38b9ba A_1
93 93 3:392fd25390da A_2
94 94 $ hg log -r 'unstable()'
95 95 2:82623d38b9ba A_1
96 96 3:392fd25390da A_2
97 97 $ hg debugsuccessorssets 'all()' --closest
98 98 d20a80d4def3
99 99 d20a80d4def3
100 100 82623d38b9ba
101 101 82623d38b9ba
102 102 392fd25390da
103 103 392fd25390da
104 104 $ hg debugsuccessorssets 'all()' --closest --hidden
105 105 d20a80d4def3
106 106 d20a80d4def3
107 107 007dc284c1f8
108 108 82623d38b9ba
109 109 392fd25390da
110 110 82623d38b9ba
111 111 82623d38b9ba
112 112 392fd25390da
113 113 392fd25390da
114 114
115 115 check that mercurial refuses to push
116 116
117 117 $ hg init ../other
118 118 $ hg push ../other
119 119 pushing to ../other
120 120 searching for changes
121 abort: push includes content-divergent changeset: 392fd25390da!
121 abort: push includes unstable changesets:
122 82623d38b9ba (content-divergent)
123 392fd25390da (content-divergent)
122 124 [255]
123 125
124 126 $ cd ..
125 127
126 128
127 129 indirect divergence with known changeset
128 130 -------------------------------------------
129 131
130 132 $ newcase indirect_known
131 133 $ hg debugobsolete `getid A_0` `getid A_1`
132 134 1 new obsolescence markers
133 135 obsoleted 1 changesets
134 136 $ hg debugobsolete `getid A_0` `getid A_2`
135 137 1 new obsolescence markers
136 138 2 new content-divergent changesets
137 139 $ mkcommit A_3
138 140 created new head
139 141 $ hg debugobsolete `getid A_2` `getid A_3`
140 142 1 new obsolescence markers
141 143 obsoleted 1 changesets
142 144 $ hg log -G --hidden
143 145 @ 4:01f36c5a8fda A_3
144 146 |
145 147 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
146 148 |/
147 149 | * 2:82623d38b9ba A_1
148 150 |/
149 151 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
150 152 |/
151 153 o 0:d20a80d4def3 base
152 154
153 155 $ hg debugsuccessorssets --hidden 'all()'
154 156 d20a80d4def3
155 157 d20a80d4def3
156 158 007dc284c1f8
157 159 82623d38b9ba
158 160 01f36c5a8fda
159 161 82623d38b9ba
160 162 82623d38b9ba
161 163 392fd25390da
162 164 01f36c5a8fda
163 165 01f36c5a8fda
164 166 01f36c5a8fda
165 167 $ hg log -r 'contentdivergent()'
166 168 2:82623d38b9ba A_1
167 169 4:01f36c5a8fda A_3
168 170 $ hg debugsuccessorssets 'all()' --closest
169 171 d20a80d4def3
170 172 d20a80d4def3
171 173 82623d38b9ba
172 174 82623d38b9ba
173 175 01f36c5a8fda
174 176 01f36c5a8fda
175 177 $ hg debugsuccessorssets 'all()' --closest --hidden
176 178 d20a80d4def3
177 179 d20a80d4def3
178 180 007dc284c1f8
179 181 82623d38b9ba
180 182 392fd25390da
181 183 82623d38b9ba
182 184 82623d38b9ba
183 185 392fd25390da
184 186 392fd25390da
185 187 01f36c5a8fda
186 188 01f36c5a8fda
187 189 $ cd ..
188 190
189 191
190 192 indirect divergence with unknown changeset
191 193 -------------------------------------------
192 194
193 195 $ newcase indirect_unknown
194 196 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
195 197 1 new obsolescence markers
196 198 obsoleted 1 changesets
197 199 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
198 200 1 new obsolescence markers
199 201 $ hg debugobsolete `getid A_0` `getid A_2`
200 202 1 new obsolescence markers
201 203 2 new content-divergent changesets
202 204 $ hg log -G --hidden
203 205 * 3:392fd25390da A_2
204 206 |
205 207 | * 2:82623d38b9ba A_1
206 208 |/
207 209 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
208 210 |/
209 211 @ 0:d20a80d4def3 base
210 212
211 213 $ hg debugsuccessorssets --hidden 'all()'
212 214 d20a80d4def3
213 215 d20a80d4def3
214 216 007dc284c1f8
215 217 82623d38b9ba
216 218 392fd25390da
217 219 82623d38b9ba
218 220 82623d38b9ba
219 221 392fd25390da
220 222 392fd25390da
221 223 $ hg log -r 'contentdivergent()'
222 224 2:82623d38b9ba A_1
223 225 3:392fd25390da A_2
224 226 $ hg debugsuccessorssets 'all()' --closest
225 227 d20a80d4def3
226 228 d20a80d4def3
227 229 82623d38b9ba
228 230 82623d38b9ba
229 231 392fd25390da
230 232 392fd25390da
231 233 $ hg debugsuccessorssets 'all()' --closest --hidden
232 234 d20a80d4def3
233 235 d20a80d4def3
234 236 007dc284c1f8
235 237 82623d38b9ba
236 238 392fd25390da
237 239 82623d38b9ba
238 240 82623d38b9ba
239 241 392fd25390da
240 242 392fd25390da
241 243 $ cd ..
242 244
243 245 do not take unknown nodes into account if they are final
244 246 -----------------------------------------------------
245 247
246 248 $ newcase final-unknown
247 249 $ hg debugobsolete `getid A_0` `getid A_1`
248 250 1 new obsolescence markers
249 251 obsoleted 1 changesets
250 252 $ hg debugobsolete `getid A_1` `getid A_2`
251 253 1 new obsolescence markers
252 254 obsoleted 1 changesets
253 255 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
254 256 1 new obsolescence markers
255 257 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
256 258 1 new obsolescence markers
257 259 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
258 260 1 new obsolescence markers
259 261
260 262 $ hg debugsuccessorssets --hidden 'desc('A_0')'
261 263 007dc284c1f8
262 264 392fd25390da
263 265 $ hg debugsuccessorssets 'desc('A_0')' --closest
264 266 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
265 267 007dc284c1f8
266 268 82623d38b9ba
267 269
268 270 $ cd ..
269 271
270 272 divergence that converges again is not divergence anymore
271 273 -----------------------------------------------------
272 274
273 275 $ newcase converged_divergence
274 276 $ hg debugobsolete `getid A_0` `getid A_1`
275 277 1 new obsolescence markers
276 278 obsoleted 1 changesets
277 279 $ hg debugobsolete `getid A_0` `getid A_2`
278 280 1 new obsolescence markers
279 281 2 new content-divergent changesets
280 282 $ mkcommit A_3
281 283 created new head
282 284 $ hg debugobsolete `getid A_1` `getid A_3`
283 285 1 new obsolescence markers
284 286 obsoleted 1 changesets
285 287 $ hg debugobsolete `getid A_2` `getid A_3`
286 288 1 new obsolescence markers
287 289 obsoleted 1 changesets
288 290 $ hg log -G --hidden
289 291 @ 4:01f36c5a8fda A_3
290 292 |
291 293 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
292 294 |/
293 295 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
294 296 |/
295 297 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
296 298 |/
297 299 o 0:d20a80d4def3 base
298 300
299 301 $ hg debugsuccessorssets --hidden 'all()'
300 302 d20a80d4def3
301 303 d20a80d4def3
302 304 007dc284c1f8
303 305 01f36c5a8fda
304 306 82623d38b9ba
305 307 01f36c5a8fda
306 308 392fd25390da
307 309 01f36c5a8fda
308 310 01f36c5a8fda
309 311 01f36c5a8fda
310 312 $ hg log -r 'contentdivergent()'
311 313 $ hg debugsuccessorssets 'all()' --closest
312 314 d20a80d4def3
313 315 d20a80d4def3
314 316 01f36c5a8fda
315 317 01f36c5a8fda
316 318 $ hg debugsuccessorssets 'all()' --closest --hidden
317 319 d20a80d4def3
318 320 d20a80d4def3
319 321 007dc284c1f8
320 322 82623d38b9ba
321 323 392fd25390da
322 324 82623d38b9ba
323 325 82623d38b9ba
324 326 392fd25390da
325 327 392fd25390da
326 328 01f36c5a8fda
327 329 01f36c5a8fda
328 330 $ cd ..
329 331
330 332 split is not divergence
331 333 -----------------------------
332 334
333 335 $ newcase split
334 336 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
335 337 1 new obsolescence markers
336 338 obsoleted 1 changesets
337 339 $ hg log -G --hidden
338 340 o 3:392fd25390da A_2
339 341 |
340 342 | o 2:82623d38b9ba A_1
341 343 |/
342 344 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
343 345 |/
344 346 @ 0:d20a80d4def3 base
345 347
346 348 $ hg debugsuccessorssets --hidden 'all()'
347 349 d20a80d4def3
348 350 d20a80d4def3
349 351 007dc284c1f8
350 352 82623d38b9ba 392fd25390da
351 353 82623d38b9ba
352 354 82623d38b9ba
353 355 392fd25390da
354 356 392fd25390da
355 357 $ hg log -r 'contentdivergent()'
356 358 $ hg debugsuccessorssets 'all()' --closest
357 359 d20a80d4def3
358 360 d20a80d4def3
359 361 82623d38b9ba
360 362 82623d38b9ba
361 363 392fd25390da
362 364 392fd25390da
363 365 $ hg debugsuccessorssets 'all()' --closest --hidden
364 366 d20a80d4def3
365 367 d20a80d4def3
366 368 007dc284c1f8
367 369 82623d38b9ba 392fd25390da
368 370 82623d38b9ba
369 371 82623d38b9ba
370 372 392fd25390da
371 373 392fd25390da
372 374
373 375 Even when subsequent rewriting happens
374 376
375 377 $ mkcommit A_3
376 378 created new head
377 379 $ hg debugobsolete `getid A_1` `getid A_3`
378 380 1 new obsolescence markers
379 381 obsoleted 1 changesets
380 382 $ hg up 0
381 383 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
382 384 $ mkcommit A_4
383 385 created new head
384 386 $ hg debugobsolete `getid A_2` `getid A_4`
385 387 1 new obsolescence markers
386 388 obsoleted 1 changesets
387 389 $ hg up 0
388 390 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
389 391 $ mkcommit A_5
390 392 created new head
391 393 $ hg debugobsolete `getid A_4` `getid A_5`
392 394 1 new obsolescence markers
393 395 obsoleted 1 changesets
394 396 $ hg log -G --hidden
395 397 @ 6:e442cfc57690 A_5
396 398 |
397 399 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
398 400 |/
399 401 | o 4:01f36c5a8fda A_3
400 402 |/
401 403 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
402 404 |/
403 405 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
404 406 |/
405 407 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
406 408 |/
407 409 o 0:d20a80d4def3 base
408 410
409 411 $ hg debugsuccessorssets --hidden 'all()'
410 412 d20a80d4def3
411 413 d20a80d4def3
412 414 007dc284c1f8
413 415 01f36c5a8fda e442cfc57690
414 416 82623d38b9ba
415 417 01f36c5a8fda
416 418 392fd25390da
417 419 e442cfc57690
418 420 01f36c5a8fda
419 421 01f36c5a8fda
420 422 6a411f0d7a0a
421 423 e442cfc57690
422 424 e442cfc57690
423 425 e442cfc57690
424 426 $ hg debugsuccessorssets 'all()' --closest
425 427 d20a80d4def3
426 428 d20a80d4def3
427 429 01f36c5a8fda
428 430 01f36c5a8fda
429 431 e442cfc57690
430 432 e442cfc57690
431 433 $ hg debugsuccessorssets 'all()' --closest --hidden
432 434 d20a80d4def3
433 435 d20a80d4def3
434 436 007dc284c1f8
435 437 82623d38b9ba 392fd25390da
436 438 82623d38b9ba
437 439 82623d38b9ba
438 440 392fd25390da
439 441 392fd25390da
440 442 01f36c5a8fda
441 443 01f36c5a8fda
442 444 6a411f0d7a0a
443 445 e442cfc57690
444 446 e442cfc57690
445 447 e442cfc57690
446 448 $ hg log -r 'contentdivergent()'
447 449
448 450 Check a more complex obsolescence graph (with divergence)
449 451
450 452 $ mkcommit B_0; hg up 0
451 453 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
452 454 $ hg debugobsolete `getid B_0` `getid A_2`
453 455 1 new obsolescence markers
454 456 obsoleted 1 changesets
455 457 $ mkcommit A_7; hg up 0
456 458 created new head
457 459 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
458 460 $ mkcommit A_8; hg up 0
459 461 created new head
460 462 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
461 463 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
462 464 1 new obsolescence markers
463 465 obsoleted 1 changesets
464 466 $ mkcommit A_9; hg up 0
465 467 created new head
466 468 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
467 469 $ hg debugobsolete `getid A_5` `getid A_9`
468 470 1 new obsolescence markers
469 471 4 new content-divergent changesets
470 472 $ hg log -G --hidden
471 473 * 10:bed64f5d2f5a A_9
472 474 |
473 475 | * 9:14608b260df8 A_8
474 476 |/
475 477 | * 8:7ae126973a96 A_7
476 478 |/
477 479 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
478 480 | |
479 481 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
480 482 |/
481 483 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
482 484 |/
483 485 | * 4:01f36c5a8fda A_3
484 486 |/
485 487 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
486 488 |/
487 489 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
488 490 |/
489 491 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
490 492 |/
491 493 @ 0:d20a80d4def3 base
492 494
493 495 $ hg debugsuccessorssets --hidden 'all()'
494 496 d20a80d4def3
495 497 d20a80d4def3
496 498 007dc284c1f8
497 499 01f36c5a8fda bed64f5d2f5a
498 500 01f36c5a8fda 7ae126973a96 14608b260df8
499 501 82623d38b9ba
500 502 01f36c5a8fda
501 503 392fd25390da
502 504 bed64f5d2f5a
503 505 7ae126973a96 14608b260df8
504 506 01f36c5a8fda
505 507 01f36c5a8fda
506 508 6a411f0d7a0a
507 509 bed64f5d2f5a
508 510 7ae126973a96 14608b260df8
509 511 e442cfc57690
510 512 bed64f5d2f5a
511 513 7ae126973a96 14608b260df8
512 514 3750ebee865d
513 515 bed64f5d2f5a
514 516 7ae126973a96 14608b260df8
515 517 7ae126973a96
516 518 7ae126973a96
517 519 14608b260df8
518 520 14608b260df8
519 521 bed64f5d2f5a
520 522 bed64f5d2f5a
521 523 $ hg debugsuccessorssets 'all()' --closest
522 524 d20a80d4def3
523 525 d20a80d4def3
524 526 01f36c5a8fda
525 527 01f36c5a8fda
526 528 7ae126973a96
527 529 7ae126973a96
528 530 14608b260df8
529 531 14608b260df8
530 532 bed64f5d2f5a
531 533 bed64f5d2f5a
532 534 $ hg debugsuccessorssets 'all()' --closest --hidden
533 535 d20a80d4def3
534 536 d20a80d4def3
535 537 007dc284c1f8
536 538 82623d38b9ba 392fd25390da
537 539 82623d38b9ba
538 540 82623d38b9ba
539 541 392fd25390da
540 542 392fd25390da
541 543 01f36c5a8fda
542 544 01f36c5a8fda
543 545 6a411f0d7a0a
544 546 e442cfc57690
545 547 e442cfc57690
546 548 e442cfc57690
547 549 3750ebee865d
548 550 392fd25390da
549 551 7ae126973a96
550 552 7ae126973a96
551 553 14608b260df8
552 554 14608b260df8
553 555 bed64f5d2f5a
554 556 bed64f5d2f5a
555 557 $ hg log -r 'contentdivergent()'
556 558 4:01f36c5a8fda A_3
557 559 8:7ae126973a96 A_7
558 560 9:14608b260df8 A_8
559 561 10:bed64f5d2f5a A_9
560 562
561 563 $ hg log -r bed64f5d2f5a -T '{whyunstable}\n' | sort
562 564 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007dc284c1f8
563 565 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442cfc57690
564 566 $ hg log -r bed64f5d2f5a -T whyunstableshort | sort
565 567 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007d
566 568 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442
567 569 $ hg log -r bed64f5d2f5a -T whyunstableshorter | sort
568 570 content-divergent: 01f3 (draft) 7ae1 (draft) 1460 (draft) predecessor 007d
569 571 content-divergent: 7ae1 (draft) 1460 (draft) predecessor e442
570 572
571 573 fix the divergence
572 574
573 575 $ mkcommit A_A; hg up 0
574 576 created new head
575 577 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
576 578 $ hg debugobsolete `getid A_9` `getid A_A`
577 579 1 new obsolescence markers
578 580 obsoleted 1 changesets
579 581 $ hg debugobsolete `getid A_7` `getid A_A`
580 582 1 new obsolescence markers
581 583 obsoleted 1 changesets
582 584 $ hg debugobsolete `getid A_8` `getid A_A`
583 585 1 new obsolescence markers
584 586 obsoleted 1 changesets
585 587 $ hg log -G --hidden
586 588 o 11:a139f71be9da A_A
587 589 |
588 590 | x 10:bed64f5d2f5a A_9 [rewritten as 11:a139f71be9da]
589 591 |/
590 592 | x 9:14608b260df8 A_8 [rewritten as 11:a139f71be9da]
591 593 |/
592 594 | x 8:7ae126973a96 A_7 [rewritten as 11:a139f71be9da]
593 595 |/
594 596 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
595 597 | |
596 598 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
597 599 |/
598 600 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
599 601 |/
600 602 | o 4:01f36c5a8fda A_3
601 603 |/
602 604 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
603 605 |/
604 606 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
605 607 |/
606 608 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
607 609 |/
608 610 @ 0:d20a80d4def3 base
609 611
610 612 $ hg debugsuccessorssets --hidden 'all()'
611 613 d20a80d4def3
612 614 d20a80d4def3
613 615 007dc284c1f8
614 616 01f36c5a8fda a139f71be9da
615 617 82623d38b9ba
616 618 01f36c5a8fda
617 619 392fd25390da
618 620 a139f71be9da
619 621 01f36c5a8fda
620 622 01f36c5a8fda
621 623 6a411f0d7a0a
622 624 a139f71be9da
623 625 e442cfc57690
624 626 a139f71be9da
625 627 3750ebee865d
626 628 a139f71be9da
627 629 7ae126973a96
628 630 a139f71be9da
629 631 14608b260df8
630 632 a139f71be9da
631 633 bed64f5d2f5a
632 634 a139f71be9da
633 635 a139f71be9da
634 636 a139f71be9da
635 637 $ hg debugsuccessorssets 'all()' --closest
636 638 d20a80d4def3
637 639 d20a80d4def3
638 640 01f36c5a8fda
639 641 01f36c5a8fda
640 642 a139f71be9da
641 643 a139f71be9da
642 644 $ hg debugsuccessorssets 'all()' --closest --hidden
643 645 d20a80d4def3
644 646 d20a80d4def3
645 647 007dc284c1f8
646 648 82623d38b9ba 392fd25390da
647 649 82623d38b9ba
648 650 82623d38b9ba
649 651 392fd25390da
650 652 392fd25390da
651 653 01f36c5a8fda
652 654 01f36c5a8fda
653 655 6a411f0d7a0a
654 656 e442cfc57690
655 657 e442cfc57690
656 658 e442cfc57690
657 659 3750ebee865d
658 660 392fd25390da
659 661 7ae126973a96
660 662 a139f71be9da
661 663 14608b260df8
662 664 a139f71be9da
663 665 bed64f5d2f5a
664 666 a139f71be9da
665 667 a139f71be9da
666 668 a139f71be9da
667 669 $ hg log -r 'contentdivergent()'
668 670
669 671 #if serve
670 672
671 673 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid --config web.view=all \
672 674 > -A access.log -E errors.log
673 675 $ cat hg.pid >> $DAEMON_PIDS
674 676
675 677 check an obsolete changeset that was rewritten and also split
676 678
677 679 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=paper' | egrep 'rewritten|split'
678 680 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=paper">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
679 681 split as <a href="/rev/7ae126973a96?style=paper">7ae126973a96</a> <a href="/rev/14608b260df8?style=paper">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
680 682 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=coal' | egrep 'rewritten|split'
681 683 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=coal">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
682 684 split as <a href="/rev/7ae126973a96?style=coal">7ae126973a96</a> <a href="/rev/14608b260df8?style=coal">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
683 685 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=gitweb' | egrep 'rewritten|split'
684 686 <td>rewritten as <a class="list" href="/rev/bed64f5d2f5a?style=gitweb">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
685 687 <td>split as <a class="list" href="/rev/7ae126973a96?style=gitweb">7ae126973a96</a> <a class="list" href="/rev/14608b260df8?style=gitweb">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
686 688 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=monoblue' | egrep 'rewritten|split'
687 689 <dd>rewritten as <a href="/rev/bed64f5d2f5a?style=monoblue">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
688 690 <dd>split as <a href="/rev/7ae126973a96?style=monoblue">7ae126973a96</a> <a href="/rev/14608b260df8?style=monoblue">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
689 691 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=spartan' | egrep 'rewritten|split'
690 692 <td class="obsolete">rewritten as <a href="/rev/bed64f5d2f5a?style=spartan">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
691 693 <td class="obsolete">split as <a href="/rev/7ae126973a96?style=spartan">7ae126973a96</a> <a href="/rev/14608b260df8?style=spartan">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
692 694
693 695 $ killdaemons.py
694 696
695 697 #endif
696 698
697 699 $ cd ..
698 700
699 701
700 702 Subset does not diverge
701 703 ------------------------------
702 704
703 705 Do not report a divergent successors-set if it is a subset of another
704 706 successors-set (report [A,B], not [A] + [A,B]).
705 707
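A minimal sketch of this subset rule, assuming successors sets are plain tuples of node names (illustrative, not mercurial's implementation):

    def prune_subsets(succsets):
        # Drop any successors set strictly contained in another one,
        # e.g. [('A',), ('A', 'B')] -> [('A', 'B')].
        return [cand for cand in succsets
                if not any(set(cand) < set(other)
                           for other in succsets if other is not cand)]

    assert prune_subsets([('A',), ('A', 'B')]) == [('A', 'B')]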
706 708 $ newcase subset
707 709 $ hg debugobsolete `getid A_0` `getid A_2`
708 710 1 new obsolescence markers
709 711 obsoleted 1 changesets
710 712 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
711 713 1 new obsolescence markers
712 714 $ hg debugsuccessorssets --hidden 'desc('A_0')'
713 715 007dc284c1f8
714 716 82623d38b9ba 392fd25390da
715 717 $ hg debugsuccessorssets 'desc('A_0')' --closest
716 718 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
717 719 007dc284c1f8
718 720 82623d38b9ba 392fd25390da
719 721
720 722 $ cd ..
721 723
722 724 Use scmutil.cleanupnodes API to create divergence
723 725
724 726 $ hg init cleanupnodes
725 727 $ cd cleanupnodes
726 728 $ hg debugdrawdag <<'EOS'
727 729 > B1 B3 B4
728 730 > | \|
729 731 > A Z
730 732 > EOS
731 733
732 734 $ hg update -q B1
733 735 $ echo 3 >> B
734 736 $ hg commit --amend -m B2
735 737 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
736 738 > from mercurial import registrar, scmutil
737 739 > cmdtable = {}
738 740 > command = registrar.command(cmdtable)
739 741 > @command(b'cleanup')
740 742 > def cleanup(ui, repo):
741 743 > def node(expr):
742 744 > unfi = repo.unfiltered()
743 745 > rev = unfi.revs(expr).first()
744 746 > return unfi.changelog.node(rev)
745 747 > with repo.wlock(), repo.lock(), repo.transaction(b'delayedstrip'):
746 748 > mapping = {node(b'desc(B1)'): [node(b'desc(B3)')],
747 749 > node(b'desc(B3)'): [node(b'desc(B4)')]}
748 750 > scmutil.cleanupnodes(repo, mapping, b'test')
749 751 > EOF
750 752
751 753 $ rm .hg/localtags
752 754 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
753 755 2 new content-divergent changesets
754 756 $ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
755 757 @ 5:1a2a9b5b0030 B2 content-divergent
756 758 |
757 759 | * 4:70d5a63ca112 B4 content-divergent
758 760 | |
759 761 | o 1:48b9aae0607f Z
760 762 |
761 763 o 0:426bada5c675 A
762 764
763 765 $ hg debugobsolete
764 766 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
765 767 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'test', 'user': 'test'}
766 768 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'test', 'user': 'test'}
767 769
768 770 $ hg debugwhyunstable 1a2a9b5b0030
769 771 content-divergent: 70d5a63ca112acb3764bc1d7320ca90ea688d671 (draft) predecessor a178212c3433c4e77b573f6011e29affb8aefa33
770 772
771 773 $ hg log -r 1a2a9b5b0030 -T '{whyunstable}\n'
772 774 content-divergent: 4:70d5a63ca112 (draft) predecessor a178212c3433
773 775 $ hg log -r 1a2a9b5b0030 -T whyunstableshort
774 776 content-divergent: 4:70d5a63ca112 (draft) predecessor a178
775 777 $ hg log -r 1a2a9b5b0030 -T whyunstableshorter
776 778 content-divergent: 70d5 (draft) predecessor a178
777 779
778 780 #if serve
779 781
780 782 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
781 783 $ cat hg.pid >> $DAEMON_PIDS
782 784
783 785 check explanation for a content-divergent changeset
784 786
785 787 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=paper' | grep divergent:
786 788 <td>content-divergent: <a href="/rev/70d5a63ca112?style=paper">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=paper">a178212c3433</a></td>
787 789 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=coal' | grep divergent:
788 790 <td>content-divergent: <a href="/rev/70d5a63ca112?style=coal">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=coal">a178212c3433</a></td>
789 791 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=gitweb' | grep divergent:
790 792 <td>content-divergent: <a class="list" href="/rev/70d5a63ca112?style=gitweb">70d5a63ca112</a> (draft) predecessor <a class="list" href="/rev/a178212c3433?style=gitweb">a178212c3433</a></td>
791 793 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=monoblue' | grep divergent:
792 794 <dd>content-divergent: <a href="/rev/70d5a63ca112?style=monoblue">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=monoblue">a178212c3433</a></dd>
793 795 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=spartan' | grep divergent:
794 796 <td class="unstable">content-divergent: <a href="/rev/70d5a63ca112?style=spartan">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=spartan">a178212c3433</a></td>
795 797
796 798 $ killdaemons.py
797 799
798 800 #endif
@@ -1,1792 +1,1825 b''
1 1 $ cat >> $HGRCPATH << EOF
2 2 > [phases]
3 3 > # public changeset are not obsolete
4 4 > publish=false
5 5 > [ui]
6 6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(instabilities, ' {instabilities}')}) [{tags} {bookmarks}] {desc|firstline}{if(obsfate, " [{join(obsfate, "; ")}]")}\n"
7 7 > EOF
8 8 $ mkcommit() {
9 9 > echo "$1" > "$1"
10 10 > hg add "$1"
11 11 > hg ci -m "add $1"
12 12 > }
13 13 $ getid() {
14 14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 15 > }
16 16
17 17 $ cat > debugkeys.py <<EOF
18 18 > def reposetup(ui, repo):
19 19 > class debugkeysrepo(repo.__class__):
20 20 > def listkeys(self, namespace):
21 21 > ui.write(b'listkeys %s\n' % (namespace,))
22 22 > return super(debugkeysrepo, self).listkeys(namespace)
23 23 >
24 24 > if repo.local():
25 25 > repo.__class__ = debugkeysrepo
26 26 > EOF
27 27
28 28 $ hg init tmpa
29 29 $ cd tmpa
30 30 $ mkcommit kill_me
31 31
32 32 Checking that the feature is properly disabled
33 33
34 34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 35 abort: creating obsolete markers is not enabled on this repo
36 36 [255]
37 37
38 38 Enabling it
39 39
40 40 $ cat >> $HGRCPATH << EOF
41 41 > [experimental]
42 42 > evolution=exchange
43 43 > evolution.createmarkers=True
44 44 > EOF
45 45
46 46 Killing a single changeset without replacement
47 47
48 48 $ hg debugobsolete 0
49 49 abort: changeset references must be full hexadecimal node identifiers
50 50 [255]
51 51 $ hg debugobsolete '00'
52 52 abort: changeset references must be full hexadecimal node identifiers
53 53 [255]
54 54 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
55 55 1 new obsolescence markers
56 56 obsoleted 1 changesets
57 57 $ hg debugobsolete
58 58 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
59 59
60 60 (test that mercurial is not confused)
61 61
62 62 $ hg up null --quiet # having 0 as parent prevents it from being hidden
63 63 $ hg tip
64 64 -1:000000000000 (public) [tip ]
65 65 $ hg up --hidden tip --quiet
66 66 updated to hidden changeset 97b7c2d76b18
67 67 (hidden revision '97b7c2d76b18' is pruned)
68 68
69 69 Killing a single changeset with itself should fail
70 70 (simple local safeguard)
71 71
72 72 $ hg debugobsolete `getid kill_me` `getid kill_me`
73 73 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
74 74 [255]
75 75
76 76 $ cd ..
77 77
78 78 Killing a single changeset with replacement
79 79 (and testing the format option)
80 80
81 81 $ hg init tmpb
82 82 $ cd tmpb
83 83 $ mkcommit a
84 84 $ mkcommit b
85 85 $ mkcommit original_c
86 86 $ hg up "desc('b')"
87 87 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
88 88 $ mkcommit new_c
89 89 created new head
90 90 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
91 91 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
92 92 1 new obsolescence markers
93 93 obsoleted 1 changesets
94 94 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
95 95 2:245bde4270cd add original_c
96 96 $ hg debugrevlog -cd
97 97 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
98 98 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
99 99 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
100 100 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
101 101 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
102 102 $ hg debugobsolete
103 103 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
104 104
105 105 (check for version number of the obsstore)
106 106
107 107 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
108 108 \x00 (no-eol) (esc)
109 109
110 110 do it again (it reads the obsstore before adding a new changeset)
111 111
112 112 $ hg up '.^'
113 113 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
114 114 $ mkcommit new_2_c
115 115 created new head
116 116 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
117 117 1 new obsolescence markers
118 118 obsoleted 1 changesets
119 119 $ hg debugobsolete
120 120 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
121 121 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
122 122
123 123 Register two markers with a missing node
124 124
125 125 $ hg up '.^'
126 126 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
127 127 $ mkcommit new_3_c
128 128 created new head
129 129 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
130 130 1 new obsolescence markers
131 131 obsoleted 1 changesets
132 132 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
133 133 1 new obsolescence markers
134 134 $ hg debugobsolete
135 135 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
136 136 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
137 137 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
138 138 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
139 139
140 140 Test the --index option of the debugobsolete command
141 141 $ hg debugobsolete --index
142 142 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
143 143 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
144 144 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
145 145 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
146 146
147 147 Refuse pathological nullid successors
148 148 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
149 149 transaction abort!
150 150 rollback completed
151 151 abort: bad obsolescence marker detected: invalid successors nullid
152 152 [255]
153 153
154 154 Check that graphlog detects that a changeset is obsolete:
155 155
156 156 $ hg log -G
157 157 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
158 158 |
159 159 o 1:7c3bad9141dc (draft) [ ] add b
160 160 |
161 161 o 0:1f0dee641bb7 (draft) [ ] add a
162 162
163 163
164 164 check that heads does not report them
165 165
166 166 $ hg heads
167 167 5:5601fb93a350 (draft) [tip ] add new_3_c
168 168 $ hg heads --hidden
169 169 5:5601fb93a350 (draft) [tip ] add new_3_c
170 170 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
171 171 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
172 172 2:245bde4270cd (draft *obsolete*) [ ] add original_c [rewritten as 3:cdbce2fbb163]
173 173
174 174
175 175 check that summary does not report them
176 176
177 177 $ hg init ../sink
178 178 $ echo '[paths]' >> .hg/hgrc
179 179 $ echo 'default=../sink' >> .hg/hgrc
180 180 $ hg summary --remote
181 181 parent: 5:5601fb93a350 tip
182 182 add new_3_c
183 183 branch: default
184 184 commit: (clean)
185 185 update: (current)
186 186 phases: 3 draft
187 187 remote: 3 outgoing
188 188
189 189 $ hg summary --remote --hidden
190 190 parent: 5:5601fb93a350 tip
191 191 add new_3_c
192 192 branch: default
193 193 commit: (clean)
194 194 update: 3 new changesets, 4 branch heads (merge)
195 195 phases: 6 draft
196 196 remote: 3 outgoing
197 197
198 198 check that various commands work well with filtering
199 199
200 200 $ hg tip
201 201 5:5601fb93a350 (draft) [tip ] add new_3_c
202 202 $ hg log -r 6
203 203 abort: unknown revision '6'!
204 204 [255]
205 205 $ hg log -r 4
206 206 abort: hidden revision '4' was rewritten as: 5601fb93a350!
207 207 (use --hidden to access hidden revisions)
208 208 [255]
209 209 $ hg debugrevspec 'rev(6)'
210 210 $ hg debugrevspec 'rev(4)'
211 211 $ hg debugrevspec 'null'
212 212 -1
213 213
214 214 Check that public changesets are not accounted as obsolete:
215 215
216 216 $ hg --hidden phase --public 2
217 217 1 new phase-divergent changesets
218 218 $ hg log -G
219 219 @ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
220 220 |
221 221 | o 2:245bde4270cd (public) [ ] add original_c
222 222 |/
223 223 o 1:7c3bad9141dc (public) [ ] add b
224 224 |
225 225 o 0:1f0dee641bb7 (public) [ ] add a
226 226
227 227 $ hg log -r 'unstable()'
228 228 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
229 229
230 230
231 231 And that bumped changesets are detected
232 232 --------------------------------------
233 233
234 234 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
235 235 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
236 236 the public changeset
237 237
238 238 $ hg log --hidden -r 'phasedivergent()'
239 239 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
240 240
241 241 And that we can't push bumped changesets
242 242
243 243 $ hg push ../tmpa -r 0 --force #(make repo related)
244 244 pushing to ../tmpa
245 245 searching for changes
246 246 warning: repository is unrelated
247 247 adding changesets
248 248 adding manifests
249 249 adding file changes
250 250 added 1 changesets with 1 changes to 1 files (+1 heads)
251 251 $ hg push ../tmpa
252 252 pushing to ../tmpa
253 253 searching for changes
254 abort: push includes phase-divergent changeset: 5601fb93a350!
254 abort: push includes unstable changesets:
255 5601fb93a350 (phase-divergent)
255 256 [255]
256 257
257 258 Fixing "bumped" situation
258 259 We need to create a clone of 5 and add a special marker with a flag
259 260
260 261 $ hg summary
261 262 parent: 5:5601fb93a350 tip (phase-divergent)
262 263 add new_3_c
263 264 branch: default
264 265 commit: (clean)
265 266 update: 1 new changesets, 2 branch heads (merge)
266 267 phases: 1 draft
267 268 phase-divergent: 1 changesets
268 269 $ hg up '5^'
269 270 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
270 271 $ hg revert -ar 5
271 272 adding new_3_c
272 273 $ hg ci -m 'add n3w_3_c'
273 274 created new head
274 275 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
275 276 1 new obsolescence markers
276 277 obsoleted 1 changesets
277 278 $ hg log -r 'phasedivergent()'
278 279 $ hg log -G
279 280 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
280 281 |
281 282 | o 2:245bde4270cd (public) [ ] add original_c
282 283 |/
283 284 o 1:7c3bad9141dc (public) [ ] add b
284 285 |
285 286 o 0:1f0dee641bb7 (public) [ ] add a
286 287
287 288
288 289 Basic exclusive testing
289 290
290 291 $ hg log -G --hidden
291 292 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
292 293 |
293 294 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
294 295 |/
295 296 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
296 297 |/
297 298 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
298 299 |/
299 300 | o 2:245bde4270cd (public) [ ] add original_c
300 301 |/
301 302 o 1:7c3bad9141dc (public) [ ] add b
302 303 |
303 304 o 0:1f0dee641bb7 (public) [ ] add a
304 305
305 306 $ hg debugobsolete --rev 6f9641995072
306 307 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
307 308 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
308 309 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
309 310 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
310 311 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
311 312 $ hg debugobsolete --rev 6f9641995072 --exclusive
312 313 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
313 314 $ hg debugobsolete --rev 5601fb93a350 --hidden
314 315 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
315 316 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
316 317 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
317 318 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
318 319 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
319 320 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
320 321 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
321 322 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
322 323 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
323 324
324 325 $ cd ..
325 326
326 327 Revision 0 is hidden
327 328 --------------------
328 329
329 330 $ hg init rev0hidden
330 331 $ cd rev0hidden
331 332
332 333 $ mkcommit kill0
333 334 $ hg up -q null
334 335 $ hg debugobsolete `getid kill0`
335 336 1 new obsolescence markers
336 337 obsoleted 1 changesets
337 338 $ mkcommit a
338 339 $ mkcommit b
339 340
340 341 Should pick the first visible revision as "repo" node
341 342
342 343 $ hg archive ../archive-null
343 344 $ cat ../archive-null/.hg_archival.txt
344 345 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
345 346 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
346 347 branch: default
347 348 latesttag: null
348 349 latesttagdistance: 2
349 350 changessincelatesttag: 2
350 351
351 352
352 353 $ cd ..
353 354
354 355 Can disable transaction summary report
355 356
356 357 $ hg init transaction-summary
357 358 $ cd transaction-summary
358 359 $ mkcommit a
359 360 $ mkcommit b
360 361 $ hg up -q null
361 362 $ hg --config experimental.evolution.report-instabilities=false debugobsolete `getid a`
362 363 1 new obsolescence markers
363 364 obsoleted 1 changesets
364 365 $ cd ..
365 366
366 367 Exchange Test
367 368 ============================
368 369
369 370 Destination repo does not have any data
370 371 ---------------------------------------
371 372
372 373 Simple incoming test
373 374
374 375 $ hg init tmpc
375 376 $ cd tmpc
376 377 $ hg incoming ../tmpb
377 378 comparing with ../tmpb
378 379 0:1f0dee641bb7 (public) [ ] add a
379 380 1:7c3bad9141dc (public) [ ] add b
380 381 2:245bde4270cd (public) [ ] add original_c
381 382 6:6f9641995072 (draft) [tip ] add n3w_3_c
382 383
383 384 Try to pull markers while testing pull --confirm
384 385 (extinct changesets are excluded but markers are pulled)
385 386
386 387 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
387 388 > n
388 389 > EOF
389 390 pulling from ../tmpb
390 391 requesting all changes
391 392 adding changesets
392 393 adding manifests
393 394 adding file changes
394 395 adding 4 changesets with 4 changes to 4 files (+1 heads)
395 396 5 new obsolescence markers
396 397 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
397 398 accept incoming changes (yn)? n
398 399 transaction abort!
399 400 rollback completed
400 401 abort: user aborted
401 402 [255]
402 403 $ HGPLAIN=1 hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
403 404 > n
404 405 > EOF
405 406 pulling from ../tmpb
406 407 requesting all changes
407 408 adding changesets
408 409 adding manifests
409 410 adding file changes
410 411 adding 4 changesets with 4 changes to 4 files (+1 heads)
411 412 5 new obsolescence markers
412 413 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
413 414 accept incoming changes (yn)? n
414 415 transaction abort!
415 416 rollback completed
416 417 abort: user aborted
417 418 [255]
418 419 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
419 420 > y
420 421 > EOF
421 422 pulling from ../tmpb
422 423 requesting all changes
423 424 adding changesets
424 425 adding manifests
425 426 adding file changes
426 427 adding 4 changesets with 4 changes to 4 files (+1 heads)
427 428 5 new obsolescence markers
428 429 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
429 430 accept incoming changes (yn)? y
430 431 added 4 changesets with 4 changes to 4 files (+1 heads)
431 432 5 new obsolescence markers
432 433 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
433 434 (run 'hg heads' to see heads, 'hg merge' to merge)
434 435 $ hg debugobsolete
435 436 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
436 437 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
437 438 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 439 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
439 440 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
440 441
441 442 Rollback/Transaction support
442 443
443 444 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
444 445 1 new obsolescence markers
445 446 $ hg debugobsolete
446 447 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
447 448 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
448 449 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
449 450 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
450 451 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
451 452 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
452 453 $ hg rollback -n
453 454 repository tip rolled back to revision 3 (undo debugobsolete)
454 455 $ hg rollback
455 456 repository tip rolled back to revision 3 (undo debugobsolete)
456 457 $ hg debugobsolete
457 458 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
458 459 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
459 460 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
460 461 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
461 462 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
462 463
463 464 $ cd ..
464 465
465 466 Try to push markers
466 467
467 468 $ hg init tmpd
468 469 $ hg -R tmpb push tmpd
469 470 pushing to tmpd
470 471 searching for changes
471 472 adding changesets
472 473 adding manifests
473 474 adding file changes
474 475 added 4 changesets with 4 changes to 4 files (+1 heads)
475 476 5 new obsolescence markers
476 477 $ hg -R tmpd debugobsolete | sort
477 478 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
478 479 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
479 480 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
480 481 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
481 482 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
482 483
483 484 Check that obsolete keys are exchanged only if the source has an obsolete store
484 485
485 486 $ hg init empty
486 487 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
487 488 pushing to tmpd
488 489 listkeys phases
489 490 listkeys bookmarks
490 491 no changes found
491 492 listkeys phases
492 493 [1]
493 494
494 495 clone support
495 496 (markers are copied and extinct changesets are included to allow hardlinks)
496 497
497 498 $ hg clone tmpb clone-dest
498 499 updating to branch default
499 500 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
500 501 $ hg -R clone-dest log -G --hidden
501 502 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
502 503 |
503 504 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
504 505 |/
505 506 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
506 507 |/
507 508 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
508 509 |/
509 510 | o 2:245bde4270cd (public) [ ] add original_c
510 511 |/
511 512 o 1:7c3bad9141dc (public) [ ] add b
512 513 |
513 514 o 0:1f0dee641bb7 (public) [ ] add a
514 515
515 516 $ hg -R clone-dest debugobsolete
516 517 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
517 518 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
518 519 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
519 520 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
520 521 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
521 522
522 523
523 524 Destination repo has existing data
524 525 ---------------------------------------
525 526
526 527 On pull
527 528
528 529 $ hg init tmpe
529 530 $ cd tmpe
530 531 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
531 532 1 new obsolescence markers
532 533 $ hg pull ../tmpb
533 534 pulling from ../tmpb
534 535 requesting all changes
535 536 adding changesets
536 537 adding manifests
537 538 adding file changes
538 539 added 4 changesets with 4 changes to 4 files (+1 heads)
539 540 5 new obsolescence markers
540 541 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
541 542 (run 'hg heads' to see heads, 'hg merge' to merge)
542 543 $ hg debugobsolete
543 544 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
544 545 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
545 546 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
546 547 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
547 548 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
548 549 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
549 550
550 551
551 552 On push
552 553
553 554 $ hg push ../tmpc
554 555 pushing to ../tmpc
555 556 searching for changes
556 557 no changes found
557 558 1 new obsolescence markers
558 559 [1]
559 560 $ hg -R ../tmpc debugobsolete
560 561 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
561 562 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
562 563 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
563 564 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
564 565 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
565 566 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
566 567
567 568 detect outgoing obsolete and unstable changesets
568 569 ---------------------------------------
569 570
570 571
571 572 $ hg log -G
572 573 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
573 574 |
574 575 | o 2:245bde4270cd (public) [ ] add original_c
575 576 |/
576 577 o 1:7c3bad9141dc (public) [ ] add b
577 578 |
578 579 o 0:1f0dee641bb7 (public) [ ] add a
579 580
580 581 $ hg up 'desc("n3w_3_c")'
581 582 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
582 583 $ mkcommit original_d
583 584 $ mkcommit original_e
584 585 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
585 586 1 new obsolescence markers
586 587 obsoleted 1 changesets
587 588 1 new orphan changesets
588 589 $ hg log -r 'unstable()'
589 590 5:cda648ca50f5 (draft orphan) [tip ] add original_e
590 591 $ hg debugobsolete | grep `getid original_d`
591 592 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
592 593 $ hg log -r 'obsolete()'
593 594 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
594 595 $ hg summary
595 596 parent: 5:cda648ca50f5 tip (orphan)
596 597 add original_e
597 598 branch: default
598 599 commit: (clean)
599 600 update: 1 new changesets, 2 branch heads (merge)
600 601 phases: 3 draft
601 602 orphan: 1 changesets
602 603 $ hg log -G -r '::orphan()'
603 604 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
604 605 |
605 606 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
606 607 |
607 608 o 3:6f9641995072 (draft) [ ] add n3w_3_c
608 609 |
609 610 o 1:7c3bad9141dc (public) [ ] add b
610 611 |
611 612 o 0:1f0dee641bb7 (public) [ ] add a
612 613
613 614
614 615 refuse to push obsolete changeset
615 616
616 617 $ hg push ../tmpc/ -r 'desc("original_d")'
617 618 pushing to ../tmpc/
618 619 searching for changes
619 abort: push includes obsolete changeset: 94b33453f93b!
620 abort: push includes obsolete changesets:
621 94b33453f93b
620 622 [255]
621 623
622 624 refuse to push unstable changeset
623 625
624 626 $ hg push ../tmpc/
625 627 pushing to ../tmpc/
626 628 searching for changes
627 abort: push includes orphan changeset: cda648ca50f5!
629 abort: push includes obsolete changesets:
630 94b33453f93b
631 push includes unstable changesets:
632 cda648ca50f5 (orphan)
628 633 [255]
629 634
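Before resorting to --force, it can help to see which outgoing changesets
would trigger these aborts. A minimal sketch using revsets from the internal
API (the repository path and destination are assumptions for illustration;
this is not the actual push-side check):

    # sketch: enumerate outgoing changesets that would make push abort
    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')
    # these revsets mirror the conditions push checks on outgoing csets
    for rev in repo.revs(b'obsolete() and outgoing("../tmpc")'):
        ui.write(b'obsolete: %s\n' % repo[rev].hex()[:12])
    for rev in repo.revs(b'unstable() and outgoing("../tmpc")'):
        ui.write(b'unstable: %s\n' % repo[rev].hex()[:12])

When both sets are empty, the push proceeds without --force.
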
630 635 with --force it will work anyway
631 636
632 637 $ hg push ../tmpc/ --force
633 638 pushing to ../tmpc/
634 639 searching for changes
635 640 adding changesets
636 641 adding manifests
637 642 adding file changes
638 643 added 2 changesets with 2 changes to 2 files
639 644 1 new obsolescence markers
640 645 1 new orphan changesets
641 646
642 647 if the orphan changeset is already on the server, pushing should work
643 648
644 649 $ hg push ../tmpc/
645 650 pushing to ../tmpc/
646 651 searching for changes
647 652 no changes found
648 653 [1]
649 654
655 pushing should work even if the outgoing changes contain an unrelated changeset
656 (neither obsolete nor unstable) (issue6372)
657
658 $ hg up 1 -q
659 $ hg branch new -q
660 $ mkcommit c
661
662 $ hg push ../tmpc/ --new-branch
663 pushing to ../tmpc/
664 searching for changes
665 adding changesets
666 adding manifests
667 adding file changes
668 added 1 changesets with 1 changes to 1 files (+1 heads)
669
670 make later tests work unmodified
671
672 $ hg --config extensions.strip= strip tip -q
673 $ hg up 5 -q
674
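The push above succeeds thanks to the issue6372 fix: the obsolete/unstable
check is now applied only to the changesets actually missing from the
remote, so an unrelated outgoing changeset no longer trips over troubled
changesets the remote already has. A hedged sketch of the idea (illustrative
names, not the verbatim exchange.py hunk):

    # sketch: restrict the pre-push check to actually-missing revisions
    def check_outgoing_instability(repo, outgoing):
        obsolete, unstable = [], []
        for node in outgoing.missing:  # nodes absent from the remote
            ctx = repo[node]
            if ctx.obsolete():
                obsolete.append(ctx)
            elif ctx.isunstable():
                unstable.append(ctx)
        return obsolete, unstable
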
650 675 Test that extinct changesets are properly detected
651 676
652 677 $ hg log -r 'extinct()'
653 678
654 679 Don't try to push extinct changesets
655 680
656 681 $ hg init ../tmpf
657 682 $ hg out ../tmpf
658 683 comparing with ../tmpf
659 684 searching for changes
660 685 0:1f0dee641bb7 (public) [ ] add a
661 686 1:7c3bad9141dc (public) [ ] add b
662 687 2:245bde4270cd (public) [ ] add original_c
663 688 3:6f9641995072 (draft) [ ] add n3w_3_c
664 689 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
665 690 5:cda648ca50f5 (draft orphan) [tip ] add original_e
666 691 $ hg push ../tmpf -f # -f because we push unstable too
667 692 pushing to ../tmpf
668 693 searching for changes
669 694 adding changesets
670 695 adding manifests
671 696 adding file changes
672 697 added 6 changesets with 6 changes to 6 files (+1 heads)
673 698 7 new obsolescence markers
674 699 1 new orphan changesets
675 700
676 701 no warning displayed
677 702
678 703 $ hg push ../tmpf
679 704 pushing to ../tmpf
680 705 searching for changes
681 706 no changes found
682 707 [1]
683 708
684 709 Do not warn about new head when the new head is a successor of a remote one
685 710
686 711 $ hg log -G
687 712 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
688 713 |
689 714 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
690 715 |
691 716 o 3:6f9641995072 (draft) [ ] add n3w_3_c
692 717 |
693 718 | o 2:245bde4270cd (public) [ ] add original_c
694 719 |/
695 720 o 1:7c3bad9141dc (public) [ ] add b
696 721 |
697 722 o 0:1f0dee641bb7 (public) [ ] add a
698 723
699 724 $ hg up -q 'desc(n3w_3_c)'
700 725 $ mkcommit obsolete_e
701 726 created new head
702 727 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
703 728 > -u 'test <test@example.net>'
704 729 1 new obsolescence markers
705 730 obsoleted 1 changesets
706 731 $ hg outgoing ../tmpf # incidental hg outgoing testing
707 732 comparing with ../tmpf
708 733 searching for changes
709 734 6:3de5eca88c00 (draft) [tip ] add obsolete_e
710 735 $ hg push ../tmpf
711 736 pushing to ../tmpf
712 737 searching for changes
713 738 adding changesets
714 739 adding manifests
715 740 adding file changes
716 741 added 1 changesets with 1 changes to 1 files (+1 heads)
717 742 1 new obsolescence markers
718 743 obsoleted 1 changesets
719 744
720 745 test relevance computation
721 746 ---------------------------------------
722 747
723 748 Checking simple case of "marker relevance".
724 749
725 750
726 751 Reminder of the repo situation
727 752
728 753 $ hg log --hidden --graph
729 754 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
730 755 |
731 756 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e [rewritten as 6:3de5eca88c00 by test <test@example.net>]
732 757 | |
733 758 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
734 759 |/
735 760 o 3:6f9641995072 (draft) [ ] add n3w_3_c
736 761 |
737 762 | o 2:245bde4270cd (public) [ ] add original_c
738 763 |/
739 764 o 1:7c3bad9141dc (public) [ ] add b
740 765 |
741 766 o 0:1f0dee641bb7 (public) [ ] add a
742 767
743 768
744 769 List of all markers
745 770
746 771 $ hg debugobsolete
747 772 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
748 773 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
749 774 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
750 775 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
751 776 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
752 777 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
753 778 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
754 779 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
755 780
756 781 List of changesets with no chain
757 782
758 783 $ hg debugobsolete --hidden --rev ::2
759 784
760 785 List of changesets that are included in a marker chain
761 786
762 787 $ hg debugobsolete --hidden --rev 6
763 788 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
764 789
765 790 List of changesets with a longer chain (including a pruned child)
766 791
767 792 $ hg debugobsolete --hidden --rev 3
768 793 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
769 794 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
770 795 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
771 796 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
772 797 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
773 798 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
774 799 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
775 800
776 801 List of both
777 802
778 803 $ hg debugobsolete --hidden --rev 3::6
779 804 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
780 805 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
781 806 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
782 807 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
783 808 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
784 809 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
785 810 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
786 811 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
787 812
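The listings above are driven by "marker relevance": for a set of revisions,
Mercurial walks predecessor chains (including prune markers on descendants)
to decide which markers to show or exchange. A minimal sketch of querying
this from Python, assuming in-process `ui`/`repo` objects and the internal
`obsstore.relevantmarkers()` API:

    # sketch: fetch the markers relevant to a set of nodes
    from mercurial.node import hex

    nodes = [repo[3].node(), repo[6].node()]
    for marker in sorted(repo.obsstore.relevantmarkers(nodes)):
        prec, succs = marker[0], marker[1]  # predecessor, successors
        ui.write(b'%s -> %d successor(s)\n' % (hex(prec), len(succs)))
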
788 813 List of all markers in JSON
789 814
790 815 $ hg debugobsolete -Tjson
791 816 [
792 817 {
793 818 "date": [1339, 0],
794 819 "flag": 0,
795 820 "metadata": {"user": "test"},
796 821 "prednode": "1339133913391339133913391339133913391339",
797 822 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
798 823 },
799 824 {
800 825 "date": [1339, 0],
801 826 "flag": 0,
802 827 "metadata": {"user": "test"},
803 828 "prednode": "1337133713371337133713371337133713371337",
804 829 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
805 830 },
806 831 {
807 832 "date": [121, 120],
808 833 "flag": 12,
809 834 "metadata": {"user": "test"},
810 835 "prednode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
811 836 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
812 837 },
813 838 {
814 839 "date": [1338, 0],
815 840 "flag": 1,
816 841 "metadata": {"user": "test"},
817 842 "prednode": "5601fb93a350734d935195fee37f4054c529ff39",
818 843 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
819 844 },
820 845 {
821 846 "date": [1338, 0],
822 847 "flag": 0,
823 848 "metadata": {"user": "test"},
824 849 "prednode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
825 850 "succnodes": ["1337133713371337133713371337133713371337"]
826 851 },
827 852 {
828 853 "date": [1337, 0],
829 854 "flag": 0,
830 855 "metadata": {"user": "test"},
831 856 "prednode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
832 857 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
833 858 },
834 859 {
835 860 "date": [0, 0],
836 861 "flag": 0,
837 862 "metadata": {"user": "test"},
838 863 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
839 864 "prednode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
840 865 "succnodes": []
841 866 },
842 867 {
843 868 "date": *, (glob)
844 869 "flag": 0,
845 870 "metadata": {"user": "test <test@example.net>"},
846 871 "prednode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
847 872 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
848 873 }
849 874 ]
850 875
851 876 Template keywords
852 877
853 878 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
854 879 3de5eca88c00 ????-??-?? (glob)
855 880 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
856 881 user=test <test@example.net>
857 882 $ hg debugobsolete -r6 -T '{metadata}\n{metadata}\n'
858 883 'user': 'test <test@example.net>'
859 884 'user': 'test <test@example.net>'
860 885 $ hg debugobsolete -r6 -T '{succnodes}\n{succnodes}\n'
861 886 3de5eca88c00aa039da7399a220f4a5221faa585
862 887 3de5eca88c00aa039da7399a220f4a5221faa585
863 888 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
864 889 0 test <test@example.net>
865 890
866 891 Test the debug output for exchange
867 892 ----------------------------------
868 893
869 894 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
870 895 pulling from ../tmpb
871 896 searching for changes
872 897 no changes found
873 898 obsmarker-exchange: 346 bytes received
874 899
875 900 check hgweb does not explode
876 901 ====================================
877 902
878 903 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
879 904 adding changesets
880 905 adding manifests
881 906 adding file changes
882 907 added 62 changesets with 63 changes to 9 files (+60 heads)
883 908 new changesets 50c51b361e60:c15e9edfca13 (62 drafts)
884 909 (2 other changesets obsolete on arrival)
885 910 (run 'hg heads .' to see heads, 'hg merge' to merge)
886 911 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
887 912 > do
888 913 > hg debugobsolete $node
889 914 > done
890 915 1 new obsolescence markers
891 916 obsoleted 1 changesets
892 917 1 new obsolescence markers
893 918 obsoleted 1 changesets
894 919 1 new obsolescence markers
895 920 obsoleted 1 changesets
896 921 1 new obsolescence markers
897 922 obsoleted 1 changesets
898 923 1 new obsolescence markers
899 924 obsoleted 1 changesets
900 925 1 new obsolescence markers
901 926 obsoleted 1 changesets
902 927 1 new obsolescence markers
903 928 obsoleted 1 changesets
904 929 1 new obsolescence markers
905 930 obsoleted 1 changesets
906 931 1 new obsolescence markers
907 932 obsoleted 1 changesets
908 933 1 new obsolescence markers
909 934 obsoleted 1 changesets
910 935 1 new obsolescence markers
911 936 obsoleted 1 changesets
912 937 1 new obsolescence markers
913 938 obsoleted 1 changesets
914 939 1 new obsolescence markers
915 940 obsoleted 1 changesets
916 941 1 new obsolescence markers
917 942 obsoleted 1 changesets
918 943 1 new obsolescence markers
919 944 obsoleted 1 changesets
920 945 1 new obsolescence markers
921 946 obsoleted 1 changesets
922 947 1 new obsolescence markers
923 948 obsoleted 1 changesets
924 949 1 new obsolescence markers
925 950 obsoleted 1 changesets
926 951 1 new obsolescence markers
927 952 obsoleted 1 changesets
928 953 1 new obsolescence markers
929 954 obsoleted 1 changesets
930 955 1 new obsolescence markers
931 956 obsoleted 1 changesets
932 957 1 new obsolescence markers
933 958 obsoleted 1 changesets
934 959 1 new obsolescence markers
935 960 obsoleted 1 changesets
936 961 1 new obsolescence markers
937 962 obsoleted 1 changesets
938 963 1 new obsolescence markers
939 964 obsoleted 1 changesets
940 965 1 new obsolescence markers
941 966 obsoleted 1 changesets
942 967 1 new obsolescence markers
943 968 obsoleted 1 changesets
944 969 1 new obsolescence markers
945 970 obsoleted 1 changesets
946 971 1 new obsolescence markers
947 972 obsoleted 1 changesets
948 973 1 new obsolescence markers
949 974 obsoleted 1 changesets
950 975 1 new obsolescence markers
951 976 obsoleted 1 changesets
952 977 1 new obsolescence markers
953 978 obsoleted 1 changesets
954 979 1 new obsolescence markers
955 980 obsoleted 1 changesets
956 981 1 new obsolescence markers
957 982 obsoleted 1 changesets
958 983 1 new obsolescence markers
959 984 obsoleted 1 changesets
960 985 1 new obsolescence markers
961 986 obsoleted 1 changesets
962 987 1 new obsolescence markers
963 988 obsoleted 1 changesets
964 989 1 new obsolescence markers
965 990 obsoleted 1 changesets
966 991 1 new obsolescence markers
967 992 obsoleted 1 changesets
968 993 1 new obsolescence markers
969 994 obsoleted 1 changesets
970 995 1 new obsolescence markers
971 996 obsoleted 1 changesets
972 997 1 new obsolescence markers
973 998 obsoleted 1 changesets
974 999 1 new obsolescence markers
975 1000 obsoleted 1 changesets
976 1001 1 new obsolescence markers
977 1002 obsoleted 1 changesets
978 1003 1 new obsolescence markers
979 1004 obsoleted 1 changesets
980 1005 1 new obsolescence markers
981 1006 obsoleted 1 changesets
982 1007 1 new obsolescence markers
983 1008 obsoleted 1 changesets
984 1009 1 new obsolescence markers
985 1010 obsoleted 1 changesets
986 1011 1 new obsolescence markers
987 1012 obsoleted 1 changesets
988 1013 1 new obsolescence markers
989 1014 obsoleted 1 changesets
990 1015 1 new obsolescence markers
991 1016 obsoleted 1 changesets
992 1017 1 new obsolescence markers
993 1018 obsoleted 1 changesets
994 1019 1 new obsolescence markers
995 1020 obsoleted 1 changesets
996 1021 1 new obsolescence markers
997 1022 obsoleted 1 changesets
998 1023 1 new obsolescence markers
999 1024 obsoleted 1 changesets
1000 1025 1 new obsolescence markers
1001 1026 obsoleted 1 changesets
1002 1027 1 new obsolescence markers
1003 1028 obsoleted 1 changesets
1004 1029 1 new obsolescence markers
1005 1030 obsoleted 1 changesets
1006 1031 1 new obsolescence markers
1007 1032 obsoleted 1 changesets
1008 1033 1 new obsolescence markers
1009 1034 obsoleted 1 changesets
1010 1035 $ hg up tip
1011 1036 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1012 1037
1013 1038 #if serve
1014 1039
1015 1040 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1016 1041 $ cat hg.pid >> $DAEMON_PIDS
1017 1042
1018 1043 check changelog view
1019 1044
1020 1045 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
1021 1046 200 Script output follows
1022 1047
1023 1048 check graph view
1024 1049
1025 1050 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
1026 1051 200 Script output follows
1027 1052
1028 1053 check filelog view
1029 1054
1030 1055 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
1031 1056 200 Script output follows
1032 1057
1033 1058 check filelog view for hidden commits (obsolete ones are hidden here)
1034 1059
1035 1060 $ get-with-headers.py localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar' | grep obsolete
1036 1061 [1]
1037 1062
1038 1063 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
1039 1064 200 Script output follows
1040 1065 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1041 1066 404 Not Found
1042 1067 [1]
1043 1068
1044 1069 check the web.view config option:
1045 1070
1046 1071 $ killdaemons.py hg.pid
1047 1072 $ cat >> .hg/hgrc << EOF
1048 1073 > [web]
1049 1074 > view=all
1050 1075 > EOF
1051 1076 $ wait
1052 1077 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1053 1078 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1054 1079 200 Script output follows
1055 1080 $ killdaemons.py hg.pid
1056 1081
1057 1082 Checking the _enable=False warning if obsolete markers exist
1058 1083
1059 1084 $ echo '[experimental]' >> $HGRCPATH
1060 1085 $ echo "evolution=" >> $HGRCPATH
1061 1086 $ hg log -r tip
1062 1087 68:c15e9edfca13 (draft) [tip ] add celestine
1063 1088
1064 1089 re-enable for later tests
1065 1090
1066 1091 $ echo '[experimental]' >> $HGRCPATH
1067 1092 $ echo "evolution.exchange=True" >> $HGRCPATH
1068 1093 $ echo "evolution.createmarkers=True" >> $HGRCPATH
1069 1094
1070 1095 $ rm access.log errors.log
1071 1096 #endif
1072 1097
1073 1098 Several instabilities on the same changeset (create an orphan, phase-divergent and content-divergent changeset)
1074 1099
1075 1100 $ hg debugobsolete `getid obsolete_e`
1076 1101 1 new obsolescence markers
1077 1102 obsoleted 1 changesets
1078 1103 2 new orphan changesets
1079 1104 $ hg debugobsolete `getid original_c` `getid babar`
1080 1105 1 new obsolescence markers
1081 1106 1 new phase-divergent changesets
1082 1107 2 new content-divergent changesets
1083 1108 $ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan() and contentdivergent()'
1084 1109 changeset: 7:50c51b361e60
1085 1110 user: test
1086 1111 date: Thu Jan 01 00:00:00 1970 +0000
1087 1112 instability: orphan, phase-divergent, content-divergent
1088 1113 summary: add babar
1089 1114
1090 1115 test the "obsolete" templatekw
1091 1116
1092 1117 $ hg log -r 'obsolete()'
1093 1118 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e [pruned]
1094 1119
1095 1120 test the "troubles" templatekw
1096 1121
1097 1122 $ hg log -r 'phasedivergent() and orphan()'
1098 1123 7:50c51b361e60 (draft orphan phase-divergent content-divergent) [ ] add babar
1099 1124
1100 1125 test the default cmdline template
1101 1126
1102 1127 $ hg log -T default -r 'phasedivergent()'
1103 1128 changeset: 7:50c51b361e60
1104 1129 user: test
1105 1130 date: Thu Jan 01 00:00:00 1970 +0000
1106 1131 instability: orphan, phase-divergent, content-divergent
1107 1132 summary: add babar
1108 1133
1109 1134 $ hg log -T default -r 'obsolete()'
1110 1135 changeset: 6:3de5eca88c00
1111 1136 parent: 3:6f9641995072
1112 1137 user: test
1113 1138 date: Thu Jan 01 00:00:00 1970 +0000
1114 1139 obsolete: pruned
1115 1140 summary: add obsolete_e
1116 1141
1117 1142
1118 1143 test the obsolete labels
1119 1144
1120 1145 $ hg log --config ui.logtemplate= --color=debug -r 'phasedivergent()'
1121 1146 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1122 1147 [log.user|user: test]
1123 1148 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1124 1149 [log.instability|instability: orphan, phase-divergent, content-divergent]
1125 1150 [log.summary|summary: add babar]
1126 1151
1127 1152
1128 1153 $ hg log -T default -r 'phasedivergent()' --color=debug
1129 1154 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1130 1155 [log.user|user: test]
1131 1156 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1132 1157 [log.instability|instability: orphan, phase-divergent, content-divergent]
1133 1158 [log.summary|summary: add babar]
1134 1159
1135 1160
1136 1161 $ hg log --config ui.logtemplate= --color=debug -r "obsolete()"
1137 1162 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1138 1163 [log.parent changeset.draft|parent: 3:6f9641995072]
1139 1164 [log.user|user: test]
1140 1165 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1141 1166 [log.obsfate|obsolete: pruned]
1142 1167 [log.summary|summary: add obsolete_e]
1143 1168
1144 1169
1145 1170 $ hg log -T default -r 'obsolete()' --color=debug
1146 1171 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1147 1172 [log.parent changeset.draft|parent: 3:6f9641995072]
1148 1173 [log.user|user: test]
1149 1174 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1150 1175 [log.obsfate|obsolete: pruned]
1151 1176 [log.summary|summary: add obsolete_e]
1152 1177
1153 1178
1154 1179 test summary output
1155 1180
1156 1181 $ hg up -r 'phasedivergent() and orphan()'
1157 1182 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1158 1183 $ hg summary
1159 1184 parent: 7:50c51b361e60 (orphan, phase-divergent, content-divergent)
1160 1185 add babar
1161 1186 branch: default
1162 1187 commit: (clean)
1163 1188 update: 2 new changesets (update)
1164 1189 phases: 4 draft
1165 1190 orphan: 2 changesets
1166 1191 content-divergent: 2 changesets
1167 1192 phase-divergent: 1 changesets
1168 1193 $ hg up -r 'obsolete()'
1169 1194 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1170 1195 $ hg summary
1171 1196 parent: 6:3de5eca88c00 (obsolete)
1172 1197 add obsolete_e
1173 1198 branch: default
1174 1199 commit: (clean)
1175 1200 update: 3 new changesets (update)
1176 1201 phases: 4 draft
1177 1202 orphan: 2 changesets
1178 1203 content-divergent: 2 changesets
1179 1204 phase-divergent: 1 changesets
1180 1205
1181 1206 test debugwhyunstable output
1182 1207
1183 1208 $ hg debugwhyunstable 50c51b361e60
1184 1209 orphan: obsolete parent 3de5eca88c00aa039da7399a220f4a5221faa585
1185 1210 phase-divergent: immutable predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1186 1211 content-divergent: 6f96419950729f3671185b847352890f074f7557 (draft) predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1187 1212
1188 1213 test whyunstable template keyword
1189 1214
1190 1215 $ hg log -r 50c51b361e60 -T '{whyunstable}\n'
1191 1216 orphan: obsolete parent 3de5eca88c00
1192 1217 phase-divergent: immutable predecessor 245bde4270cd
1193 1218 content-divergent: 3:6f9641995072 (draft) predecessor 245bde4270cd
1194 1219 $ hg log -r 50c51b361e60 -T '{whyunstable % "{instability}: {reason} {node|shortest}\n"}'
1195 1220 orphan: obsolete parent 3de5
1196 1221 phase-divergent: immutable predecessor 245b
1197 1222 content-divergent: predecessor 245b
1198 1223
1224 $ hg push ../tmpf -r 50c51b361e60
1225 pushing to ../tmpf
1226 searching for changes
1227 abort: push includes unstable changesets:
1228 50c51b361e60 (orphan, phase-divergent, content-divergent)
1229 [255]
1230
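The new abort message lists each unstable changeset together with all of its
instabilities. A one-line sketch of how such a line can be assembled from a
changectx (the `repo` object and lookup are assumptions; this is not the
exact exchange.py formatting):

    # sketch: format "<short hash> (<instability>, ...)" for a changeset
    ctx = repo[b'50c51b361e60']  # hypothetical in-process lookup
    line = b'%s (%s)' % (ctx.hex()[:12], b', '.join(ctx.instabilities()))
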
1231
1199 1232 #if serve
1200 1233
1201 1234 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1202 1235 $ cat hg.pid >> $DAEMON_PIDS
1203 1236
1204 1237 check obsolete changeset
1205 1238
1206 1239 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=paper' | grep '<span class="obsolete">'
1207 1240 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1208 1241 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=coal' | grep '<span class="obsolete">'
1209 1242 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1210 1243 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=gitweb' | grep '<span class="logtags">'
1211 1244 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1212 1245 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=monoblue' | grep '<span class="logtags">'
1213 1246 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1214 1247 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=spartan' | grep 'class="obsolete"'
1215 1248 <th class="obsolete">obsolete:</th>
1216 1249 <td class="obsolete">pruned by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
1217 1250
1218 1251 check changeset with instabilities
1219 1252
1220 1253 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=paper' | grep '<span class="instability">'
1221 1254 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1222 1255 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=coal' | grep '<span class="instability">'
1223 1256 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1224 1257 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=gitweb' | grep '<span class="logtags">'
1225 1258 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1226 1259 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=monoblue' | grep '<span class="logtags">'
1227 1260 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1228 1261 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=spartan' | grep 'class="unstable"'
1229 1262 <th class="unstable">unstable:</th>
1230 1263 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1231 1264 <th class="unstable">unstable:</th>
1232 1265 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1233 1266 <th class="unstable">unstable:</th>
1234 1267 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1235 1268
1236 1269 check explanation for an orphan, phase-divergent and content-divergent changeset
1237 1270
1238 1271 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=paper' | egrep '(orphan|phase-divergent|content-divergent):'
1239 1272 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=paper">3de5eca88c00</a><br>
1240 1273 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a><br>
1241 1274 content-divergent: <a href="/rev/6f9641995072?style=paper">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a></td>
1242 1275 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=coal' | egrep '(orphan|phase-divergent|content-divergent):'
1243 1276 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=coal">3de5eca88c00</a><br>
1244 1277 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a><br>
1245 1278 content-divergent: <a href="/rev/6f9641995072?style=coal">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a></td>
1246 1279 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=gitweb' | egrep '(orphan|phase-divergent|content-divergent):'
1247 1280 <td>orphan: obsolete parent <a class="list" href="/rev/3de5eca88c00?style=gitweb">3de5eca88c00</a></td>
1248 1281 <td>phase-divergent: immutable predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1249 1282 <td>content-divergent: <a class="list" href="/rev/6f9641995072?style=gitweb">6f9641995072</a> (draft) predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1250 1283 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=monoblue' | egrep '(orphan|phase-divergent|content-divergent):'
1251 1284 <dd>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=monoblue">3de5eca88c00</a></dd>
1252 1285 <dd>phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1253 1286 <dd>content-divergent: <a href="/rev/6f9641995072?style=monoblue">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1254 1287 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=spartan' | egrep '(orphan|phase-divergent|content-divergent):'
1255 1288 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1256 1289 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1257 1290 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1258 1291
1259 1292 $ killdaemons.py
1260 1293
1261 1294 $ rm hg.pid access.log errors.log
1262 1295
1263 1296 #endif
1264 1297
1265 1298 Test incoming/outgoing with changesets obsoleted remotely, known locally
1266 1299 ===============================================================================
1267 1300
1268 1301 This tests issue 3805
1269 1302
1270 1303 $ hg init repo-issue3805
1271 1304 $ cd repo-issue3805
1272 1305 $ echo "base" > base
1273 1306 $ hg ci -Am "base"
1274 1307 adding base
1275 1308 $ echo "foo" > foo
1276 1309 $ hg ci -Am "A"
1277 1310 adding foo
1278 1311 $ hg clone . ../other-issue3805
1279 1312 updating to branch default
1280 1313 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1281 1314 $ echo "bar" >> foo
1282 1315 $ hg ci --amend
1283 1316 $ cd ../other-issue3805
1284 1317 $ hg log -G
1285 1318 @ 1:29f0c6921ddd (draft) [tip ] A
1286 1319 |
1287 1320 o 0:d20a80d4def3 (draft) [ ] base
1288 1321
1289 1322 $ hg log -G -R ../repo-issue3805
1290 1323 @ 2:323a9c3ddd91 (draft) [tip ] A
1291 1324 |
1292 1325 o 0:d20a80d4def3 (draft) [ ] base
1293 1326
1294 1327 $ hg incoming
1295 1328 comparing with $TESTTMP/tmpe/repo-issue3805
1296 1329 searching for changes
1297 1330 2:323a9c3ddd91 (draft) [tip ] A
1298 1331 $ hg incoming --bundle ../issue3805.hg
1299 1332 comparing with $TESTTMP/tmpe/repo-issue3805
1300 1333 searching for changes
1301 1334 2:323a9c3ddd91 (draft) [tip ] A
1302 1335 $ hg outgoing
1303 1336 comparing with $TESTTMP/tmpe/repo-issue3805
1304 1337 searching for changes
1305 1338 1:29f0c6921ddd (draft) [tip ] A
1306 1339
1307 1340 #if serve
1308 1341
1309 1342 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1310 1343 $ cat hg.pid >> $DAEMON_PIDS
1311 1344
1312 1345 $ hg incoming http://localhost:$HGPORT
1313 1346 comparing with http://localhost:$HGPORT/
1314 1347 searching for changes
1315 1348 2:323a9c3ddd91 (draft) [tip ] A
1316 1349 $ hg outgoing http://localhost:$HGPORT
1317 1350 comparing with http://localhost:$HGPORT/
1318 1351 searching for changes
1319 1352 1:29f0c6921ddd (draft) [tip ] A
1320 1353
1321 1354 $ killdaemons.py
1322 1355
1323 1356 #endif
1324 1357
1325 1358 This tests issue 3814
1326 1359
1327 1360 (nothing to push but a locally hidden changeset)
1328 1361
1329 1362 $ cd ..
1330 1363 $ hg init repo-issue3814
1331 1364 $ cd repo-issue3805
1332 1365 $ hg push -r 323a9c3ddd91 ../repo-issue3814
1333 1366 pushing to ../repo-issue3814
1334 1367 searching for changes
1335 1368 adding changesets
1336 1369 adding manifests
1337 1370 adding file changes
1338 1371 added 2 changesets with 2 changes to 2 files
1339 1372 1 new obsolescence markers
1340 1373 $ hg out ../repo-issue3814
1341 1374 comparing with ../repo-issue3814
1342 1375 searching for changes
1343 1376 no changes found
1344 1377 [1]
1345 1378
1346 1379 Test that a local tag blocks a changeset from being hidden
1347 1380
1348 1381 $ hg tag -l visible -r 1 --hidden
1349 1382 $ hg log -G
1350 1383 @ 2:323a9c3ddd91 (draft) [tip ] A
1351 1384 |
1352 1385 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A [rewritten using amend as 2:323a9c3ddd91]
1353 1386 |/
1354 1387 o 0:d20a80d4def3 (draft) [ ] base
1355 1388
1356 1389 Test that removing a local tag does not cause some commands to fail
1357 1390
1358 1391 $ hg tag -l -r tip tiptag
1359 1392 $ hg tags
1360 1393 tiptag 2:323a9c3ddd91
1361 1394 tip 2:323a9c3ddd91
1362 1395 visible 1:29f0c6921ddd
1363 1396 $ hg --config extensions.strip= strip -r tip --no-backup
1364 1397 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1365 1398 $ hg tags
1366 1399 visible 1:29f0c6921ddd
1367 1400 tip 1:29f0c6921ddd
1368 1401
1369 1402 Test bundle overlay onto hidden revision
1370 1403
1371 1404 $ cd ..
1372 1405 $ hg init repo-bundleoverlay
1373 1406 $ cd repo-bundleoverlay
1374 1407 $ echo "A" > foo
1375 1408 $ hg ci -Am "A"
1376 1409 adding foo
1377 1410 $ echo "B" >> foo
1378 1411 $ hg ci -m "B"
1379 1412 $ hg up 0
1380 1413 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1381 1414 $ echo "C" >> foo
1382 1415 $ hg ci -m "C"
1383 1416 created new head
1384 1417 $ hg log -G
1385 1418 @ 2:c186d7714947 (draft) [tip ] C
1386 1419 |
1387 1420 | o 1:44526ebb0f98 (draft) [ ] B
1388 1421 |/
1389 1422 o 0:4b34ecfb0d56 (draft) [ ] A
1390 1423
1391 1424
1392 1425 $ hg clone -r1 . ../other-bundleoverlay
1393 1426 adding changesets
1394 1427 adding manifests
1395 1428 adding file changes
1396 1429 added 2 changesets with 2 changes to 1 files
1397 1430 new changesets 4b34ecfb0d56:44526ebb0f98 (2 drafts)
1398 1431 updating to branch default
1399 1432 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1400 1433 $ cd ../other-bundleoverlay
1401 1434 $ echo "B+" >> foo
1402 1435 $ hg ci --amend -m "B+"
1403 1436 $ hg log -G --hidden
1404 1437 @ 2:b7d587542d40 (draft) [tip ] B+
1405 1438 |
1406 1439 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B [rewritten using amend as 2:b7d587542d40]
1407 1440 |/
1408 1441 o 0:4b34ecfb0d56 (draft) [ ] A
1409 1442
1410 1443
1411 1444 #if repobundlerepo
1412 1445 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1413 1446 comparing with ../repo-bundleoverlay
1414 1447 searching for changes
1415 1448 1:44526ebb0f98 (draft) [ ] B
1416 1449 2:c186d7714947 (draft) [tip ] C
1417 1450 $ hg log -G -R ../bundleoverlay.hg
1418 1451 o 3:c186d7714947 (draft) [tip ] C
1419 1452 |
1420 1453 | @ 2:b7d587542d40 (draft) [ ] B+
1421 1454 |/
1422 1455 o 0:4b34ecfb0d56 (draft) [ ] A
1423 1456
1424 1457 #endif
1425 1458
1426 1459 #if serve
1427 1460
1428 1461 Test issue 4506
1429 1462
1430 1463 $ cd ..
1431 1464 $ hg init repo-issue4506
1432 1465 $ cd repo-issue4506
1433 1466 $ echo "0" > foo
1434 1467 $ hg add foo
1435 1468 $ hg ci -m "content-0"
1436 1469
1437 1470 $ hg up null
1438 1471 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1439 1472 $ echo "1" > bar
1440 1473 $ hg add bar
1441 1474 $ hg ci -m "content-1"
1442 1475 created new head
1443 1476 $ hg up 0
1444 1477 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1445 1478 $ hg graft 1
1446 1479 grafting 1:1c9eddb02162 "content-1" (tip)
1447 1480
1448 1481 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1449 1482 1 new obsolescence markers
1450 1483 obsoleted 1 changesets
1451 1484
1452 1485 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1453 1486 $ cat hg.pid >> $DAEMON_PIDS
1454 1487
1455 1488 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1456 1489 404 Not Found
1457 1490 [1]
1458 1491 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1459 1492 200 Script output follows
1460 1493 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1461 1494 200 Script output follows
1462 1495
1463 1496 $ killdaemons.py
1464 1497
1465 1498 #endif
1466 1499
1467 1500 Test heads computation on pending index changes with obsolescence markers
1468 1501 $ cd ..
1469 1502 $ cat >$TESTTMP/test_extension.py << EOF
1470 1503 > from __future__ import absolute_import
1471 1504 > from mercurial.i18n import _
1472 1505 > from mercurial import cmdutil, pycompat, registrar
1473 1506 > from mercurial.utils import stringutil
1474 1507 >
1475 1508 > cmdtable = {}
1476 1509 > command = registrar.command(cmdtable)
1477 1510 > @command(b"amendtransient",[], _(b'hg amendtransient [rev]'))
1478 1511 > def amend(ui, repo, *pats, **opts):
1479 1512 > opts = pycompat.byteskwargs(opts)
1480 1513 > opts[b'message'] = b'Test'
1481 1514 > opts[b'logfile'] = None
1482 1515 > cmdutil.amend(ui, repo, repo[b'.'], {}, pats, opts)
1483 1516 > ui.write(b'%s\n' % stringutil.pprint(repo.changelog.headrevs()))
1484 1517 > EOF
1485 1518 $ cat >> $HGRCPATH << EOF
1486 1519 > [extensions]
1487 1520 > testextension=$TESTTMP/test_extension.py
1488 1521 > EOF
1489 1522 $ hg init repo-issue-nativerevs-pending-changes
1490 1523 $ cd repo-issue-nativerevs-pending-changes
1491 1524 $ mkcommit a
1492 1525 $ mkcommit b
1493 1526 $ hg up ".^"
1494 1527 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1495 1528 $ echo aa > a
1496 1529 $ hg amendtransient
1497 1530 1 new orphan changesets
1498 1531 [1, 2]
1499 1532
1500 1533 Test cache consistency for the visible filter
1501 1534 1) We want to make sure that the cached filtered revs are invalidated when
1502 1535 bookmarks change
1503 1536 $ cd ..
1504 1537 $ cat >$TESTTMP/test_extension.py << EOF
1505 1538 > from __future__ import absolute_import, print_function
1506 1539 > import weakref
1507 1540 > from mercurial import (
1508 1541 > bookmarks,
1509 1542 > cmdutil,
1510 1543 > extensions,
1511 1544 > repoview,
1512 1545 > )
1513 1546 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1514 1547 > reporef = weakref.ref(bkmstoreinst._repo)
1515 1548 > def trhook(tr):
1516 1549 > repo = reporef()
1517 1550 > hidden1 = repoview.computehidden(repo)
1518 1551 > hidden = repoview.filterrevs(repo, b'visible')
1519 1552 > if sorted(hidden1) != sorted(hidden):
1520 1553 > print("cache inconsistency")
1521 1554 > bkmstoreinst._repo.currenttransaction().addpostclose(b'test_extension', trhook)
1522 1555 > orig(bkmstoreinst, *args, **kwargs)
1523 1556 > def extsetup(ui):
1524 1557 > extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
1525 1558 > _bookmarkchanged)
1526 1559 > EOF
1527 1560
1528 1561 $ hg init repo-cache-inconsistency
1529 1562 $ cd repo-issue-nativerevs-pending-changes
1530 1563 $ mkcommit a
1531 1564 a already tracked!
1532 1565 $ mkcommit b
1533 1566 $ hg id
1534 1567 13bedc178fce tip
1535 1568 $ echo "hello" > b
1536 1569 $ hg commit --amend -m "message"
1537 1570 $ hg book bookb -r 13bedc178fce --hidden
1538 1571 bookmarking hidden changeset 13bedc178fce
1539 1572 (hidden revision '13bedc178fce' was rewritten as: a9b1f8652753)
1540 1573 $ hg log -r 13bedc178fce
1541 1574 4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
1542 1575 $ hg book -d bookb
1543 1576 $ hg log -r 13bedc178fce
1544 1577 abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
1545 1578 (use --hidden to access hidden revisions)
1546 1579 [255]
1547 1580
1548 1581 Empty out the test extension, as it isn't compatible with later parts
1549 1582 of the test.
1550 1583 $ echo > $TESTTMP/test_extension.py
1551 1584
1552 1585 Test the ability to pull a changeset with obsolescence markers already applying locally
1553 1586 (issue4945)
1554 1587
1555 1588 $ cd ..
1556 1589 $ hg init issue4845
1557 1590 $ cd issue4845
1558 1591
1559 1592 $ echo foo > f0
1560 1593 $ hg add f0
1561 1594 $ hg ci -m '0'
1562 1595 $ echo foo > f1
1563 1596 $ hg add f1
1564 1597 $ hg ci -m '1'
1565 1598 $ echo foo > f2
1566 1599 $ hg add f2
1567 1600 $ hg ci -m '2'
1568 1601
1569 1602 $ echo bar > f2
1570 1603 $ hg commit --amend --config experimental.evolution.createmarkers=True
1571 1604 $ hg log -G
1572 1605 @ 3:b0551702f918 (draft) [tip ] 2
1573 1606 |
1574 1607 o 1:e016b03fd86f (draft) [ ] 1
1575 1608 |
1576 1609 o 0:a78f55e5508c (draft) [ ] 0
1577 1610
1578 1611 $ hg log -G --hidden
1579 1612 @ 3:b0551702f918 (draft) [tip ] 2
1580 1613 |
1581 1614 | x 2:e008cf283490 (draft *obsolete*) [ ] 2 [rewritten using amend as 3:b0551702f918]
1582 1615 |/
1583 1616 o 1:e016b03fd86f (draft) [ ] 1
1584 1617 |
1585 1618 o 0:a78f55e5508c (draft) [ ] 0
1586 1619
1587 1620
1588 1621 $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
1589 1622 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg
1590 1623 $ hg debugobsolete
1591 1624 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1592 1625 $ hg log -G
1593 1626 @ 2:b0551702f918 (draft) [tip ] 2
1594 1627 |
1595 1628 o 1:e016b03fd86f (draft) [ ] 1
1596 1629 |
1597 1630 o 0:a78f55e5508c (draft) [ ] 0
1598 1631
1599 1632 $ hg log -G --hidden
1600 1633 @ 2:b0551702f918 (draft) [tip ] 2
1601 1634 |
1602 1635 o 1:e016b03fd86f (draft) [ ] 1
1603 1636 |
1604 1637 o 0:a78f55e5508c (draft) [ ] 0
1605 1638
1606 1639 $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
1607 1640 Stream params: {Compression: BZ}
1608 1641 changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
1609 1642 e008cf2834908e5d6b0f792a9d4b0e2272260fb8
1610 1643 cache:rev-branch-cache -- {} (mandatory: False)
1611 1644 phase-heads -- {} (mandatory: True)
1612 1645 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 draft
1613 1646
1614 1647 #if repobundlerepo
1615 1648 $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
1616 1649 pulling from .hg/strip-backup/e008cf283490-ede36964-backup.hg
1617 1650 searching for changes
1618 1651 no changes found
1619 1652 #endif
1620 1653 $ hg debugobsolete
1621 1654 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1622 1655 $ hg log -G
1623 1656 @ 2:b0551702f918 (draft) [tip ] 2
1624 1657 |
1625 1658 o 1:e016b03fd86f (draft) [ ] 1
1626 1659 |
1627 1660 o 0:a78f55e5508c (draft) [ ] 0
1628 1661
1629 1662 $ hg log -G --hidden
1630 1663 @ 2:b0551702f918 (draft) [tip ] 2
1631 1664 |
1632 1665 o 1:e016b03fd86f (draft) [ ] 1
1633 1666 |
1634 1667 o 0:a78f55e5508c (draft) [ ] 0
1635 1668
1636 1669
1637 1670 Testing that strip removes markers:
1638 1671
1639 1672 $ hg strip -r 1 --config extensions.strip=
1640 1673 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1641 1674 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg
1642 1675 $ hg debugobsolete
1643 1676 $ hg log -G
1644 1677 @ 0:a78f55e5508c (draft) [tip ] 0
1645 1678
1646 1679 $ hg log -G --hidden
1647 1680 @ 0:a78f55e5508c (draft) [tip ] 0
1648 1681
1649 1682 $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1650 1683 Stream params: {Compression: BZ}
1651 1684 changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
1652 1685 e016b03fd86fcccc54817d120b90b751aaf367d6
1653 1686 b0551702f918510f01ae838ab03a463054c67b46
1654 1687 cache:rev-branch-cache -- {} (mandatory: False)
1655 1688 obsmarkers -- {} (mandatory: True)
1656 1689 version: 1 (92 bytes)
1657 1690 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1658 1691 phase-heads -- {} (mandatory: True)
1659 1692 b0551702f918510f01ae838ab03a463054c67b46 draft
1660 1693
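Note how strip stored the removed marker in the backup bundle as a dedicated
"obsmarkers" bundle2 part, which is why the unbundle below restores it. A
minimal sketch of listing the parts of such a bundle, assuming an in-process
`ui` and a hypothetical backup file name (real backups embed a hash in the
name):

    # sketch: list the bundle2 part types inside a backup bundle
    from mercurial import exchange, ui as uimod

    ui = uimod.ui.load()
    with open('.hg/strip-backup/backup.hg', 'rb') as fh:  # hypothetical name
        gen = exchange.readbundle(ui, fh, b'backup.hg')
        for part in gen.iterparts():
            ui.write(b'%s\n' % part.type)
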
1661 1694 $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1662 1695 adding changesets
1663 1696 adding manifests
1664 1697 adding file changes
1665 1698 added 2 changesets with 2 changes to 2 files
1666 1699 1 new obsolescence markers
1667 1700 new changesets e016b03fd86f:b0551702f918 (2 drafts)
1668 1701 (run 'hg update' to get a working copy)
1669 1702 $ hg debugobsolete | sort
1670 1703 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1671 1704 $ hg log -G
1672 1705 o 2:b0551702f918 (draft) [tip ] 2
1673 1706 |
1674 1707 o 1:e016b03fd86f (draft) [ ] 1
1675 1708 |
1676 1709 @ 0:a78f55e5508c (draft) [ ] 0
1677 1710
1678 1711 $ hg log -G --hidden
1679 1712 o 2:b0551702f918 (draft) [tip ] 2
1680 1713 |
1681 1714 o 1:e016b03fd86f (draft) [ ] 1
1682 1715 |
1683 1716 @ 0:a78f55e5508c (draft) [ ] 0
1684 1717
1685 1718 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1686 1719 only a subset of them is displayed (because of the --rev option)
1687 1720 $ hg init doindexrev
1688 1721 $ cd doindexrev
1689 1722 $ echo a > a
1690 1723 $ hg ci -Am a
1691 1724 adding a
1692 1725 $ hg ci --amend -m aa
1693 1726 $ echo b > b
1694 1727 $ hg ci -Am b
1695 1728 adding b
1696 1729 $ hg ci --amend -m bb
1697 1730 $ echo c > c
1698 1731 $ hg ci -Am c
1699 1732 adding c
1700 1733 $ hg ci --amend -m cc
1701 1734 $ echo d > d
1702 1735 $ hg ci -Am d
1703 1736 adding d
1704 1737 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1705 1738 $ hg debugobsolete --index --rev "3+7"
1706 1739 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1707 1740 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1708 1741 $ hg debugobsolete --index --rev "3+7" -Tjson
1709 1742 [
1710 1743 {
1711 1744 "date": [0, 0],
1712 1745 "flag": 0,
1713 1746 "index": 1,
1714 1747 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1715 1748 "prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1716 1749 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1717 1750 },
1718 1751 {
1719 1752 "date": [0, 0],
1720 1753 "flag": 0,
1721 1754 "index": 3,
1722 1755 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1723 1756 "prednode": "4715cf767440ed891755448016c2b8cf70760c30",
1724 1757 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1725 1758 }
1726 1759 ]
1727 1760
1728 1761 Test the --delete option of the debugobsolete command
1729 1762 $ hg debugobsolete --index
1730 1763 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1731 1764 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1732 1765 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1733 1766 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1734 1767 $ hg debugobsolete --delete 1 --delete 3
1735 1768 deleted 2 obsolescence markers
1736 1769 $ hg debugobsolete
1737 1770 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1738 1771 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1739 1772
1740 1773 Test adding a changeset after obsmarkers affecting it
1741 1774 (e.g. during pull or unbundle)
1742 1775
1743 1776 $ mkcommit e
1744 1777 $ hg bundle -r . --base .~1 ../bundle-2.hg
1745 1778 1 changesets found
1746 1779 $ getid .
1747 1780 $ hg --config extensions.strip= strip -r .
1748 1781 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1749 1782 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg
1750 1783 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1751 1784 1 new obsolescence markers
1752 1785 $ hg unbundle ../bundle-2.hg
1753 1786 adding changesets
1754 1787 adding manifests
1755 1788 adding file changes
1756 1789 added 1 changesets with 1 changes to 1 files
1757 1790 (1 other changesets obsolete on arrival)
1758 1791 (run 'hg update' to get a working copy)
1759 1792 $ hg log -G
1760 1793 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1761 1794 |
1762 1795 | o 6:4715cf767440 (draft) [ ] d
1763 1796 |/
1764 1797 o 5:29346082e4a9 (draft) [ ] cc
1765 1798 |
1766 1799 o 3:d27fb9b06607 (draft) [ ] bb
1767 1800 |
1768 1801 | o 2:6fdef60fcbab (draft) [ ] b
1769 1802 |/
1770 1803 o 1:f9bd49731b0b (draft) [ ] aa
1771 1804
1772 1805
1773 1806 $ cd ..
1774 1807
1775 1808 Test issue 5783
1776 1809
1777 1810 $ hg init issue-5783 --config format.obsstore-version=0
1778 1811 $ cd issue-5783
1779 1812 $ touch a.cpp
1780 1813 $ hg add a.cpp
1781 1814 $ hg commit -m 'Add a.cpp'
1782 1815 $ echo 'Hello' > a.cpp
1783 1816 $ hg amend -n 'Testing::Obsstore' --config format.obsstore-version=0 --config extensions.amend=
1784 1817 $ touch b.cpp
1785 1818 $ hg add b.cpp
1786 1819 $ hg commit -m 'Add b.cpp'
1787 1820 $ echo 'Hello' > b.cpp
1788 1821 $ hg amend -n 'Testing::Obsstore2' --config extensions.amend=
1789 1822 $ hg debugobsolete
1790 1823 d1b09fe3ad2b2a03e23a72f0c582e29a49570145 1a1a11184d2588af24e767e5335d5d9d07e8c550 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore', 'operation': 'amend', 'user': 'test'}
1791 1824 1bfd8e3868f641e048b6667cd672c68932f26d00 79959ca316d5b27ac6be1dd0cfd0843a5b5412eb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore2', 'operation': 'amend', 'user': 'test'}
1792 1825 $ cd ..