push: use bundle2 to push obsmarkers when possible
Pierre-Yves David
r22347:7198cb9b default
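This changeset teaches the push path to send obsolescence markers through bundle2 when the remote supports it: a new 'obsmarkers' step is registered with the @b2partsgenerator decorator (see the added lines in exchange.py below) and reuses buildobsmarkerspart() to emit a B2X:OBSMARKERS part only when obsolete.commonversion() finds a marker format both sides understand. The sketch below is illustrative only (the step name 'examplestep' and the pushop stub are hypothetical); it shows the part-generator pattern used by exchange.py, where each step records itself in pushop.stepsdone so the legacy push code later in the function does not repeat the work.

    # Minimal sketch of the bundle2 part-generator pattern from exchange.py.
    # 'b2partsgenorder', 'b2partsgenmapping' and 'stepsdone' mirror the diff
    # below; 'examplestep' is a made-up step name.
    b2partsgenorder = []     # ordered list of step names
    b2partsgenmapping = {}   # step name -> generator function

    def b2partsgenerator(stepname):
        def dec(func):
            assert stepname not in b2partsgenmapping
            b2partsgenmapping[stepname] = func
            b2partsgenorder.append(stepname)
            return func
        return dec

    @b2partsgenerator('examplestep')
    def _pushb2example(pushop, bundler):
        if 'examplestep' in pushop.stepsdone:
            return              # another code path already handled this step
        pushop.stepsdone.add('examplestep')
        # ... add a part to 'bundler' here ...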
@@ -1,1045 +1,1056
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13 13
14 14 def readbundle(ui, fh, fname, vfs=None):
15 15 header = changegroup.readexactly(fh, 4)
16 16
17 17 alg = None
18 18 if not fname:
19 19 fname = "stream"
20 20 if not header.startswith('HG') and header.startswith('\0'):
21 21 fh = changegroup.headerlessfixup(fh, header)
22 22 header = "HG10"
23 23 alg = 'UN'
24 24 elif vfs:
25 25 fname = vfs.join(fname)
26 26
27 27 magic, version = header[0:2], header[2:4]
28 28
29 29 if magic != 'HG':
30 30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 31 if version == '10':
32 32 if alg is None:
33 33 alg = changegroup.readexactly(fh, 2)
34 34 return changegroup.unbundle10(fh, alg)
35 35 elif version == '2X':
36 36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 37 else:
38 38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
40 40 def buildobsmarkerspart(bundler, markers):
41 41 """add an obsmarker part to the bundler with <markers>
42 42
43 43 No part is created if markers is empty.
44 44 Raises ValueError if the bundler doesn't support any known obsmarker format.
45 45 """
46 46 if markers:
47 47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
48 48 version = obsolete.commonversion(remoteversions)
49 49 if version is None:
50 50 raise ValueError('bundler do not support common obsmarker format')
51 51 stream = obsolete.encodemarkers(markers, True, version=version)
52 52 return bundler.newpart('B2X:OBSMARKERS', data=stream)
53 53 return None
54 54
55 55 class pushoperation(object):
56 56 """An object that represents a single push operation
57 57
58 58 Its purpose is to carry push related state and very common operations.
59 59
60 60 A new one should be created at the beginning of each push and discarded
61 61 afterward.
62 62 """
63 63
64 64 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
65 65 # repo we push from
66 66 self.repo = repo
67 67 self.ui = repo.ui
68 68 # repo we push to
69 69 self.remote = remote
70 70 # force option provided
71 71 self.force = force
72 72 # revs to be pushed (None is "all")
73 73 self.revs = revs
74 74 # allow push of new branch
75 75 self.newbranch = newbranch
76 76 # did a local lock get acquired?
77 77 self.locallocked = None
78 78 # steps already performed
79 79 # (used to check which steps have already been performed through bundle2)
80 80 self.stepsdone = set()
81 81 # Integer version of the push result
82 82 # - None means nothing to push
83 83 # - 0 means HTTP error
84 84 # - 1 means we pushed and remote head count is unchanged *or*
85 85 # we have outgoing changesets but refused to push
86 86 # - other values as described by addchangegroup()
87 87 self.ret = None
88 88 # discover.outgoing object (contains common and outgoing data)
89 89 self.outgoing = None
90 90 # all remote heads before the push
91 91 self.remoteheads = None
92 92 # testable as a boolean indicating if any nodes are missing locally.
93 93 self.incoming = None
94 94 # phase changes that must be pushed alongside the changesets
95 95 self.outdatedphases = None
96 96 # phase changes that must be pushed if the changeset push fails
97 97 self.fallbackoutdatedphases = None
98 98 # outgoing obsmarkers
99 99 self.outobsmarkers = set()
100 100 # outgoing bookmarks
101 101 self.outbookmarks = []
102 102
103 103 @util.propertycache
104 104 def futureheads(self):
105 105 """future remote heads if the changeset push succeeds"""
106 106 return self.outgoing.missingheads
107 107
108 108 @util.propertycache
109 109 def fallbackheads(self):
110 110 """future remote heads if the changeset push fails"""
111 111 if self.revs is None:
112 112 # no target to push, all common heads are relevant
113 113 return self.outgoing.commonheads
114 114 unfi = self.repo.unfiltered()
115 115 # I want cheads = heads(::missingheads and ::commonheads)
116 116 # (missingheads is revs with secret changeset filtered out)
117 117 #
118 118 # This can be expressed as:
119 119 # cheads = ( (missingheads and ::commonheads)
120 120 # + (commonheads and ::missingheads))"
121 121 # )
122 122 #
123 123 # while trying to push we already computed the following:
124 124 # common = (::commonheads)
125 125 # missing = ((commonheads::missingheads) - commonheads)
126 126 #
127 127 # We can pick:
128 128 # * missingheads part of common (::commonheads)
129 129 common = set(self.outgoing.common)
130 130 nm = self.repo.changelog.nodemap
131 131 cheads = [node for node in self.revs if nm[node] in common]
132 132 # and
133 133 # * commonheads parents on missing
134 134 revset = unfi.set('%ln and parents(roots(%ln))',
135 135 self.outgoing.commonheads,
136 136 self.outgoing.missing)
137 137 cheads.extend(c.node() for c in revset)
138 138 return cheads
139 139
140 140 @property
141 141 def commonheads(self):
142 142 """set of all common heads after changeset bundle push"""
143 143 if self.ret:
144 144 return self.futureheads
145 145 else:
146 146 return self.fallbackheads
147 147
148 148 def push(repo, remote, force=False, revs=None, newbranch=False):
149 149 '''Push outgoing changesets (limited by revs) from a local
150 150 repository to remote. Return an integer:
151 151 - None means nothing to push
152 152 - 0 means HTTP error
153 153 - 1 means we pushed and remote head count is unchanged *or*
154 154 we have outgoing changesets but refused to push
155 155 - other values as described by addchangegroup()
156 156 '''
157 157 pushop = pushoperation(repo, remote, force, revs, newbranch)
158 158 if pushop.remote.local():
159 159 missing = (set(pushop.repo.requirements)
160 160 - pushop.remote.local().supported)
161 161 if missing:
162 162 msg = _("required features are not"
163 163 " supported in the destination:"
164 164 " %s") % (', '.join(sorted(missing)))
165 165 raise util.Abort(msg)
166 166
167 167 # there are two ways to push to remote repo:
168 168 #
169 169 # addchangegroup assumes local user can lock remote
170 170 # repo (local filesystem, old ssh servers).
171 171 #
172 172 # unbundle assumes local user cannot lock remote repo (new ssh
173 173 # servers, http servers).
174 174
175 175 if not pushop.remote.canpush():
176 176 raise util.Abort(_("destination does not support push"))
177 177 # get local lock as we might write phase data
178 178 locallock = None
179 179 try:
180 180 locallock = pushop.repo.lock()
181 181 pushop.locallocked = True
182 182 except IOError, err:
183 183 pushop.locallocked = False
184 184 if err.errno != errno.EACCES:
185 185 raise
186 186 # source repo cannot be locked.
187 187 # We do not abort the push, but just disable the local phase
188 188 # synchronisation.
189 189 msg = 'cannot lock source repository: %s\n' % err
190 190 pushop.ui.debug(msg)
191 191 try:
192 192 pushop.repo.checkpush(pushop)
193 193 lock = None
194 194 unbundle = pushop.remote.capable('unbundle')
195 195 if not unbundle:
196 196 lock = pushop.remote.lock()
197 197 try:
198 198 _pushdiscovery(pushop)
199 199 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
200 200 False)
201 201 and pushop.remote.capable('bundle2-exp')):
202 202 _pushbundle2(pushop)
203 203 _pushchangeset(pushop)
204 204 _pushsyncphase(pushop)
205 205 _pushobsolete(pushop)
206 206 _pushbookmark(pushop)
207 207 finally:
208 208 if lock is not None:
209 209 lock.release()
210 210 finally:
211 211 if locallock is not None:
212 212 locallock.release()
213 213
214 214 return pushop.ret
215 215
216 216 # list of steps to perform discovery before push
217 217 pushdiscoveryorder = []
218 218
219 219 # Mapping between step name and function
220 220 #
221 221 # This exists to help extensions wrap steps if necessary
222 222 pushdiscoverymapping = {}
223 223
224 224 def pushdiscovery(stepname):
225 225 """decorator for function performing discovery before push
226 226
227 227 The function is added to the step -> function mapping and appended to the
228 228 list of steps. Beware that decorated function will be added in order (this
229 229 may matter).
230 230
231 231 You can only use this decorator for a new step; if you want to wrap a step
232 232 from an extension, change the pushdiscoverymapping dictionary directly."""
233 233 def dec(func):
234 234 assert stepname not in pushdiscoverymapping
235 235 pushdiscoverymapping[stepname] = func
236 236 pushdiscoveryorder.append(stepname)
237 237 return func
238 238 return dec
239 239
240 240 def _pushdiscovery(pushop):
241 241 """Run all discovery steps"""
242 242 for stepname in pushdiscoveryorder:
243 243 step = pushdiscoverymapping[stepname]
244 244 step(pushop)
245 245
246 246 @pushdiscovery('changeset')
247 247 def _pushdiscoverychangeset(pushop):
248 248 """discover the changesets that need to be pushed"""
249 249 unfi = pushop.repo.unfiltered()
250 250 fci = discovery.findcommonincoming
251 251 commoninc = fci(unfi, pushop.remote, force=pushop.force)
252 252 common, inc, remoteheads = commoninc
253 253 fco = discovery.findcommonoutgoing
254 254 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
255 255 commoninc=commoninc, force=pushop.force)
256 256 pushop.outgoing = outgoing
257 257 pushop.remoteheads = remoteheads
258 258 pushop.incoming = inc
259 259
260 260 @pushdiscovery('phase')
261 261 def _pushdiscoveryphase(pushop):
262 262 """discover the phases that need to be pushed
263 263
264 264 (computed for both success and failure case for changesets push)"""
265 265 outgoing = pushop.outgoing
266 266 unfi = pushop.repo.unfiltered()
267 267 remotephases = pushop.remote.listkeys('phases')
268 268 publishing = remotephases.get('publishing', False)
269 269 ana = phases.analyzeremotephases(pushop.repo,
270 270 pushop.fallbackheads,
271 271 remotephases)
272 272 pheads, droots = ana
273 273 extracond = ''
274 274 if not publishing:
275 275 extracond = ' and public()'
276 276 revset = 'heads((%%ln::%%ln) %s)' % extracond
277 277 # Get the list of all revs that are draft on remote but public here.
278 278 # XXX Beware that the revset breaks if droots is not strictly
279 279 # XXX made of roots; we may want to ensure it is, but that is costly
280 280 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
281 281 if not outgoing.missing:
282 282 future = fallback
283 283 else:
284 284 # add the changesets we are going to push as draft
285 285 #
286 286 # should not be necessary for publishing servers, but because of an
287 287 # issue fixed in xxxxx we have to do it anyway.
288 288 fdroots = list(unfi.set('roots(%ln + %ln::)',
289 289 outgoing.missing, droots))
290 290 fdroots = [f.node() for f in fdroots]
291 291 future = list(unfi.set(revset, fdroots, pushop.futureheads))
292 292 pushop.outdatedphases = future
293 293 pushop.fallbackoutdatedphases = fallback
294 294
295 295 @pushdiscovery('obsmarker')
296 296 def _pushdiscoveryobsmarkers(pushop):
297 297 if (obsolete._enabled
298 298 and pushop.repo.obsstore
299 299 and 'obsolete' in pushop.remote.listkeys('namespaces')):
300 300 pushop.outobsmarkers = pushop.repo.obsstore
301 301
302 302 @pushdiscovery('bookmarks')
303 303 def _pushdiscoverybookmarks(pushop):
304 304 ui = pushop.ui
305 305 repo = pushop.repo.unfiltered()
306 306 remote = pushop.remote
307 307 ui.debug("checking for updated bookmarks\n")
308 308 ancestors = ()
309 309 if pushop.revs:
310 310 revnums = map(repo.changelog.rev, pushop.revs)
311 311 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
312 312 remotebookmark = remote.listkeys('bookmarks')
313 313
314 314 comp = bookmarks.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
315 315 addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
316 316 for b, scid, dcid in advsrc:
317 317 if not ancestors or repo[scid].rev() in ancestors:
318 318 pushop.outbookmarks.append((b, dcid, scid))
319 319
320 320 def _pushcheckoutgoing(pushop):
321 321 outgoing = pushop.outgoing
322 322 unfi = pushop.repo.unfiltered()
323 323 if not outgoing.missing:
324 324 # nothing to push
325 325 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
326 326 return False
327 327 # something to push
328 328 if not pushop.force:
329 329 # if repo.obsstore is empty --> no obsolete changesets,
330 330 # so we can skip the iteration
331 331 if unfi.obsstore:
332 332 # these messages are here for 80-char-limit reasons
333 333 mso = _("push includes obsolete changeset: %s!")
334 334 mst = "push includes %s changeset: %s!"
335 335 # plain versions for i18n tool to detect them
336 336 _("push includes unstable changeset: %s!")
337 337 _("push includes bumped changeset: %s!")
338 338 _("push includes divergent changeset: %s!")
339 339 # If there is at least one obsolete or unstable
340 340 # changeset in missing, at least one of the missing
341 341 # heads will be obsolete or unstable. So checking
342 342 # heads only is ok
343 343 for node in outgoing.missingheads:
344 344 ctx = unfi[node]
345 345 if ctx.obsolete():
346 346 raise util.Abort(mso % ctx)
347 347 elif ctx.troubled():
348 348 raise util.Abort(_(mst)
349 349 % (ctx.troubles()[0],
350 350 ctx))
351 351 newbm = pushop.ui.configlist('bookmarks', 'pushing')
352 352 discovery.checkheads(unfi, pushop.remote, outgoing,
353 353 pushop.remoteheads,
354 354 pushop.newbranch,
355 355 bool(pushop.incoming),
356 356 newbm)
357 357 return True
358 358
359 359 # List of names of steps to perform for an outgoing bundle2, order matters.
360 360 b2partsgenorder = []
361 361
362 362 # Mapping between step name and function
363 363 #
364 364 # This exists to help extensions wrap steps if necessary
365 365 b2partsgenmapping = {}
366 366
367 367 def b2partsgenerator(stepname):
368 368 """decorator for function generating bundle2 part
369 369
370 370 The function is added to the step -> function mapping and appended to the
371 371 list of steps. Beware that decorated functions will be added in order
372 372 (this may matter).
373 373
374 374 You can only use this decorator for new steps; if you want to wrap a step
375 375 from an extension, change the b2partsgenmapping dictionary directly."""
376 376 def dec(func):
377 377 assert stepname not in b2partsgenmapping
378 378 b2partsgenmapping[stepname] = func
379 379 b2partsgenorder.append(stepname)
380 380 return func
381 381 return dec
382 382
383 383 @b2partsgenerator('changeset')
384 384 def _pushb2ctx(pushop, bundler):
385 385 """handle changegroup push through bundle2
386 386
387 387 addchangegroup result is stored in the ``pushop.ret`` attribute.
388 388 """
389 389 if 'changesets' in pushop.stepsdone:
390 390 return
391 391 pushop.stepsdone.add('changesets')
392 392 # Send known heads to the server for race detection.
393 393 if not _pushcheckoutgoing(pushop):
394 394 return
395 395 pushop.repo.prepushoutgoinghooks(pushop.repo,
396 396 pushop.remote,
397 397 pushop.outgoing)
398 398 if not pushop.force:
399 399 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
400 400 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
401 401 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
402 402 def handlereply(op):
403 403 """extract addchangegroup returns from the server reply"""
404 404 cgreplies = op.records.getreplies(cgpart.id)
405 405 assert len(cgreplies['changegroup']) == 1
406 406 pushop.ret = cgreplies['changegroup'][0]['return']
407 407 return handlereply
408 408
409 409 @b2partsgenerator('phase')
410 410 def _pushb2phases(pushop, bundler):
411 411 """handle phase push through bundle2"""
412 412 if 'phases' in pushop.stepsdone:
413 413 return
414 414 b2caps = bundle2.bundle2caps(pushop.remote)
415 415 if not 'b2x:pushkey' in b2caps:
416 416 return
417 417 pushop.stepsdone.add('phases')
418 418 part2node = []
419 419 enc = pushkey.encode
420 420 for newremotehead in pushop.outdatedphases:
421 421 part = bundler.newpart('b2x:pushkey')
422 422 part.addparam('namespace', enc('phases'))
423 423 part.addparam('key', enc(newremotehead.hex()))
424 424 part.addparam('old', enc(str(phases.draft)))
425 425 part.addparam('new', enc(str(phases.public)))
426 426 part2node.append((part.id, newremotehead))
427 427 def handlereply(op):
428 428 for partid, node in part2node:
429 429 partrep = op.records.getreplies(partid)
430 430 results = partrep['pushkey']
431 431 assert len(results) <= 1
432 432 msg = None
433 433 if not results:
434 434 msg = _('server ignored update of %s to public!\n') % node
435 435 elif not int(results[0]['return']):
436 436 msg = _('updating %s to public failed!\n') % node
437 437 if msg is not None:
438 438 pushop.ui.warn(msg)
439 439 return handlereply
440 440
441 @b2partsgenerator('obsmarkers')
442 def _pushb2obsmarkers(pushop, bundler):
443 if 'obsmarkers' in pushop.stepsdone:
444 return
445 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
446 if obsolete.commonversion(remoteversions) is None:
447 return
448 pushop.stepsdone.add('obsmarkers')
449 if pushop.outobsmarkers:
450 buildobsmarkerspart(bundler, pushop.outobsmarkers)
451
441 452 @b2partsgenerator('bookmarks')
442 453 def _pushb2bookmarks(pushop, bundler):
443 454 """handle bookmark push through bundle2"""
444 455 if 'bookmarks' in pushop.stepsdone:
445 456 return
446 457 b2caps = bundle2.bundle2caps(pushop.remote)
447 458 if 'b2x:pushkey' not in b2caps:
448 459 return
449 460 pushop.stepsdone.add('bookmarks')
450 461 part2book = []
451 462 enc = pushkey.encode
452 463 for book, old, new in pushop.outbookmarks:
453 464 part = bundler.newpart('b2x:pushkey')
454 465 part.addparam('namespace', enc('bookmarks'))
455 466 part.addparam('key', enc(book))
456 467 part.addparam('old', enc(old))
457 468 part.addparam('new', enc(new))
458 469 part2book.append((part.id, book))
459 470 def handlereply(op):
460 471 for partid, book in part2book:
461 472 partrep = op.records.getreplies(partid)
462 473 results = partrep['pushkey']
463 474 assert len(results) <= 1
464 475 if not results:
465 476 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
466 477 else:
467 478 ret = int(results[0]['return'])
468 479 if ret:
469 480 pushop.ui.status(_("updating bookmark %s\n") % book)
470 481 else:
471 482 pushop.ui.warn(_('updating bookmark %s failed!\n') % book)
472 483 return handlereply
473 484
474 485
475 486 def _pushbundle2(pushop):
476 487 """push data to the remote using bundle2
477 488
478 489 The only currently supported type of data is changegroup but this will
479 490 evolve in the future."""
480 491 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
481 492 # create reply capability
482 493 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
483 494 bundler.newpart('b2x:replycaps', data=capsblob)
484 495 replyhandlers = []
485 496 for partgenname in b2partsgenorder:
486 497 partgen = b2partsgenmapping[partgenname]
487 498 ret = partgen(pushop, bundler)
488 499 if callable(ret):
489 500 replyhandlers.append(ret)
490 501 # do not push if nothing to push
491 502 if bundler.nbparts <= 1:
492 503 return
493 504 stream = util.chunkbuffer(bundler.getchunks())
494 505 try:
495 506 reply = pushop.remote.unbundle(stream, ['force'], 'push')
496 507 except error.BundleValueError, exc:
497 508 raise util.Abort('missing support for %s' % exc)
498 509 try:
499 510 op = bundle2.processbundle(pushop.repo, reply)
500 511 except error.BundleValueError, exc:
501 512 raise util.Abort('missing support for %s' % exc)
502 513 for rephand in replyhandlers:
503 514 rephand(op)
504 515
505 516 def _pushchangeset(pushop):
506 517 """Make the actual push of changeset bundle to remote repo"""
507 518 if 'changesets' in pushop.stepsdone:
508 519 return
509 520 pushop.stepsdone.add('changesets')
510 521 if not _pushcheckoutgoing(pushop):
511 522 return
512 523 pushop.repo.prepushoutgoinghooks(pushop.repo,
513 524 pushop.remote,
514 525 pushop.outgoing)
515 526 outgoing = pushop.outgoing
516 527 unbundle = pushop.remote.capable('unbundle')
517 528 # TODO: get bundlecaps from remote
518 529 bundlecaps = None
519 530 # create a changegroup from local
520 531 if pushop.revs is None and not (outgoing.excluded
521 532 or pushop.repo.changelog.filteredrevs):
522 533 # push everything,
523 534 # use the fast path, no race possible on push
524 535 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
525 536 cg = changegroup.getsubset(pushop.repo,
526 537 outgoing,
527 538 bundler,
528 539 'push',
529 540 fastpath=True)
530 541 else:
531 542 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
532 543 bundlecaps)
533 544
534 545 # apply changegroup to remote
535 546 if unbundle:
536 547 # local repo finds heads on server, finds out what
537 548 # revs it must push. once revs transferred, if server
538 549 # finds it has different heads (someone else won
539 550 # commit/push race), server aborts.
540 551 if pushop.force:
541 552 remoteheads = ['force']
542 553 else:
543 554 remoteheads = pushop.remoteheads
544 555 # ssh: return remote's addchangegroup()
545 556 # http: return remote's addchangegroup() or 0 for error
546 557 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
547 558 pushop.repo.url())
548 559 else:
549 560 # we return an integer indicating remote head count
550 561 # change
551 562 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
552 563
553 564 def _pushsyncphase(pushop):
554 565 """synchronise phase information locally and remotely"""
555 566 cheads = pushop.commonheads
556 567 # even when we don't push, exchanging phase data is useful
557 568 remotephases = pushop.remote.listkeys('phases')
558 569 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
559 570 and remotephases # server supports phases
560 571 and pushop.ret is None # nothing was pushed
561 572 and remotephases.get('publishing', False)):
562 573 # When:
563 574 # - this is a subrepo push
564 575 # - and remote support phase
565 576 # - and no changeset was pushed
566 577 # - and remote is publishing
567 578 # We may be in issue 3871 case!
568 579 # We drop the possible phase synchronisation done by
569 580 # courtesy to publish changesets possibly locally draft
570 581 # on the remote.
571 582 remotephases = {'publishing': 'True'}
572 583 if not remotephases: # old server, or public-only reply from a non-publishing one
573 584 _localphasemove(pushop, cheads)
574 585 # don't push any phase data as there is nothing to push
575 586 else:
576 587 ana = phases.analyzeremotephases(pushop.repo, cheads,
577 588 remotephases)
578 589 pheads, droots = ana
579 590 ### Apply remote phase on local
580 591 if remotephases.get('publishing', False):
581 592 _localphasemove(pushop, cheads)
582 593 else: # publish = False
583 594 _localphasemove(pushop, pheads)
584 595 _localphasemove(pushop, cheads, phases.draft)
585 596 ### Apply local phase on remote
586 597
587 598 if pushop.ret:
588 599 if 'phases' in pushop.stepsdone:
589 600 # phases already pushed through bundle2
590 601 return
591 602 outdated = pushop.outdatedphases
592 603 else:
593 604 outdated = pushop.fallbackoutdatedphases
594 605
595 606 pushop.stepsdone.add('phases')
596 607
597 608 # filter heads already turned public by the push
598 609 outdated = [c for c in outdated if c.node() not in pheads]
599 610 b2caps = bundle2.bundle2caps(pushop.remote)
600 611 if 'b2x:pushkey' in b2caps:
601 612 # server supports bundle2, let's do a batched push through it
602 613 #
603 614 # This will eventually be unified with the changesets bundle2 push
604 615 bundler = bundle2.bundle20(pushop.ui, b2caps)
605 616 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
606 617 bundler.newpart('b2x:replycaps', data=capsblob)
607 618 part2node = []
608 619 enc = pushkey.encode
609 620 for newremotehead in outdated:
610 621 part = bundler.newpart('b2x:pushkey')
611 622 part.addparam('namespace', enc('phases'))
612 623 part.addparam('key', enc(newremotehead.hex()))
613 624 part.addparam('old', enc(str(phases.draft)))
614 625 part.addparam('new', enc(str(phases.public)))
615 626 part2node.append((part.id, newremotehead))
616 627 stream = util.chunkbuffer(bundler.getchunks())
617 628 try:
618 629 reply = pushop.remote.unbundle(stream, ['force'], 'push')
619 630 op = bundle2.processbundle(pushop.repo, reply)
620 631 except error.BundleValueError, exc:
621 632 raise util.Abort('missing support for %s' % exc)
622 633 for partid, node in part2node:
623 634 partrep = op.records.getreplies(partid)
624 635 results = partrep['pushkey']
625 636 assert len(results) <= 1
626 637 msg = None
627 638 if not results:
628 639 msg = _('server ignored update of %s to public!\n') % node
629 640 elif not int(results[0]['return']):
630 641 msg = _('updating %s to public failed!\n') % node
631 642 if msg is not None:
632 643 pushop.ui.warn(msg)
633 644
634 645 else:
635 646 # fall back to the independent pushkey command
636 647 for newremotehead in outdated:
637 648 r = pushop.remote.pushkey('phases',
638 649 newremotehead.hex(),
639 650 str(phases.draft),
640 651 str(phases.public))
641 652 if not r:
642 653 pushop.ui.warn(_('updating %s to public failed!\n')
643 654 % newremotehead)
644 655
645 656 def _localphasemove(pushop, nodes, phase=phases.public):
646 657 """move <nodes> to <phase> in the local source repo"""
647 658 if pushop.locallocked:
648 659 tr = pushop.repo.transaction('push-phase-sync')
649 660 try:
650 661 phases.advanceboundary(pushop.repo, tr, phase, nodes)
651 662 tr.close()
652 663 finally:
653 664 tr.release()
654 665 else:
655 666 # repo is not locked, do not change any phases!
657 668 # Inform the user that the phases should have been moved when
657 668 # applicable.
658 669 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
659 670 phasestr = phases.phasenames[phase]
660 671 if actualmoves:
661 672 pushop.ui.status(_('cannot lock source repo, skipping '
662 673 'local %s phase update\n') % phasestr)
663 674
664 675 def _pushobsolete(pushop):
665 676 """utility function to push obsolete markers to a remote"""
666 677 if 'obsmarkers' in pushop.stepsdone:
667 678 return
668 679 pushop.ui.debug('try to push obsolete markers to remote\n')
669 680 repo = pushop.repo
670 681 remote = pushop.remote
671 682 pushop.stepsdone.add('obsmarkers')
672 683 if (pushop.outobsmarkers):
673 684 rslts = []
674 685 remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
675 686 for key in sorted(remotedata, reverse=True):
676 687 # reverse sort to ensure we end with dump0
677 688 data = remotedata[key]
678 689 rslts.append(remote.pushkey('obsolete', key, '', data))
679 690 if [r for r in rslts if not r]:
680 691 msg = _('failed to push some obsolete markers!\n')
681 692 repo.ui.warn(msg)
682 693
683 694 def _pushbookmark(pushop):
684 695 """Update bookmark position on remote"""
685 696 if pushop.ret == 0 or 'bookmarks' in pushop.stepsdone:
686 697 return
687 698 pushop.stepsdone.add('bookmarks')
688 699 ui = pushop.ui
689 700 remote = pushop.remote
690 701 for b, old, new in pushop.outbookmarks:
691 702 if remote.pushkey('bookmarks', b, old, new):
692 703 ui.status(_("updating bookmark %s\n") % b)
693 704 else:
694 705 ui.warn(_('updating bookmark %s failed!\n') % b)
695 706
696 707 class pulloperation(object):
697 708 """An object that represents a single pull operation
698 709
699 710 Its purpose is to carry pull related state and very common operations.
700 711
701 712 A new one should be created at the beginning of each pull and discarded
702 713 afterward.
703 714 """
704 715
705 716 def __init__(self, repo, remote, heads=None, force=False):
706 717 # repo we pull into
707 718 self.repo = repo
708 719 # repo we pull from
709 720 self.remote = remote
710 721 # revision we try to pull (None is "all")
711 722 self.heads = heads
712 723 # do we force pull?
713 724 self.force = force
715 726 # the name of the pull transaction
715 726 self._trname = 'pull\n' + util.hidepassword(remote.url())
716 727 # hold the transaction once created
717 728 self._tr = None
718 729 # set of common changeset between local and remote before pull
719 730 self.common = None
721 732 # set of pulled heads
721 732 self.rheads = None
723 734 # list of missing changesets to fetch remotely
723 734 self.fetch = None
724 735 # result of changegroup pulling (used as return code by pull)
725 736 self.cgresult = None
727 738 # list of steps remaining to do (related to future bundle2 usage)
727 738 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
728 739
729 740 @util.propertycache
730 741 def pulledsubset(self):
731 742 """heads of the set of changesets targeted by the pull"""
732 743 # compute target subset
733 744 if self.heads is None:
734 745 # We pulled everything possible
735 746 # sync on everything common
736 747 c = set(self.common)
737 748 ret = list(self.common)
738 749 for n in self.rheads:
739 750 if n not in c:
740 751 ret.append(n)
741 752 return ret
742 753 else:
743 754 # We pulled a specific subset
744 755 # sync on this subset
745 756 return self.heads
746 757
747 758 def gettransaction(self):
748 759 """get appropriate pull transaction, creating it if needed"""
749 760 if self._tr is None:
750 761 self._tr = self.repo.transaction(self._trname)
751 762 return self._tr
752 763
753 764 def closetransaction(self):
754 765 """close transaction if created"""
755 766 if self._tr is not None:
756 767 self._tr.close()
757 768
758 769 def releasetransaction(self):
759 770 """release transaction if created"""
760 771 if self._tr is not None:
761 772 self._tr.release()
762 773
763 774 def pull(repo, remote, heads=None, force=False):
764 775 pullop = pulloperation(repo, remote, heads, force)
765 776 if pullop.remote.local():
766 777 missing = set(pullop.remote.requirements) - pullop.repo.supported
767 778 if missing:
768 779 msg = _("required features are not"
769 780 " supported in the destination:"
770 781 " %s") % (', '.join(sorted(missing)))
771 782 raise util.Abort(msg)
772 783
773 784 lock = pullop.repo.lock()
774 785 try:
775 786 _pulldiscovery(pullop)
776 787 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
777 788 and pullop.remote.capable('bundle2-exp')):
778 789 _pullbundle2(pullop)
779 790 if 'changegroup' in pullop.todosteps:
780 791 _pullchangeset(pullop)
781 792 if 'phases' in pullop.todosteps:
782 793 _pullphase(pullop)
783 794 if 'obsmarkers' in pullop.todosteps:
784 795 _pullobsolete(pullop)
785 796 pullop.closetransaction()
786 797 finally:
787 798 pullop.releasetransaction()
788 799 lock.release()
789 800
790 801 return pullop.cgresult
791 802
792 803 def _pulldiscovery(pullop):
793 804 """discovery phase for the pull
794 805
795 806 Currently handles changeset discovery only; will change to handle all
796 807 discovery at some point."""
797 808 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
798 809 pullop.remote,
799 810 heads=pullop.heads,
800 811 force=pullop.force)
801 812 pullop.common, pullop.fetch, pullop.rheads = tmp
802 813
803 814 def _pullbundle2(pullop):
804 815 """pull data using bundle2
805 816
806 817 For now, the only supported data is the changegroup."""
807 818 remotecaps = bundle2.bundle2caps(pullop.remote)
808 819 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
809 820 # pulling changegroup
810 821 pullop.todosteps.remove('changegroup')
811 822
812 823 kwargs['common'] = pullop.common
813 824 kwargs['heads'] = pullop.heads or pullop.rheads
814 825 if 'b2x:listkeys' in remotecaps:
815 826 kwargs['listkeys'] = ['phase']
816 827 if not pullop.fetch:
817 828 pullop.repo.ui.status(_("no changes found\n"))
818 829 pullop.cgresult = 0
819 830 else:
820 831 if pullop.heads is None and list(pullop.common) == [nullid]:
821 832 pullop.repo.ui.status(_("requesting all changes\n"))
822 833 _pullbundle2extraprepare(pullop, kwargs)
823 834 if kwargs.keys() == ['format']:
824 835 return # nothing to pull
825 836 bundle = pullop.remote.getbundle('pull', **kwargs)
826 837 try:
827 838 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
828 839 except error.BundleValueError, exc:
829 840 raise util.Abort('missing support for %s' % exc)
830 841
831 842 if pullop.fetch:
832 843 assert len(op.records['changegroup']) == 1
833 844 pullop.cgresult = op.records['changegroup'][0]['return']
834 845
835 846 # processing phases change
836 847 for namespace, value in op.records['listkeys']:
837 848 if namespace == 'phases':
838 849 _pullapplyphases(pullop, value)
839 850
840 851 def _pullbundle2extraprepare(pullop, kwargs):
841 852 """hook function so that extensions can extend the getbundle call"""
842 853 pass
843 854
844 855 def _pullchangeset(pullop):
845 856 """pull changeset from unbundle into the local repo"""
846 857 # We delay opening the transaction as late as possible so we
847 858 # don't open a transaction for nothing and don't break a future useful
848 859 # rollback call
849 860 pullop.todosteps.remove('changegroup')
850 861 if not pullop.fetch:
851 862 pullop.repo.ui.status(_("no changes found\n"))
852 863 pullop.cgresult = 0
853 864 return
854 865 pullop.gettransaction()
855 866 if pullop.heads is None and list(pullop.common) == [nullid]:
856 867 pullop.repo.ui.status(_("requesting all changes\n"))
857 868 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
858 869 # issue1320, avoid a race if remote changed after discovery
859 870 pullop.heads = pullop.rheads
860 871
861 872 if pullop.remote.capable('getbundle'):
862 873 # TODO: get bundlecaps from remote
863 874 cg = pullop.remote.getbundle('pull', common=pullop.common,
864 875 heads=pullop.heads or pullop.rheads)
865 876 elif pullop.heads is None:
866 877 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
867 878 elif not pullop.remote.capable('changegroupsubset'):
868 879 raise util.Abort(_("partial pull cannot be done because "
869 880 "other repository doesn't support "
870 881 "changegroupsubset."))
871 882 else:
872 883 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
873 884 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
874 885 pullop.remote.url())
875 886
876 887 def _pullphase(pullop):
877 888 # Get remote phases data from remote
878 889 remotephases = pullop.remote.listkeys('phases')
879 890 _pullapplyphases(pullop, remotephases)
880 891
881 892 def _pullapplyphases(pullop, remotephases):
882 893 """apply phase movement from observed remote state"""
883 894 pullop.todosteps.remove('phases')
884 895 publishing = bool(remotephases.get('publishing', False))
885 896 if remotephases and not publishing:
886 897 # remote is new and unpublishing
887 898 pheads, _dr = phases.analyzeremotephases(pullop.repo,
888 899 pullop.pulledsubset,
889 900 remotephases)
890 901 dheads = pullop.pulledsubset
891 902 else:
892 903 # Remote is old or publishing; all common changesets
893 904 # should be seen as public
894 905 pheads = pullop.pulledsubset
895 906 dheads = []
896 907 unfi = pullop.repo.unfiltered()
897 908 phase = unfi._phasecache.phase
898 909 rev = unfi.changelog.nodemap.get
899 910 public = phases.public
900 911 draft = phases.draft
901 912
902 913 # exclude changesets already public locally and update the others
903 914 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
904 915 if pheads:
905 916 tr = pullop.gettransaction()
906 917 phases.advanceboundary(pullop.repo, tr, public, pheads)
907 918
908 919 # exclude changesets already draft locally and update the others
909 920 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
910 921 if dheads:
911 922 tr = pullop.gettransaction()
912 923 phases.advanceboundary(pullop.repo, tr, draft, dheads)
913 924
914 925 def _pullobsolete(pullop):
915 926 """utility function to pull obsolete markers from a remote
916 927
917 928 `gettransaction` is a function that returns the pull transaction, creating
918 929 one if necessary. We return the transaction to inform the calling code that
919 930 a new transaction has been created (when applicable).
920 931
921 932 Exists mostly to allow overriding for experimentation purposes"""
922 933 pullop.todosteps.remove('obsmarkers')
923 934 tr = None
924 935 if obsolete._enabled:
925 936 pullop.repo.ui.debug('fetching remote obsolete markers\n')
926 937 remoteobs = pullop.remote.listkeys('obsolete')
927 938 if 'dump0' in remoteobs:
928 939 tr = pullop.gettransaction()
929 940 for key in sorted(remoteobs, reverse=True):
930 941 if key.startswith('dump'):
931 942 data = base85.b85decode(remoteobs[key])
932 943 pullop.repo.obsstore.mergemarkers(tr, data)
933 944 pullop.repo.invalidatevolatilesets()
934 945 return tr
935 946
936 947 def caps20to10(repo):
937 948 """return a set with appropriate options to use bundle20 during getbundle"""
938 949 caps = set(['HG2X'])
939 950 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
940 951 caps.add('bundle2=' + urllib.quote(capsblob))
941 952 return caps
942 953
943 954 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
944 955 **kwargs):
945 956 """return a full bundle (with potentially multiple kinds of parts)
946 957
947 958 Could be a bundle HG10 or a bundle HG2X depending on the bundlecaps
948 959 passed. For now, the bundle can contain only a changegroup, but this will
949 960 change when more part types become available for bundle2.
950 961
951 962 This is different from changegroup.getbundle, which only returns an HG10
952 963 changegroup bundle. They may eventually get reunited in the future when we
953 964 have a clearer idea of the API we want to use to query different data.
954 965
955 966 The implementation is at a very early stage and will get massive rework
956 967 when the API of bundle is refined.
957 968 """
958 969 cg = None
959 970 if kwargs.get('cg', True):
960 971 # build changegroup bundle here.
961 972 cg = changegroup.getbundle(repo, source, heads=heads,
962 973 common=common, bundlecaps=bundlecaps)
963 974 elif 'HG2X' not in bundlecaps:
964 975 raise ValueError(_('request for bundle10 must include changegroup'))
965 976 if bundlecaps is None or 'HG2X' not in bundlecaps:
966 977 if kwargs:
967 978 raise ValueError(_('unsupported getbundle arguments: %s')
968 979 % ', '.join(sorted(kwargs.keys())))
969 980 return cg
970 981 # very crude first implementation,
971 982 # the bundle API will change and the generation will be done lazily.
972 983 b2caps = {}
973 984 for bcaps in bundlecaps:
974 985 if bcaps.startswith('bundle2='):
975 986 blob = urllib.unquote(bcaps[len('bundle2='):])
976 987 b2caps.update(bundle2.decodecaps(blob))
977 988 bundler = bundle2.bundle20(repo.ui, b2caps)
978 989 if cg:
979 990 bundler.newpart('b2x:changegroup', data=cg.getchunks())
980 991 listkeys = kwargs.get('listkeys', ())
981 992 for namespace in listkeys:
982 993 part = bundler.newpart('b2x:listkeys')
983 994 part.addparam('namespace', namespace)
984 995 keys = repo.listkeys(namespace).items()
985 996 part.data = pushkey.encodekeys(keys)
986 997 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
987 998 bundlecaps=bundlecaps, **kwargs)
988 999 return util.chunkbuffer(bundler.getchunks())
989 1000
990 1001 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
991 1002 bundlecaps=None, **kwargs):
992 1003 """hook function to let extensions add parts to the requested bundle"""
993 1004 pass
994 1005
995 1006 def check_heads(repo, their_heads, context):
996 1007 """check if the heads of a repo have been modified
997 1008
998 1009 Used by peer for unbundling.
999 1010 """
1000 1011 heads = repo.heads()
1001 1012 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1002 1013 if not (their_heads == ['force'] or their_heads == heads or
1003 1014 their_heads == ['hashed', heads_hash]):
1004 1015 # someone else committed/pushed/unbundled while we
1005 1016 # were transferring data
1006 1017 raise error.PushRaced('repository changed while %s - '
1007 1018 'please try again' % context)
1008 1019
1009 1020 def unbundle(repo, cg, heads, source, url):
1010 1021 """Apply a bundle to a repo.
1011 1022
1012 1023 this function makes sure the repo is locked during the application and has a
1013 1024 mechanism to check that no push race occurred between the creation of the
1014 1025 bundle and its application.
1015 1026
1016 1027 If the push was raced, a PushRaced exception is raised."""
1017 1028 r = 0
1018 1029 # need a transaction when processing a bundle2 stream
1019 1030 tr = None
1020 1031 lock = repo.lock()
1021 1032 try:
1022 1033 check_heads(repo, heads, 'uploading changes')
1023 1034 # push can proceed
1024 1035 if util.safehasattr(cg, 'params'):
1025 1036 try:
1026 1037 tr = repo.transaction('unbundle')
1027 1038 tr.hookargs['bundle2-exp'] = '1'
1028 1039 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1029 1040 cl = repo.unfiltered().changelog
1030 1041 p = cl.writepending() and repo.root or ""
1031 1042 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1032 1043 url=url, pending=p, **tr.hookargs)
1033 1044 tr.close()
1034 1045 repo.hook('b2x-transactionclose', source=source, url=url,
1035 1046 **tr.hookargs)
1036 1047 except Exception, exc:
1037 1048 exc.duringunbundle2 = True
1038 1049 raise
1039 1050 else:
1040 1051 r = changegroup.addchangegroup(repo, cg, source, url)
1041 1052 finally:
1042 1053 if tr is not None:
1043 1054 tr.release()
1044 1055 lock.release()
1045 1056 return r
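The second hunk below updates the bundle2 tests. Note how the new step cooperates with the legacy pushkey path shown above: _pushb2obsmarkers marks the 'obsmarkers' step done only when obsolete.commonversion() finds a marker format shared with the remote, so _pushobsolete (the pushkey fallback) still handles remotes without that capability. A rough, self-contained sketch of that negotiation, with made-up version numbers and an invented 'localversions' default (the real logic lives in the obsolete module):

    # Hypothetical stand-in for the obsmarker format negotiation; the version
    # numbers and the 'localversions' default are invented for illustration.
    def commonversion(remoteversions, localversions=(0, 1)):
        """return the newest obsmarker format known to both sides, or None"""
        shared = set(remoteversions) & set(localversions)
        return max(shared) if shared else None

    assert commonversion([0, 1]) == 1   # common format -> push markers via a bundle2 part
    assert commonversion([]) is None    # no common format -> skip step, use pushkey fallback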
@@ -1,1202 +1,1203
1 1
2 2 $ getmainid() {
3 3 > hg -R main log --template '{node}\n' --rev "$1"
4 4 > }
5 5
6 6 Create an extension to test bundle2 API
7 7
8 8 $ cat > bundle2.py << EOF
9 9 > """A small extension to test bundle2 implementation
10 10 >
11 11 > Current bundle2 implementation is far too limited to be used in any core
12 12 > code. We still need to be able to test it while it grows up.
13 13 > """
14 14 >
15 15 > import sys, os
16 16 > from mercurial import cmdutil
17 17 > from mercurial import util
18 18 > from mercurial import bundle2
19 19 > from mercurial import scmutil
20 20 > from mercurial import discovery
21 21 > from mercurial import changegroup
22 22 > from mercurial import error
23 23 > from mercurial import obsolete
24 24 >
25 25 > obsolete._enabled = True
26 26 >
27 27 > try:
28 28 > import msvcrt
29 29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 32 > except ImportError:
33 33 > pass
34 34 >
35 35 > cmdtable = {}
36 36 > command = cmdutil.command(cmdtable)
37 37 >
38 38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
42 42 >
43 43 > @bundle2.parthandler('test:song')
44 44 > def songhandler(op, part):
45 45 > """handle a "test:song" bundle2 part, printing the lyrics on stdout"""
46 46 > op.ui.write('The choir starts singing:\n')
47 47 > verses = 0
48 48 > for line in part.read().split('\n'):
49 49 > op.ui.write(' %s\n' % line)
50 50 > verses += 1
51 51 > op.records.add('song', {'verses': verses})
52 52 >
53 53 > @bundle2.parthandler('test:ping')
54 54 > def pinghandler(op, part):
55 55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
59 59 >
60 60 > @bundle2.parthandler('test:debugreply')
61 61 > def debugreply(op, part):
62 62 > """print data about the capacity of the bundle reply"""
63 63 > if op.reply is None:
64 64 > op.ui.write('debugreply: no reply\n')
65 65 > else:
66 66 > op.ui.write('debugreply: capabilities:\n')
67 67 > for cap in sorted(op.reply.capabilities):
68 68 > op.ui.write('debugreply: %r\n' % cap)
69 69 > for val in op.reply.capabilities[cap]:
70 70 > op.ui.write('debugreply: %r\n' % val)
71 71 >
72 72 > @command('bundle2',
73 73 > [('', 'param', [], 'stream level parameter'),
74 74 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 75 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
76 76 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
77 77 > ('', 'reply', False, 'produce a reply bundle'),
78 78 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
79 79 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
80 80 > '[OUTPUTFILE]')
81 81 > def cmdbundle2(ui, repo, path=None, **opts):
82 82 > """write a bundle2 container on standard output"""
83 83 > bundler = bundle2.bundle20(ui)
84 84 > for p in opts['param']:
85 85 > p = p.split('=', 1)
86 86 > try:
87 87 > bundler.addparam(*p)
88 88 > except ValueError, exc:
89 89 > raise util.Abort('%s' % exc)
90 90 >
91 91 > if opts['reply']:
92 92 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
93 93 > bundler.newpart('b2x:replycaps', data=capsstring)
94 94 >
95 95 > if opts['pushrace']:
96 96 > # also serves to test the assignment of data outside of init
97 97 > part = bundler.newpart('b2x:check:heads')
98 98 > part.data = '01234567890123456789'
99 99 >
100 100 > revs = opts['rev']
101 101 > if 'rev' in opts:
102 102 > revs = scmutil.revrange(repo, opts['rev'])
103 103 > if revs:
104 104 > # very crude version of a changegroup part creation
105 105 > bundled = repo.revs('%ld::%ld', revs, revs)
106 106 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
107 107 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
108 108 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
109 109 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
110 110 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
111 111 >
112 112 > if opts['parts']:
113 113 > bundler.newpart('test:empty')
114 114 > # add a second one to make sure we handle multiple parts
115 115 > bundler.newpart('test:empty')
116 116 > bundler.newpart('test:song', data=ELEPHANTSSONG)
117 117 > bundler.newpart('test:debugreply')
118 118 > mathpart = bundler.newpart('test:math')
119 119 > mathpart.addparam('pi', '3.14')
120 120 > mathpart.addparam('e', '2.72')
121 121 > mathpart.addparam('cooking', 'raw', mandatory=False)
122 122 > mathpart.data = '42'
123 123 > # advisory known part with unknown mandatory param
124 124 > bundler.newpart('test:song', [('randomparam','')])
125 125 > if opts['unknown']:
126 126 > bundler.newpart('test:UNKNOWN', data='some random content')
127 127 > if opts['unknownparams']:
128 128 > bundler.newpart('test:SONG', [('randomparams', '')])
129 129 > if opts['parts']:
130 130 > bundler.newpart('test:ping')
131 131 >
132 132 > if path is None:
133 133 > file = sys.stdout
134 134 > else:
135 135 > file = open(path, 'wb')
136 136 >
137 137 > for chunk in bundler.getchunks():
138 138 > file.write(chunk)
139 139 >
140 140 > @command('unbundle2', [], '')
141 141 > def cmdunbundle2(ui, repo, replypath=None):
142 142 > """process a bundle2 stream from stdin on the current repo"""
143 143 > try:
144 144 > tr = None
145 145 > lock = repo.lock()
146 146 > tr = repo.transaction('processbundle')
147 147 > try:
148 148 > unbundler = bundle2.unbundle20(ui, sys.stdin)
149 149 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
150 150 > tr.close()
151 151 > except error.BundleValueError, exc:
152 152 > raise util.Abort('missing support for %s' % exc)
153 153 > except error.PushRaced, exc:
154 154 > raise util.Abort('push race: %s' % exc)
155 155 > finally:
156 156 > if tr is not None:
157 157 > tr.release()
158 158 > lock.release()
159 159 > remains = sys.stdin.read()
160 160 > ui.write('%i unread bytes\n' % len(remains))
161 161 > if op.records['song']:
162 162 > totalverses = sum(r['verses'] for r in op.records['song'])
163 163 > ui.write('%i total verses sung\n' % totalverses)
164 164 > for rec in op.records['changegroup']:
165 165 > ui.write('addchangegroup return: %i\n' % rec['return'])
166 166 > if op.reply is not None and replypath is not None:
167 167 > file = open(replypath, 'wb')
168 168 > for chunk in op.reply.getchunks():
169 169 > file.write(chunk)
170 170 >
171 171 > @command('statbundle2', [], '')
172 172 > def cmdstatbundle2(ui, repo):
173 173 > """print statistic on the bundle2 container read from stdin"""
174 174 > unbundler = bundle2.unbundle20(ui, sys.stdin)
175 175 > try:
176 176 > params = unbundler.params
177 177 > except error.BundleValueError, exc:
178 178 > raise util.Abort('unknown parameters: %s' % exc)
179 179 > ui.write('options count: %i\n' % len(params))
180 180 > for key in sorted(params):
181 181 > ui.write('- %s\n' % key)
182 182 > value = params[key]
183 183 > if value is not None:
184 184 > ui.write(' %s\n' % value)
185 185 > count = 0
186 186 > for p in unbundler.iterparts():
187 187 > count += 1
188 188 > ui.write(' :%s:\n' % p.type)
189 189 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
190 190 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
191 191 > ui.write(' payload: %i bytes\n' % len(p.read()))
192 192 > ui.write('parts count: %i\n' % count)
193 193 > EOF
194 194 $ cat >> $HGRCPATH << EOF
195 195 > [extensions]
196 196 > bundle2=$TESTTMP/bundle2.py
197 197 > [experimental]
198 198 > bundle2-exp=True
199 199 > [ui]
200 200 > ssh=python "$TESTDIR/dummyssh"
201 201 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
202 202 > [web]
203 203 > push_ssl = false
204 204 > allow_push = *
205 205 > [phases]
206 206 > publish=False
207 207 > EOF
208 208
209 209 The extension requires a repo (currently unused)
210 210
211 211 $ hg init main
212 212 $ cd main
213 213 $ touch a
214 214 $ hg add a
215 215 $ hg commit -m 'a'
216 216
217 217
218 218 Empty bundle
219 219 =================
220 220
221 221 - no option
222 222 - no parts
223 223
224 224 Test bundling
225 225
226 226 $ hg bundle2
227 227 HG2X\x00\x00\x00\x00 (no-eol) (esc)
228 228
229 229 Test unbundling
230 230
231 231 $ hg bundle2 | hg statbundle2
232 232 options count: 0
233 233 parts count: 0
234 234
235 235 Test old style bundle are detected and refused
236 236
237 237 $ hg bundle --all ../bundle.hg
238 238 1 changesets found
239 239 $ hg statbundle2 < ../bundle.hg
240 240 abort: unknown bundle version 10
241 241 [255]
242 242
243 243 Test parameters
244 244 =================
245 245
246 246 - some options
247 247 - no parts
248 248
249 249 advisory parameters, no value
250 250 -------------------------------
251 251
252 252 Simplest possible parameters form
253 253
254 254 Test generation simple option
255 255
256 256 $ hg bundle2 --param 'caution'
257 257 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
258 258
259 259 Test unbundling
260 260
261 261 $ hg bundle2 --param 'caution' | hg statbundle2
262 262 options count: 1
263 263 - caution
264 264 parts count: 0
265 265
266 266 Test generation multiple option
267 267
268 268 $ hg bundle2 --param 'caution' --param 'meal'
269 269 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
270 270
271 271 Test unbundling
272 272
273 273 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
274 274 options count: 2
275 275 - caution
276 276 - meal
277 277 parts count: 0
278 278
279 279 advisory parameters, with value
280 280 -------------------------------
281 281
282 282 Test generation
283 283
284 284 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
285 285 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
286 286
287 287 Test unbundling
288 288
289 289 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
290 290 options count: 3
291 291 - caution
292 292 - elephants
293 293 - meal
294 294 vegan
295 295 parts count: 0
296 296
297 297 parameter with special char in value
298 298 ---------------------------------------------------
299 299
300 300 Test generation
301 301
302 302 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
303 303 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
304 304
305 305 Test unbundling
306 306
307 307 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
308 308 options count: 2
309 309 - e|! 7/
310 310 babar%#==tutu
311 311 - simple
312 312 parts count: 0
313 313
314 314 Test unknown mandatory option
315 315 ---------------------------------------------------
316 316
317 317 $ hg bundle2 --param 'Gravity' | hg statbundle2
318 318 abort: unknown parameters: Stream Parameter - Gravity
319 319 [255]
320 320
321 321 Test debug output
322 322 ---------------------------------------------------
323 323
324 324 bundling debug
325 325
326 326 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
327 327 start emission of HG2X stream
328 328 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
329 329 start of parts
330 330 end of bundle
331 331
332 332 file content is ok
333 333
334 334 $ cat ../out.hg2
335 335 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
336 336
337 337 unbundling debug
338 338
339 339 $ hg statbundle2 --debug < ../out.hg2
340 340 start processing of HG2X stream
341 341 reading bundle2 stream parameters
342 342 ignoring unknown parameter 'e|! 7/'
343 343 ignoring unknown parameter 'simple'
344 344 options count: 2
345 345 - e|! 7/
346 346 babar%#==tutu
347 347 - simple
348 348 start extraction of bundle2 parts
349 349 part header size: 0
350 350 end of bundle2 stream
351 351 parts count: 0
352 352
353 353
354 354 Test buggy input
355 355 ---------------------------------------------------
356 356
357 357 empty parameter name
358 358
359 359 $ hg bundle2 --param '' --quiet
360 360 abort: empty parameter name
361 361 [255]
362 362
363 363 bad parameter name
364 364
365 365 $ hg bundle2 --param 42babar
366 366 abort: non letter first character: '42babar'
367 367 [255]
368 368
369 369
370 370 Test part
371 371 =================
372 372
373 373 $ hg bundle2 --parts ../parts.hg2 --debug
374 374 start emission of HG2X stream
375 375 bundle parameter:
376 376 start of parts
377 377 bundle part: "test:empty"
378 378 bundle part: "test:empty"
379 379 bundle part: "test:song"
380 380 bundle part: "test:debugreply"
381 381 bundle part: "test:math"
382 382 bundle part: "test:song"
383 383 bundle part: "test:ping"
384 384 end of bundle
385 385
386 386 $ cat ../parts.hg2
387 387 HG2X\x00\x00\x00\x11 (esc)
388 388 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
389 389 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
390 390 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
391 391 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
392 392
393 393
394 394 $ hg statbundle2 < ../parts.hg2
395 395 options count: 0
396 396 :test:empty:
397 397 mandatory: 0
398 398 advisory: 0
399 399 payload: 0 bytes
400 400 :test:empty:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 :test:song:
405 405 mandatory: 0
406 406 advisory: 0
407 407 payload: 178 bytes
408 408 :test:debugreply:
409 409 mandatory: 0
410 410 advisory: 0
411 411 payload: 0 bytes
412 412 :test:math:
413 413 mandatory: 2
414 414 advisory: 1
415 415 payload: 2 bytes
416 416 :test:song:
417 417 mandatory: 1
418 418 advisory: 0
419 419 payload: 0 bytes
420 420 :test:ping:
421 421 mandatory: 0
422 422 advisory: 0
423 423 payload: 0 bytes
424 424 parts count: 7
425 425
426 426 $ hg statbundle2 --debug < ../parts.hg2
427 427 start processing of HG2X stream
428 428 reading bundle2 stream parameters
429 429 options count: 0
430 430 start extraction of bundle2 parts
431 431 part header size: 17
432 432 part type: "test:empty"
433 433 part id: "0"
434 434 part parameters: 0
435 435 :test:empty:
436 436 mandatory: 0
437 437 advisory: 0
438 438 payload chunk size: 0
439 439 payload: 0 bytes
440 440 part header size: 17
441 441 part type: "test:empty"
442 442 part id: "1"
443 443 part parameters: 0
444 444 :test:empty:
445 445 mandatory: 0
446 446 advisory: 0
447 447 payload chunk size: 0
448 448 payload: 0 bytes
449 449 part header size: 16
450 450 part type: "test:song"
451 451 part id: "2"
452 452 part parameters: 0
453 453 :test:song:
454 454 mandatory: 0
455 455 advisory: 0
456 456 payload chunk size: 178
457 457 payload chunk size: 0
458 458 payload: 178 bytes
459 459 part header size: 22
460 460 part type: "test:debugreply"
461 461 part id: "3"
462 462 part parameters: 0
463 463 :test:debugreply:
464 464 mandatory: 0
465 465 advisory: 0
466 466 payload chunk size: 0
467 467 payload: 0 bytes
468 468 part header size: 43
469 469 part type: "test:math"
470 470 part id: "4"
471 471 part parameters: 3
472 472 :test:math:
473 473 mandatory: 2
474 474 advisory: 1
475 475 payload chunk size: 2
476 476 payload chunk size: 0
477 477 payload: 2 bytes
478 478 part header size: 29
479 479 part type: "test:song"
480 480 part id: "5"
481 481 part parameters: 1
482 482 :test:song:
483 483 mandatory: 1
484 484 advisory: 0
485 485 payload chunk size: 0
486 486 payload: 0 bytes
487 487 part header size: 16
488 488 part type: "test:ping"
489 489 part id: "6"
490 490 part parameters: 0
491 491 :test:ping:
492 492 mandatory: 0
493 493 advisory: 0
494 494 payload chunk size: 0
495 495 payload: 0 bytes
496 496 part header size: 0
497 497 end of bundle2 stream
498 498 parts count: 7
499 499
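Before moving on to unbundling, here is a hand-decoding of the first part header of ../parts.hg2. This is a sketch only: the field layout (2-byte header size, 1-byte type length, type string, 4-byte big-endian part id, then one byte each for the mandatory and advisory parameter counts) is inferred from the raw dump and the statbundle2 --debug output above, not from Mercurial's parser.

    # Illustrative sketch: decode the 17 header bytes of the first "test:empty"
    # part by hand ('\n' is the type-length byte, value 10).
    header = '\ntest:empty\x00\x00\x00\x00\x00\x00'

    typelen = ord(header[0])
    parttype = header[1:1 + typelen]
    pos = 1 + typelen
    partid = 0
    for c in header[pos:pos + 4]:
        partid = partid * 256 + ord(c)
    mandatory, advisory = ord(header[pos + 4]), ord(header[pos + 5])

    print('part type: "%s"' % parttype)               # test:empty
    print('part id: "%d"' % partid)                   # 0
    print('part parameters: %d' % (mandatory + advisory))
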
500 500 Test actual unbundling of test part
501 501 =======================================
502 502
503 503 Process the bundle
504 504
505 505 $ hg unbundle2 --debug < ../parts.hg2
506 506 start processing of HG2X stream
507 507 reading bundle2 stream parameters
508 508 start extraction of bundle2 parts
509 509 part header size: 17
510 510 part type: "test:empty"
511 511 part id: "0"
512 512 part parameters: 0
513 513 ignoring unsupported advisory part test:empty
514 514 payload chunk size: 0
515 515 part header size: 17
516 516 part type: "test:empty"
517 517 part id: "1"
518 518 part parameters: 0
519 519 ignoring unsupported advisory part test:empty
520 520 payload chunk size: 0
521 521 part header size: 16
522 522 part type: "test:song"
523 523 part id: "2"
524 524 part parameters: 0
525 525 found a handler for part 'test:song'
526 526 The choir starts singing:
527 527 payload chunk size: 178
528 528 payload chunk size: 0
529 529 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
530 530 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
531 531 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
532 532 part header size: 22
533 533 part type: "test:debugreply"
534 534 part id: "3"
535 535 part parameters: 0
536 536 found a handler for part 'test:debugreply'
537 537 debugreply: no reply
538 538 payload chunk size: 0
539 539 part header size: 43
540 540 part type: "test:math"
541 541 part id: "4"
542 542 part parameters: 3
543 543 ignoring unsupported advisory part test:math
544 544 payload chunk size: 2
545 545 payload chunk size: 0
546 546 part header size: 29
547 547 part type: "test:song"
548 548 part id: "5"
549 549 part parameters: 1
550 550 found a handler for part 'test:song'
551 551 ignoring unsupported advisory part test:song - randomparam
552 552 payload chunk size: 0
553 553 part header size: 16
554 554 part type: "test:ping"
555 555 part id: "6"
556 556 part parameters: 0
557 557 found a handler for part 'test:ping'
558 558 received ping request (id 6)
559 559 payload chunk size: 0
560 560 part header size: 0
561 561 end of bundle2 stream
562 562 0 unread bytes
563 563 3 total verses sung
564 564
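The "found a handler for part ..." lines above come from handlers registered for a given part type: unknown advisory parts are ignored, unknown mandatory ones abort (tested next). The failpush.py extension further down in this test registers such a handler with @bundle2.parthandler; as an illustration, a ping handler in the same style might look like the sketch below. The function name and the part.id attribute are assumptions here, not taken from this test.

    # Sketch of a bundle2 part handler registered from an extension, modelled
    # on the failpush.py extension used later in this test.
    from mercurial import bundle2

    @bundle2.parthandler('test:ping')
    def handleping(op, part):
        # 'op' is the ongoing bundle operation, 'part' the part being read
        op.ui.write('received ping request (id %i)\n' % part.id)
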
565 565 Unbundle with an unknown mandatory part
566 566 (should abort)
567 567
568 568 $ hg bundle2 --parts --unknown ../unknown.hg2
569 569
570 570 $ hg unbundle2 < ../unknown.hg2
571 571 The choir starts singing:
572 572 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
573 573 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
574 574 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
575 575 debugreply: no reply
576 576 0 unread bytes
577 577 abort: missing support for test:unknown
578 578 [255]
579 579
580 580 Unbundle with unknown mandatory part parameters
581 581 (should abort)
582 582
583 583 $ hg bundle2 --unknownparams ../unknown.hg2
584 584
585 585 $ hg unbundle2 < ../unknown.hg2
586 586 0 unread bytes
587 587 abort: missing support for test:song - randomparams
588 588 [255]
589 589
590 590 unbundle with a reply
591 591
592 592 $ hg bundle2 --parts --reply ../parts-reply.hg2
593 593 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
594 594 0 unread bytes
595 595 3 total verses sung
596 596
597 597 The reply is a bundle
598 598
599 599 $ cat ../reply.hg2
600 600 HG2X\x00\x00\x00\x1f (esc)
601 601 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
602 602 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
603 603 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
604 604 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
605 605 \x00\x00\x00\x00\x00\x1f (esc)
606 606 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
607 607 debugreply: 'city=!'
608 608 debugreply: 'celeste,ville'
609 609 debugreply: 'elephants'
610 610 debugreply: 'babar'
611 611 debugreply: 'celeste'
612 612 debugreply: 'ping-pong'
613 613 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
614 614 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
615 615 replying to ping request (id 7)
616 616 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
617 617
618 618 The reply is valid
619 619
620 620 $ hg statbundle2 < ../reply.hg2
621 621 options count: 0
622 622 :b2x:output:
623 623 mandatory: 0
624 624 advisory: 1
625 625 payload: 217 bytes
626 626 :b2x:output:
627 627 mandatory: 0
628 628 advisory: 1
629 629 payload: 201 bytes
630 630 :test:pong:
631 631 mandatory: 1
632 632 advisory: 0
633 633 payload: 0 bytes
634 634 :b2x:output:
635 635 mandatory: 0
636 636 advisory: 1
637 637 payload: 61 bytes
638 638 parts count: 4
639 639
640 640 Unbundle the reply to get the output:
641 641
642 642 $ hg unbundle2 < ../reply.hg2
643 643 remote: The choir starts singing:
644 644 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
645 645 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
646 646 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
647 647 remote: debugreply: capabilities:
648 648 remote: debugreply: 'city=!'
649 649 remote: debugreply: 'celeste,ville'
650 650 remote: debugreply: 'elephants'
651 651 remote: debugreply: 'babar'
652 652 remote: debugreply: 'celeste'
653 653 remote: debugreply: 'ping-pong'
654 654 remote: received ping request (id 7)
655 655 remote: replying to ping request (id 7)
656 656 0 unread bytes
657 657
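What the local side does with such a reply, as shown by the unbundle2 run above: each b2x:output part carries an advisory 'in-reply-to' parameter naming the part it answers, and its payload is echoed line by line with a "remote: " prefix. A stand-alone sketch with hand-written stand-in data (not a parsed bundle):

    # Sketch only: the parts list below mimics the reply shown above.
    reply_parts = [
        ('b2x:output', {'in-reply-to': '3'}, 'The choir starts singing:\n'),
        ('test:pong', {'in-reply-to': '7'}, ''),
        ('b2x:output', {'in-reply-to': '7'},
         'received ping request (id 7)\nreplying to ping request (id 7)\n'),
    ]

    for parttype, params, payload in reply_parts:
        if parttype != 'b2x:output':
            continue          # e.g. test:pong is consumed by its own handler
        for line in payload.splitlines():
            print('remote: %s' % line)
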
658 658 Test push race detection
659 659
660 660 $ hg bundle2 --pushrace ../part-race.hg2
661 661
662 662 $ hg unbundle2 < ../part-race.hg2
663 663 0 unread bytes
664 664 abort: push race: repository changed while pushing - please try again
665 665 [255]
666 666
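The abort above relies on the same head-checking part that the failpush.py extension later in this test forges (b2x:check:heads): the pusher records the remote heads it based its push on, and the receiving side refuses the bundle when its current heads no longer match. A trivial sketch of that comparison, with made-up node values standing in for the 20-byte binary nodes of the real payload:

    # Sketch with made-up values only.
    clientheads = set(['a' * 20, 'b' * 20])    # heads the pusher saw
    currentheads = set(['a' * 20, 'c' * 20])   # heads on the receiving side now

    if clientheads != currentheads:
        print('abort: push race: repository changed while pushing - '
              'please try again')
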
667 667 Support for changegroup
668 668 ===================================
669 669
670 670 $ hg unbundle $TESTDIR/bundles/rebase.hg
671 671 adding changesets
672 672 adding manifests
673 673 adding file changes
674 674 added 8 changesets with 7 changes to 7 files (+3 heads)
675 675 (run 'hg heads' to see heads, 'hg merge' to merge)
676 676
677 677 $ hg log -G
678 678 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
679 679 |
680 680 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
681 681 |/|
682 682 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
683 683 | |
684 684 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
685 685 |/
686 686 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
687 687 | |
688 688 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
689 689 | |
690 690 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
691 691 |/
692 692 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
693 693
694 694 @ 0:3903775176ed draft test a
695 695
696 696
697 697 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
698 698 4 changesets found
699 699 list of changesets:
700 700 32af7686d403cf45b5d95f2d70cebea587ac806a
701 701 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
702 702 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
703 703 02de42196ebee42ef284b6780a87cdc96e8eaab6
704 704 start emission of HG2X stream
705 705 bundle parameter:
706 706 start of parts
707 707 bundle part: "b2x:changegroup"
708 708 bundling: 1/4 changesets (25.00%)
709 709 bundling: 2/4 changesets (50.00%)
710 710 bundling: 3/4 changesets (75.00%)
711 711 bundling: 4/4 changesets (100.00%)
712 712 bundling: 1/4 manifests (25.00%)
713 713 bundling: 2/4 manifests (50.00%)
714 714 bundling: 3/4 manifests (75.00%)
715 715 bundling: 4/4 manifests (100.00%)
716 716 bundling: D 1/3 files (33.33%)
717 717 bundling: E 2/3 files (66.67%)
718 718 bundling: H 3/3 files (100.00%)
719 719 end of bundle
720 720
721 721 $ cat ../rev.hg2
722 722 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
723 723 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
724 724 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
725 725 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
726 726 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
727 727 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 728 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
729 729 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
730 730 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
731 731 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
732 732 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
733 733 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
734 734 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
735 735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
736 736 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
737 737 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
738 738 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
739 739 l\r (no-eol) (esc)
740 740 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
741 741 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
742 742 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
743 743 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
744 744
745 745 $ hg unbundle2 < ../rev.hg2
746 746 adding changesets
747 747 adding manifests
748 748 adding file changes
749 749 added 0 changesets with 0 changes to 3 files
750 750 0 unread bytes
751 751 addchangegroup return: 1
752 752
753 753 with reply
754 754
755 755 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
756 756 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
757 757 0 unread bytes
758 758 addchangegroup return: 1
759 759
760 760 $ cat ../rev-reply.hg2
761 761 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
762 762 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
763 763 adding manifests
764 764 adding file changes
765 765 added 0 changesets with 0 changes to 3 files
766 766 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
767 767
768 768 $ cd ..
769 769
770 770 Real world exchange
771 771 =====================
772 772
773 773 Add more obsolescence information
774 774
775 775 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
776 776 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
777 777
778 778 clone --pull
779 779
780 780 $ hg -R main phase --public cd010b8cd998
781 781 $ hg clone main other --pull --rev 9520eea781bc
782 782 adding changesets
783 783 adding manifests
784 784 adding file changes
785 785 added 2 changesets with 2 changes to 2 files
786 786 updating to branch default
787 787 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
788 788 $ hg -R other log -G
789 789 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
790 790 |
791 791 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
792 792
793 793 $ hg -R other debugobsolete
794 794 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
795 795 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
796 796
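Each debugobsolete line above lists one marker: precursor node, successor node, flags, date, and metadata. A small sketch that splits one of those lines into its fields; the layout is simply what the output shows, and the field names are how this listing is usually read rather than something the test states.

    # Sketch: split one line of the listing above into its fields.
    import re

    line = ('1111111111111111111111111111111111111111 '
            '9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 '
            "(Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}")

    m = re.match(r"(\w{40}) (\w{40}) (\d+) \((.*?)\) \{(.*)\}$", line)
    precursor, successor, flags, date, metadata = m.groups()
    print('precursor: %s' % precursor)
    print('successor: %s' % successor)
    print('flags:     %s' % flags)
    print('date:      %s' % date)
    print('metadata:  {%s}' % metadata)
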
797 797 pull
798 798
799 799 $ hg -R main phase --public 9520eea781bc
800 800 $ hg -R other pull -r 24b6387c8c8c
801 801 pulling from $TESTTMP/main (glob)
802 802 searching for changes
803 803 adding changesets
804 804 adding manifests
805 805 adding file changes
806 806 added 1 changesets with 1 changes to 1 files (+1 heads)
807 807 (run 'hg heads' to see heads, 'hg merge' to merge)
808 808 $ hg -R other log -G
809 809 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
810 810 |
811 811 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
812 812 |/
813 813 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
814 814
815 815 $ hg -R other debugobsolete
816 816 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
817 817 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
818 818
819 819 pull empty (with phase movement)
820 820
821 821 $ hg -R main phase --public 24b6387c8c8c
822 822 $ hg -R other pull -r 24b6387c8c8c
823 823 pulling from $TESTTMP/main (glob)
824 824 no changes found
825 825 $ hg -R other log -G
826 826 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
827 827 |
828 828 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
829 829 |/
830 830 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
831 831
832 832 $ hg -R other debugobsolete
833 833 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
834 834 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
835 835
836 836 pull empty
837 837
838 838 $ hg -R other pull -r 24b6387c8c8c
839 839 pulling from $TESTTMP/main (glob)
840 840 no changes found
841 841 $ hg -R other log -G
842 842 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
843 843 |
844 844 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
845 845 |/
846 846 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
847 847
848 848 $ hg -R other debugobsolete
849 849 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
850 850 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
851 851
852 852 add extra data to test their exchange during push
853 853
854 854 $ hg -R main bookmark --rev eea13746799a book_eea1
855 855 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
856 856 $ hg -R main bookmark --rev 02de42196ebe book_02de
857 857 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
858 858 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
859 859 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
860 860 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
861 861 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
862 862 $ hg -R main bookmark --rev 32af7686d403 book_32af
863 863 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
864 864
865 865 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
866 866 $ hg -R other bookmark --rev cd010b8cd998 book_02de
867 867 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
868 868 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
869 869 $ hg -R other bookmark --rev cd010b8cd998 book_32af
870 870
871 871 $ hg -R main phase --public eea13746799a
872 872
873 873 push
874 874 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
875 875 pushing to other
876 876 searching for changes
877 877 remote: adding changesets
878 878 remote: adding manifests
879 879 remote: adding file changes
880 880 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
881 remote: 5 new obsolescence markers
881 882 updating bookmark book_eea1
882 883 exporting bookmark book_eea1
883 884 $ hg -R other log -G
884 885 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
885 886 |\
886 887 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
887 888 | |
888 889 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
889 890 |/
890 891 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
891 892
892 893 $ hg -R other debugobsolete
893 894 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
894 895 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
895 896 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
896 897 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
897 898 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
898 899 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
899 900 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
900 901
901 902 pull over ssh
902 903
903 904 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
904 905 pulling from ssh://user@dummy/main
905 906 searching for changes
906 907 adding changesets
907 908 adding manifests
908 909 adding file changes
909 910 added 1 changesets with 1 changes to 1 files (+1 heads)
910 911 updating bookmark book_02de
911 912 (run 'hg heads' to see heads, 'hg merge' to merge)
912 913 importing bookmark book_02de
913 914 $ hg -R other debugobsolete
914 915 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
915 916 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
916 917 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 918 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
918 919 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
919 920 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
920 921 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
921 922
922 923 pull over http
923 924
924 925 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
925 926 $ cat main.pid >> $DAEMON_PIDS
926 927
927 928 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
928 929 pulling from http://localhost:$HGPORT/
929 930 searching for changes
930 931 adding changesets
931 932 adding manifests
932 933 adding file changes
933 934 added 1 changesets with 1 changes to 1 files (+1 heads)
934 935 updating bookmark book_42cc
935 936 (run 'hg heads .' to see heads, 'hg merge' to merge)
936 937 importing bookmark book_42cc
937 938 $ cat main-error.log
938 939 $ hg -R other debugobsolete
939 940 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
940 941 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
941 942 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
942 943 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
943 944 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
944 945 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
945 946 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
946 947
947 948 push over ssh
948 949
949 950 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
950 951 pushing to ssh://user@dummy/other
951 952 searching for changes
952 953 remote: adding changesets
953 954 remote: adding manifests
954 955 remote: adding file changes
955 956 remote: added 1 changesets with 1 changes to 1 files
956 957 updating bookmark book_5fdd
957 958 exporting bookmark book_5fdd
958 959 $ hg -R other log -G
959 960 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
960 961 |
961 962 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
962 963 |
963 964 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
964 965 | |
965 966 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
966 967 | |/|
967 968 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
968 969 |/ /
969 970 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
970 971 |/
971 972 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
972 973
973 974 $ hg -R other debugobsolete
974 975 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
975 976 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
976 977 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
977 978 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
978 979 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
979 980 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
980 981 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
981 982
982 983 push over http
983 984
984 985 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
985 986 $ cat other.pid >> $DAEMON_PIDS
986 987
987 988 $ hg -R main phase --public 32af7686d403
988 989 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
989 990 pushing to http://localhost:$HGPORT2/
990 991 searching for changes
991 992 remote: adding changesets
992 993 remote: adding manifests
993 994 remote: adding file changes
994 995 remote: added 1 changesets with 1 changes to 1 files
995 996 updating bookmark book_32af
996 997 exporting bookmark book_32af
997 998 $ cat other-error.log
998 999
999 1000 Check final content.
1000 1001
1001 1002 $ hg -R other log -G
1002 1003 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
1003 1004 |
1004 1005 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
1005 1006 |
1006 1007 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
1007 1008 |
1008 1009 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
1009 1010 | |
1010 1011 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
1011 1012 | |/|
1012 1013 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
1013 1014 |/ /
1014 1015 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
1015 1016 |/
1016 1017 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
1017 1018
1018 1019 $ hg -R other debugobsolete
1019 1020 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1020 1021 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1021 1022 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1022 1023 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1023 1024 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1024 1025 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1025 1026 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1026 1027
1027 1028 Error Handling
1028 1029 ==============
1029 1030
1030 1031 Check that errors are properly returned to the client during push.
1031 1032
1032 1033 Setting up
1033 1034
1034 1035 $ cat > failpush.py << EOF
1035 1036 > """A small extension that makes push fails when using bundle2
1036 1037 >
1037 1038 > used to test error handling in bundle2
1038 1039 > """
1039 1040 >
1040 1041 > from mercurial import util
1041 1042 > from mercurial import bundle2
1042 1043 > from mercurial import exchange
1043 1044 > from mercurial import extensions
1044 1045 >
1045 1046 > def _pushbundle2failpart(pushop, bundler):
1046 1047 > reason = pushop.ui.config('failpush', 'reason', None)
1047 1048 > part = None
1048 1049 > if reason == 'abort':
1049 1050 > bundler.newpart('test:abort')
1050 1051 > if reason == 'unknown':
1051 1052 > bundler.newpart('TEST:UNKNOWN')
1052 1053 > if reason == 'race':
1053 1054 > # 20 Bytes of crap
1054 1055 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
1055 1056 >
1056 1057 > @bundle2.parthandler("test:abort")
1057 1058 > def handleabort(op, part):
1058 1059 > raise util.Abort('Abandon ship!', hint="don't panic")
1059 1060 >
1060 1061 > def uisetup(ui):
1061 1062 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
1062 1063 > exchange.b2partsgenorder.insert(0, 'failpart')
1063 1064 >
1064 1065 > EOF
1065 1066
1066 1067 $ cd main
1067 1068 $ hg up tip
1068 1069 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
1069 1070 $ echo 'I' > I
1070 1071 $ hg add I
1071 1072 $ hg ci -m 'I'
1072 1073 $ hg id
1073 1074 e7ec4e813ba6 tip
1074 1075 $ cd ..
1075 1076
1076 1077 $ cat << EOF >> $HGRCPATH
1077 1078 > [extensions]
1078 1079 > failpush=$TESTTMP/failpush.py
1079 1080 > EOF
1080 1081
1081 1082 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1082 1083 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1083 1084 $ cat other.pid >> $DAEMON_PIDS
1084 1085
1085 1086 Doing the actual push: Abort error
1086 1087
1087 1088 $ cat << EOF >> $HGRCPATH
1088 1089 > [failpush]
1089 1090 > reason = abort
1090 1091 > EOF
1091 1092
1092 1093 $ hg -R main push other -r e7ec4e813ba6
1093 1094 pushing to other
1094 1095 searching for changes
1095 1096 abort: Abandon ship!
1096 1097 (don't panic)
1097 1098 [255]
1098 1099
1099 1100 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1100 1101 pushing to ssh://user@dummy/other
1101 1102 searching for changes
1102 1103 abort: Abandon ship!
1103 1104 (don't panic)
1104 1105 [255]
1105 1106
1106 1107 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1107 1108 pushing to http://localhost:$HGPORT2/
1108 1109 searching for changes
1109 1110 abort: Abandon ship!
1110 1111 (don't panic)
1111 1112 [255]
1112 1113
1113 1114
1114 1115 Doing the actual push: unknown mandatory parts
1115 1116
1116 1117 $ cat << EOF >> $HGRCPATH
1117 1118 > [failpush]
1118 1119 > reason = unknown
1119 1120 > EOF
1120 1121
1121 1122 $ hg -R main push other -r e7ec4e813ba6
1122 1123 pushing to other
1123 1124 searching for changes
1124 1125 abort: missing support for test:unknown
1125 1126 [255]
1126 1127
1127 1128 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1128 1129 pushing to ssh://user@dummy/other
1129 1130 searching for changes
1130 1131 abort: missing support for test:unknown
1131 1132 [255]
1132 1133
1133 1134 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1134 1135 pushing to http://localhost:$HGPORT2/
1135 1136 searching for changes
1136 1137 abort: missing support for test:unknown
1137 1138 [255]
1138 1139
1139 1140 Doing the actual push: race
1140 1141
1141 1142 $ cat << EOF >> $HGRCPATH
1142 1143 > [failpush]
1143 1144 > reason = race
1144 1145 > EOF
1145 1146
1146 1147 $ hg -R main push other -r e7ec4e813ba6
1147 1148 pushing to other
1148 1149 searching for changes
1149 1150 abort: push failed:
1150 1151 'repository changed while pushing - please try again'
1151 1152 [255]
1152 1153
1153 1154 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1154 1155 pushing to ssh://user@dummy/other
1155 1156 searching for changes
1156 1157 abort: push failed:
1157 1158 'repository changed while pushing - please try again'
1158 1159 [255]
1159 1160
1160 1161 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1161 1162 pushing to http://localhost:$HGPORT2/
1162 1163 searching for changes
1163 1164 abort: push failed:
1164 1165 'repository changed while pushing - please try again'
1165 1166 [255]
1166 1167
1167 1168 Doing the actual push: hook abort
1168 1169
1169 1170 $ cat << EOF >> $HGRCPATH
1170 1171 > [failpush]
1171 1172 > reason =
1172 1173 > [hooks]
1173 1174 > b2x-pretransactionclose.failpush = false
1174 1175 > EOF
1175 1176
1176 1177 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1177 1178 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1178 1179 $ cat other.pid >> $DAEMON_PIDS
1179 1180
1180 1181 $ hg -R main push other -r e7ec4e813ba6
1181 1182 pushing to other
1182 1183 searching for changes
1183 1184 transaction abort!
1184 1185 rollback completed
1185 1186 abort: b2x-pretransactionclose.failpush hook exited with status 1
1186 1187 [255]
1187 1188
1188 1189 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1189 1190 pushing to ssh://user@dummy/other
1190 1191 searching for changes
1191 1192 abort: b2x-pretransactionclose.failpush hook exited with status 1
1192 1193 remote: transaction abort!
1193 1194 remote: rollback completed
1194 1195 [255]
1195 1196
1196 1197 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1197 1198 pushing to http://localhost:$HGPORT2/
1198 1199 searching for changes
1199 1200 abort: b2x-pretransactionclose.failpush hook exited with status 1
1200 1201 [255]
1201 1202
1202 1203