bundle2: only use callable return as reply handler...
Pierre-Yves David
r21941:dab31290 stable
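This changeset touches mercurial/exchange.py: in _pushbundle2, the return value of each registered bundle2 part generator used to be appended to the replyhandlers list unconditionally; it is now kept only when it is callable, so a generator that has nothing to reply to can simply return None without breaking the later "for rephand in replyhandlers" loop. The sketch below is a hypothetical, self-contained illustration of that pattern (the generator names are made up); only the callable() check mirrors the actual change.

    # Hypothetical part generators: one returns a reply handler, one does not.
    def gen_with_reply(pushop, bundler):
        """Pretend to add a part; return a handler for the server's reply."""
        def handlereply(op):
            # a real handler would read the reply records for the part it added
            pass
        return handlereply

    def gen_without_reply(pushop, bundler):
        """Pretend to add a part; nothing to process in the reply."""
        return None

    partsgenerators = [gen_with_reply, gen_without_reply]

    def collectreplyhandlers(pushop, bundler):
        replyhandlers = []
        for partgen in partsgenerators:
            ret = partgen(pushop, bundler)
            # before this change every return value was appended, so a None
            # return would fail once the handlers were invoked on the reply
            if callable(ret):
                replyhandlers.append(ret)
        return replyhandlers

    assert len(collectreplyhandlers(pushop=None, bundler=None)) == 1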
@@ -1,811 +1,812
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13 13
14 14 def readbundle(ui, fh, fname, vfs=None):
15 15 header = changegroup.readexactly(fh, 4)
16 16
17 17 alg = None
18 18 if not fname:
19 19 fname = "stream"
20 20 if not header.startswith('HG') and header.startswith('\0'):
21 21 fh = changegroup.headerlessfixup(fh, header)
22 22 header = "HG10"
23 23 alg = 'UN'
24 24 elif vfs:
25 25 fname = vfs.join(fname)
26 26
27 27 magic, version = header[0:2], header[2:4]
28 28
29 29 if magic != 'HG':
30 30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 31 if version == '10':
32 32 if alg is None:
33 33 alg = changegroup.readexactly(fh, 2)
34 34 return changegroup.unbundle10(fh, alg)
35 35 elif version == '2X':
36 36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 37 else:
38 38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
40 40
41 41 class pushoperation(object):
42 42 """A object that represent a single push operation
43 43
44 44 It purpose is to carry push related state and very common operation.
45 45
46 46 A new should be created at the beginning of each push and discarded
47 47 afterward.
48 48 """
49 49
50 50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 51 # repo we push from
52 52 self.repo = repo
53 53 self.ui = repo.ui
54 54 # repo we push to
55 55 self.remote = remote
56 56 # force option provided
57 57 self.force = force
58 58 # revs to be pushed (None is "all")
59 59 self.revs = revs
60 60 # allow push of new branch
61 61 self.newbranch = newbranch
62 62 # did a local lock get acquired?
63 63 self.locallocked = None
64 64 # steps already performed
65 65 # (used to check which steps have already been performed through bundle2)
66 66 self.stepsdone = set()
67 67 # Integer version of the push result
68 68 # - None means nothing to push
69 69 # - 0 means HTTP error
70 70 # - 1 means we pushed and remote head count is unchanged *or*
71 71 # we have outgoing changesets but refused to push
72 72 # - other values as described by addchangegroup()
73 73 self.ret = None
74 74 # discover.outgoing object (contains common and outgoing data)
75 75 self.outgoing = None
76 76 # all remote heads before the push
77 77 self.remoteheads = None
78 78 # testable as a boolean indicating if any nodes are missing locally.
79 79 self.incoming = None
80 80 # set of all heads common after changeset bundle push
81 81 self.commonheads = None
82 82
83 83 def push(repo, remote, force=False, revs=None, newbranch=False):
84 84 '''Push outgoing changesets (limited by revs) from a local
85 85 repository to remote. Return an integer:
86 86 - None means nothing to push
87 87 - 0 means HTTP error
88 88 - 1 means we pushed and remote head count is unchanged *or*
89 89 we have outgoing changesets but refused to push
90 90 - other values as described by addchangegroup()
91 91 '''
92 92 pushop = pushoperation(repo, remote, force, revs, newbranch)
93 93 if pushop.remote.local():
94 94 missing = (set(pushop.repo.requirements)
95 95 - pushop.remote.local().supported)
96 96 if missing:
97 97 msg = _("required features are not"
98 98 " supported in the destination:"
99 99 " %s") % (', '.join(sorted(missing)))
100 100 raise util.Abort(msg)
101 101
102 102 # there are two ways to push to remote repo:
103 103 #
104 104 # addchangegroup assumes local user can lock remote
105 105 # repo (local filesystem, old ssh servers).
106 106 #
107 107 # unbundle assumes local user cannot lock remote repo (new ssh
108 108 # servers, http servers).
109 109
110 110 if not pushop.remote.canpush():
111 111 raise util.Abort(_("destination does not support push"))
112 112 # get local lock as we might write phase data
113 113 locallock = None
114 114 try:
115 115 locallock = pushop.repo.lock()
116 116 pushop.locallocked = True
117 117 except IOError, err:
118 118 pushop.locallocked = False
119 119 if err.errno != errno.EACCES:
120 120 raise
121 121 # source repo cannot be locked.
122 122 # We do not abort the push, but just disable the local phase
123 123 # synchronisation.
124 124 msg = 'cannot lock source repository: %s\n' % err
125 125 pushop.ui.debug(msg)
126 126 try:
127 127 pushop.repo.checkpush(pushop)
128 128 lock = None
129 129 unbundle = pushop.remote.capable('unbundle')
130 130 if not unbundle:
131 131 lock = pushop.remote.lock()
132 132 try:
133 133 _pushdiscovery(pushop)
134 134 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
135 135 False)
136 136 and pushop.remote.capable('bundle2-exp')):
137 137 _pushbundle2(pushop)
138 138 _pushchangeset(pushop)
139 139 _pushcomputecommonheads(pushop)
140 140 _pushsyncphase(pushop)
141 141 _pushobsolete(pushop)
142 142 finally:
143 143 if lock is not None:
144 144 lock.release()
145 145 finally:
146 146 if locallock is not None:
147 147 locallock.release()
148 148
149 149 _pushbookmark(pushop)
150 150 return pushop.ret
151 151
152 152 def _pushdiscovery(pushop):
153 153 # discovery
154 154 unfi = pushop.repo.unfiltered()
155 155 fci = discovery.findcommonincoming
156 156 commoninc = fci(unfi, pushop.remote, force=pushop.force)
157 157 common, inc, remoteheads = commoninc
158 158 fco = discovery.findcommonoutgoing
159 159 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
160 160 commoninc=commoninc, force=pushop.force)
161 161 pushop.outgoing = outgoing
162 162 pushop.remoteheads = remoteheads
163 163 pushop.incoming = inc
164 164
165 165 def _pushcheckoutgoing(pushop):
166 166 outgoing = pushop.outgoing
167 167 unfi = pushop.repo.unfiltered()
168 168 if not outgoing.missing:
169 169 # nothing to push
170 170 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
171 171 return False
172 172 # something to push
173 173 if not pushop.force:
174 174 # if repo.obsstore == False --> no obsolete
175 175 # then, skip the iteration
176 176 if unfi.obsstore:
177 177 # these messages are here for 80-char limit reasons
178 178 mso = _("push includes obsolete changeset: %s!")
179 179 mst = "push includes %s changeset: %s!"
180 180 # plain versions for i18n tool to detect them
181 181 _("push includes unstable changeset: %s!")
182 182 _("push includes bumped changeset: %s!")
183 183 _("push includes divergent changeset: %s!")
184 184 # If we are going to push and there is at least one
185 185 # obsolete or unstable changeset in missing, then at
186 186 # least one of the missing heads will be obsolete or
187 187 # unstable. So checking heads only is ok
188 188 for node in outgoing.missingheads:
189 189 ctx = unfi[node]
190 190 if ctx.obsolete():
191 191 raise util.Abort(mso % ctx)
192 192 elif ctx.troubled():
193 193 raise util.Abort(_(mst)
194 194 % (ctx.troubles()[0],
195 195 ctx))
196 196 newbm = pushop.ui.configlist('bookmarks', 'pushing')
197 197 discovery.checkheads(unfi, pushop.remote, outgoing,
198 198 pushop.remoteheads,
199 199 pushop.newbranch,
200 200 bool(pushop.incoming),
201 201 newbm)
202 202 return True
203 203
204 204 def _pushb2ctx(pushop, bundler):
205 205 """handle changegroup push through bundle2
206 206
207 207 addchangegroup result is stored in the ``pushop.ret`` attribute.
208 208 """
209 209 if 'changesets' in pushop.stepsdone:
210 210 return
211 211 pushop.stepsdone.add('changesets')
212 212 # Send known heads to the server for race detection.
213 213 pushop.stepsdone.add('changesets')
214 214 if not _pushcheckoutgoing(pushop):
215 215 return
216 216 pushop.repo.prepushoutgoinghooks(pushop.repo,
217 217 pushop.remote,
218 218 pushop.outgoing)
219 219 if not pushop.force:
220 220 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
221 221 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
222 222 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
223 223 def handlereply(op):
224 224 """extract addchangroup returns from server reply"""
225 225 cgreplies = op.records.getreplies(cgpart.id)
226 226 assert len(cgreplies['changegroup']) == 1
227 227 pushop.ret = cgreplies['changegroup'][0]['return']
228 228 return handlereply
229 229
230 230 # list of functions that may decide to add parts to an outgoing bundle2
231 231 bundle2partsgenerators = [_pushb2ctx]
232 232
233 233 def _pushbundle2(pushop):
234 234 """push data to the remote using bundle2
235 235
236 236 The only currently supported type of data is changegroup but this will
237 237 evolve in the future."""
238 238 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
239 239 # create reply capability
240 240 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
241 241 bundler.newpart('b2x:replycaps', data=capsblob)
242 242 replyhandlers = []
243 243 for partgen in bundle2partsgenerators:
244 244 ret = partgen(pushop, bundler)
245 replyhandlers.append(ret)
245 if callable(ret):
246 replyhandlers.append(ret)
246 247 # do not push if nothing to push
247 248 if bundler.nbparts <= 1:
248 249 return
249 250 stream = util.chunkbuffer(bundler.getchunks())
250 251 try:
251 252 reply = pushop.remote.unbundle(stream, ['force'], 'push')
252 253 except error.BundleValueError, exc:
253 254 raise util.Abort('missing support for %s' % exc)
254 255 try:
255 256 op = bundle2.processbundle(pushop.repo, reply)
256 257 except error.BundleValueError, exc:
257 258 raise util.Abort('missing support for %s' % exc)
258 259 for rephand in replyhandlers:
259 260 rephand(op)
260 261
261 262 def _pushchangeset(pushop):
262 263 """Make the actual push of changeset bundle to remote repo"""
263 264 if 'changesets' in pushop.stepsdone:
264 265 return
265 266 pushop.stepsdone.add('changesets')
266 267 if not _pushcheckoutgoing(pushop):
267 268 return
268 269 pushop.repo.prepushoutgoinghooks(pushop.repo,
269 270 pushop.remote,
270 271 pushop.outgoing)
271 272 outgoing = pushop.outgoing
272 273 unbundle = pushop.remote.capable('unbundle')
273 274 # TODO: get bundlecaps from remote
274 275 bundlecaps = None
275 276 # create a changegroup from local
276 277 if pushop.revs is None and not (outgoing.excluded
277 278 or pushop.repo.changelog.filteredrevs):
278 279 # push everything,
279 280 # use the fast path, no race possible on push
280 281 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
281 282 cg = changegroup.getsubset(pushop.repo,
282 283 outgoing,
283 284 bundler,
284 285 'push',
285 286 fastpath=True)
286 287 else:
287 288 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
288 289 bundlecaps)
289 290
290 291 # apply changegroup to remote
291 292 if unbundle:
292 293 # local repo finds heads on server, finds out what
293 294 # revs it must push. once revs transferred, if server
294 295 # finds it has different heads (someone else won
295 296 # commit/push race), server aborts.
296 297 if pushop.force:
297 298 remoteheads = ['force']
298 299 else:
299 300 remoteheads = pushop.remoteheads
300 301 # ssh: return remote's addchangegroup()
301 302 # http: return remote's addchangegroup() or 0 for error
302 303 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
303 304 pushop.repo.url())
304 305 else:
305 306 # we return an integer indicating remote head count
306 307 # change
307 308 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
308 309
309 310 def _pushcomputecommonheads(pushop):
310 311 unfi = pushop.repo.unfiltered()
311 312 if pushop.ret:
312 313 # push succeeded, synchronize the target of the push
313 314 cheads = pushop.outgoing.missingheads
314 315 elif pushop.revs is None:
315 316 # The all-out push failed; synchronize on everything common
316 317 cheads = pushop.outgoing.commonheads
317 318 else:
318 319 # I want cheads = heads(::missingheads and ::commonheads)
319 320 # (missingheads is revs with secret changeset filtered out)
320 321 #
321 322 # This can be expressed as:
322 323 # cheads = ( (missingheads and ::commonheads)
323 324 # + (commonheads and ::missingheads))"
324 325 # )
325 326 #
326 327 # while trying to push we already computed the following:
327 328 # common = (::commonheads)
328 329 # missing = ((commonheads::missingheads) - commonheads)
329 330 #
330 331 # We can pick:
331 332 # * missingheads part of common (::commonheads)
332 333 common = set(pushop.outgoing.common)
333 334 nm = pushop.repo.changelog.nodemap
334 335 cheads = [node for node in pushop.revs if nm[node] in common]
335 336 # and
336 337 # * commonheads parents on missing
337 338 revset = unfi.set('%ln and parents(roots(%ln))',
338 339 pushop.outgoing.commonheads,
339 340 pushop.outgoing.missing)
340 341 cheads.extend(c.node() for c in revset)
341 342 pushop.commonheads = cheads
342 343
343 344 def _pushsyncphase(pushop):
344 345 """synchronise phase information locally and remotely"""
345 346 unfi = pushop.repo.unfiltered()
346 347 cheads = pushop.commonheads
347 348 # even when we don't push, exchanging phase data is useful
348 349 remotephases = pushop.remote.listkeys('phases')
349 350 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
350 351 and remotephases # server supports phases
351 352 and pushop.ret is None # nothing was pushed
352 353 and remotephases.get('publishing', False)):
353 354 # When:
354 355 # - this is a subrepo push
355 356 # - and remote support phase
356 357 # - and no changeset was pushed
357 358 # - and remote is publishing
358 359 # We may be in issue 3871 case!
359 360 # We drop the possible phase synchronisation done by
360 361 # courtesy to publish changesets possibly locally draft
361 362 # on the remote.
362 363 remotephases = {'publishing': 'True'}
363 364 if not remotephases: # old server or public only reply from non-publishing
364 365 _localphasemove(pushop, cheads)
365 366 # don't push any phase data as there is nothing to push
366 367 else:
367 368 ana = phases.analyzeremotephases(pushop.repo, cheads,
368 369 remotephases)
369 370 pheads, droots = ana
370 371 ### Apply remote phase on local
371 372 if remotephases.get('publishing', False):
372 373 _localphasemove(pushop, cheads)
373 374 else: # publish = False
374 375 _localphasemove(pushop, pheads)
375 376 _localphasemove(pushop, cheads, phases.draft)
376 377 ### Apply local phase on remote
377 378
378 379 # Get the list of all revs draft on remote by public here.
379 380 # XXX Beware that the revset breaks if droots is not strictly
380 381 # XXX roots; we may want to ensure it is, but that is costly
381 382 outdated = unfi.set('heads((%ln::%ln) and public())',
382 383 droots, cheads)
383 384
384 385 b2caps = bundle2.bundle2caps(pushop.remote)
385 386 if 'b2x:pushkey' in b2caps:
386 387 # server supports bundle2, let's do a batched push through it
387 388 #
388 389 # This will eventually be unified with the changesets bundle2 push
389 390 bundler = bundle2.bundle20(pushop.ui, b2caps)
390 391 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
391 392 bundler.newpart('b2x:replycaps', data=capsblob)
392 393 part2node = []
393 394 enc = pushkey.encode
394 395 for newremotehead in outdated:
395 396 part = bundler.newpart('b2x:pushkey')
396 397 part.addparam('namespace', enc('phases'))
397 398 part.addparam('key', enc(newremotehead.hex()))
398 399 part.addparam('old', enc(str(phases.draft)))
399 400 part.addparam('new', enc(str(phases.public)))
400 401 part2node.append((part.id, newremotehead))
401 402 stream = util.chunkbuffer(bundler.getchunks())
402 403 try:
403 404 reply = pushop.remote.unbundle(stream, ['force'], 'push')
404 405 op = bundle2.processbundle(pushop.repo, reply)
405 406 except error.BundleValueError, exc:
406 407 raise util.Abort('missing support for %s' % exc)
407 408 for partid, node in part2node:
408 409 partrep = op.records.getreplies(partid)
409 410 results = partrep['pushkey']
410 411 assert len(results) <= 1
411 412 msg = None
412 413 if not results:
413 414 msg = _('server ignored update of %s to public!\n') % node
414 415 elif not int(results[0]['return']):
415 416 msg = _('updating %s to public failed!\n') % node
416 417 if msg is not None:
417 418 pushop.ui.warn(msg)
418 419
419 420 else:
420 421 # fall back to independent pushkey commands
421 422 for newremotehead in outdated:
422 423 r = pushop.remote.pushkey('phases',
423 424 newremotehead.hex(),
424 425 str(phases.draft),
425 426 str(phases.public))
426 427 if not r:
427 428 pushop.ui.warn(_('updating %s to public failed!\n')
428 429 % newremotehead)
429 430
430 431 def _localphasemove(pushop, nodes, phase=phases.public):
431 432 """move <nodes> to <phase> in the local source repo"""
432 433 if pushop.locallocked:
433 434 phases.advanceboundary(pushop.repo, phase, nodes)
434 435 else:
435 436 # repo is not locked, do not change any phases!
436 437 # Informs the user that phases should have been moved when
437 438 # applicable.
438 439 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
439 440 phasestr = phases.phasenames[phase]
440 441 if actualmoves:
441 442 pushop.ui.status(_('cannot lock source repo, skipping '
442 443 'local %s phase update\n') % phasestr)
443 444
444 445 def _pushobsolete(pushop):
445 446 """utility function to push obsolete markers to a remote"""
446 447 pushop.ui.debug('try to push obsolete markers to remote\n')
447 448 repo = pushop.repo
448 449 remote = pushop.remote
449 450 if (obsolete._enabled and repo.obsstore and
450 451 'obsolete' in remote.listkeys('namespaces')):
451 452 rslts = []
452 453 remotedata = repo.listkeys('obsolete')
453 454 for key in sorted(remotedata, reverse=True):
454 455 # reverse sort to ensure we end with dump0
455 456 data = remotedata[key]
456 457 rslts.append(remote.pushkey('obsolete', key, '', data))
457 458 if [r for r in rslts if not r]:
458 459 msg = _('failed to push some obsolete markers!\n')
459 460 repo.ui.warn(msg)
460 461
461 462 def _pushbookmark(pushop):
462 463 """Update bookmark position on remote"""
463 464 ui = pushop.ui
464 465 repo = pushop.repo.unfiltered()
465 466 remote = pushop.remote
466 467 ui.debug("checking for updated bookmarks\n")
467 468 revnums = map(repo.changelog.rev, pushop.revs or [])
468 469 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
469 470 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
470 471 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
471 472 srchex=hex)
472 473
473 474 for b, scid, dcid in advsrc:
474 475 if ancestors and repo[scid].rev() not in ancestors:
475 476 continue
476 477 if remote.pushkey('bookmarks', b, dcid, scid):
477 478 ui.status(_("updating bookmark %s\n") % b)
478 479 else:
479 480 ui.warn(_('updating bookmark %s failed!\n') % b)
480 481
481 482 class pulloperation(object):
482 483 """A object that represent a single pull operation
483 484
484 485 It purpose is to carry push related state and very common operation.
485 486
486 487 A new should be created at the beginning of each pull and discarded
487 488 afterward.
488 489 """
489 490
490 491 def __init__(self, repo, remote, heads=None, force=False):
491 492 # repo we pull into
492 493 self.repo = repo
493 494 # repo we pull from
494 495 self.remote = remote
495 496 # revision we try to pull (None is "all")
496 497 self.heads = heads
497 498 # do we force pull?
498 499 self.force = force
499 500 # the name of the pull transaction
500 501 self._trname = 'pull\n' + util.hidepassword(remote.url())
501 502 # hold the transaction once created
502 503 self._tr = None
503 504 # set of common changesets between local and remote before pull
504 505 self.common = None
505 506 # set of pulled heads
506 507 self.rheads = None
507 508 # list of missing changesets to fetch remotely
508 509 self.fetch = None
509 510 # result of changegroup pulling (used as return code by pull)
510 511 self.cgresult = None
511 512 # list of steps remaining to do (related to future bundle2 usage)
512 513 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
513 514
514 515 @util.propertycache
515 516 def pulledsubset(self):
516 517 """heads of the set of changeset target by the pull"""
517 518 # compute target subset
518 519 if self.heads is None:
519 520 # We pulled every thing possible
520 521 # sync on everything common
521 522 c = set(self.common)
522 523 ret = list(self.common)
523 524 for n in self.rheads:
524 525 if n not in c:
525 526 ret.append(n)
526 527 return ret
527 528 else:
528 529 # We pulled a specific subset
529 530 # sync on this subset
530 531 return self.heads
531 532
532 533 def gettransaction(self):
533 534 """get appropriate pull transaction, creating it if needed"""
534 535 if self._tr is None:
535 536 self._tr = self.repo.transaction(self._trname)
536 537 return self._tr
537 538
538 539 def closetransaction(self):
539 540 """close transaction if created"""
540 541 if self._tr is not None:
541 542 self._tr.close()
542 543
543 544 def releasetransaction(self):
544 545 """release transaction if created"""
545 546 if self._tr is not None:
546 547 self._tr.release()
547 548
548 549 def pull(repo, remote, heads=None, force=False):
549 550 pullop = pulloperation(repo, remote, heads, force)
550 551 if pullop.remote.local():
551 552 missing = set(pullop.remote.requirements) - pullop.repo.supported
552 553 if missing:
553 554 msg = _("required features are not"
554 555 " supported in the destination:"
555 556 " %s") % (', '.join(sorted(missing)))
556 557 raise util.Abort(msg)
557 558
558 559 lock = pullop.repo.lock()
559 560 try:
560 561 _pulldiscovery(pullop)
561 562 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
562 563 and pullop.remote.capable('bundle2-exp')):
563 564 _pullbundle2(pullop)
564 565 if 'changegroup' in pullop.todosteps:
565 566 _pullchangeset(pullop)
566 567 if 'phases' in pullop.todosteps:
567 568 _pullphase(pullop)
568 569 if 'obsmarkers' in pullop.todosteps:
569 570 _pullobsolete(pullop)
570 571 pullop.closetransaction()
571 572 finally:
572 573 pullop.releasetransaction()
573 574 lock.release()
574 575
575 576 return pullop.cgresult
576 577
577 578 def _pulldiscovery(pullop):
578 579 """discovery phase for the pull
579 580
580 581 Currently this handles changeset discovery only; it will change to handle all discovery
581 582 at some point."""
582 583 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
583 584 pullop.remote,
584 585 heads=pullop.heads,
585 586 force=pullop.force)
586 587 pullop.common, pullop.fetch, pullop.rheads = tmp
587 588
588 589 def _pullbundle2(pullop):
589 590 """pull data using bundle2
590 591
591 592 For now, the only supported data is the changegroup."""
592 593 remotecaps = bundle2.bundle2caps(pullop.remote)
593 594 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
594 595 # pulling changegroup
595 596 pullop.todosteps.remove('changegroup')
596 597
597 598 kwargs['common'] = pullop.common
598 599 kwargs['heads'] = pullop.heads or pullop.rheads
599 600 if 'b2x:listkeys' in remotecaps:
600 601 kwargs['listkeys'] = ['phase']
601 602 if not pullop.fetch:
602 603 pullop.repo.ui.status(_("no changes found\n"))
603 604 pullop.cgresult = 0
604 605 else:
605 606 if pullop.heads is None and list(pullop.common) == [nullid]:
606 607 pullop.repo.ui.status(_("requesting all changes\n"))
607 608 _pullbundle2extraprepare(pullop, kwargs)
608 609 if kwargs.keys() == ['format']:
609 610 return # nothing to pull
610 611 bundle = pullop.remote.getbundle('pull', **kwargs)
611 612 try:
612 613 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
613 614 except error.BundleValueError, exc:
614 615 raise util.Abort('missing support for %s' % exc)
615 616
616 617 if pullop.fetch:
617 618 assert len(op.records['changegroup']) == 1
618 619 pullop.cgresult = op.records['changegroup'][0]['return']
619 620
620 621 # processing phases change
621 622 for namespace, value in op.records['listkeys']:
622 623 if namespace == 'phases':
623 624 _pullapplyphases(pullop, value)
624 625
625 626 def _pullbundle2extraprepare(pullop, kwargs):
626 627 """hook function so that extensions can extend the getbundle call"""
627 628 pass
628 629
629 630 def _pullchangeset(pullop):
630 631 """pull changeset from unbundle into the local repo"""
631 632 # We delay opening the transaction as late as possible so we
632 633 # don't open a transaction for nothing and don't break future useful
633 634 # rollback calls
634 635 pullop.todosteps.remove('changegroup')
635 636 if not pullop.fetch:
636 637 pullop.repo.ui.status(_("no changes found\n"))
637 638 pullop.cgresult = 0
638 639 return
639 640 pullop.gettransaction()
640 641 if pullop.heads is None and list(pullop.common) == [nullid]:
641 642 pullop.repo.ui.status(_("requesting all changes\n"))
642 643 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
643 644 # issue1320, avoid a race if remote changed after discovery
644 645 pullop.heads = pullop.rheads
645 646
646 647 if pullop.remote.capable('getbundle'):
647 648 # TODO: get bundlecaps from remote
648 649 cg = pullop.remote.getbundle('pull', common=pullop.common,
649 650 heads=pullop.heads or pullop.rheads)
650 651 elif pullop.heads is None:
651 652 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
652 653 elif not pullop.remote.capable('changegroupsubset'):
653 654 raise util.Abort(_("partial pull cannot be done because "
654 655 "other repository doesn't support "
655 656 "changegroupsubset."))
656 657 else:
657 658 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
658 659 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
659 660 pullop.remote.url())
660 661
661 662 def _pullphase(pullop):
662 663 # Get remote phases data from remote
663 664 remotephases = pullop.remote.listkeys('phases')
664 665 _pullapplyphases(pullop, remotephases)
665 666
666 667 def _pullapplyphases(pullop, remotephases):
667 668 """apply phase movement from observed remote state"""
668 669 pullop.todosteps.remove('phases')
669 670 publishing = bool(remotephases.get('publishing', False))
670 671 if remotephases and not publishing:
671 672 # remote is new and unpublishing
672 673 pheads, _dr = phases.analyzeremotephases(pullop.repo,
673 674 pullop.pulledsubset,
674 675 remotephases)
675 676 phases.advanceboundary(pullop.repo, phases.public, pheads)
676 677 phases.advanceboundary(pullop.repo, phases.draft,
677 678 pullop.pulledsubset)
678 679 else:
679 680 # Remote is old or publishing; all common changesets
680 681 # should be seen as public
681 682 phases.advanceboundary(pullop.repo, phases.public,
682 683 pullop.pulledsubset)
683 684
684 685 def _pullobsolete(pullop):
685 686 """utility function to pull obsolete markers from a remote
686 687
687 688 The `gettransaction` function returns the pull transaction, creating
688 689 one if necessary. We return the transaction to inform the calling code that
689 690 a new transaction has been created (when applicable).
690 691
691 692 Exists mostly to allow overriding for experimentation purposes"""
692 693 pullop.todosteps.remove('obsmarkers')
693 694 tr = None
694 695 if obsolete._enabled:
695 696 pullop.repo.ui.debug('fetching remote obsolete markers\n')
696 697 remoteobs = pullop.remote.listkeys('obsolete')
697 698 if 'dump0' in remoteobs:
698 699 tr = pullop.gettransaction()
699 700 for key in sorted(remoteobs, reverse=True):
700 701 if key.startswith('dump'):
701 702 data = base85.b85decode(remoteobs[key])
702 703 pullop.repo.obsstore.mergemarkers(tr, data)
703 704 pullop.repo.invalidatevolatilesets()
704 705 return tr
705 706
706 707 def caps20to10(repo):
707 708 """return a set with appropriate options to use bundle20 during getbundle"""
708 709 caps = set(['HG2X'])
709 710 capsblob = bundle2.encodecaps(repo.bundle2caps)
710 711 caps.add('bundle2=' + urllib.quote(capsblob))
711 712 return caps
712 713
713 714 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
714 715 **kwargs):
715 716 """return a full bundle (with potentially multiple kind of parts)
716 717
717 718 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
718 719 passed. For now, the bundle can contain only changegroup, but this will
719 720 change when more part types become available for bundle2.
720 721
721 722 This is different from changegroup.getbundle that only returns an HG10
722 723 changegroup bundle. They may eventually get reunited in the future when we
723 724 have a clearer idea of the API we want to use to query different data.
724 725
725 726 The implementation is at a very early stage and will get massive rework
726 727 when the API of bundle is refined.
727 728 """
728 729 # build changegroup bundle here.
729 730 cg = changegroup.getbundle(repo, source, heads=heads,
730 731 common=common, bundlecaps=bundlecaps)
731 732 if bundlecaps is None or 'HG2X' not in bundlecaps:
732 733 if kwargs:
733 734 raise ValueError(_('unsupported getbundle arguments: %s')
734 735 % ', '.join(sorted(kwargs.keys())))
735 736 return cg
736 737 # very crude first implementation,
737 738 # the bundle API will change and the generation will be done lazily.
738 739 b2caps = {}
739 740 for bcaps in bundlecaps:
740 741 if bcaps.startswith('bundle2='):
741 742 blob = urllib.unquote(bcaps[len('bundle2='):])
742 743 b2caps.update(bundle2.decodecaps(blob))
743 744 bundler = bundle2.bundle20(repo.ui, b2caps)
744 745 if cg:
745 746 bundler.newpart('b2x:changegroup', data=cg.getchunks())
746 747 listkeys = kwargs.get('listkeys', ())
747 748 for namespace in listkeys:
748 749 part = bundler.newpart('b2x:listkeys')
749 750 part.addparam('namespace', namespace)
750 751 keys = repo.listkeys(namespace).items()
751 752 part.data = pushkey.encodekeys(keys)
752 753 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
753 754 bundlecaps=bundlecaps, **kwargs)
754 755 return util.chunkbuffer(bundler.getchunks())
755 756
756 757 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
757 758 bundlecaps=None, **kwargs):
758 759 """hook function to let extensions add parts to the requested bundle"""
759 760 pass
760 761
761 762 def check_heads(repo, their_heads, context):
762 763 """check if the heads of a repo have been modified
763 764
764 765 Used by peer for unbundling.
765 766 """
766 767 heads = repo.heads()
767 768 heads_hash = util.sha1(''.join(sorted(heads))).digest()
768 769 if not (their_heads == ['force'] or their_heads == heads or
769 770 their_heads == ['hashed', heads_hash]):
770 771 # someone else committed/pushed/unbundled while we
771 772 # were transferring data
772 773 raise error.PushRaced('repository changed while %s - '
773 774 'please try again' % context)
774 775
775 776 def unbundle(repo, cg, heads, source, url):
776 777 """Apply a bundle to a repo.
777 778
778 779 This function makes sure the repo is locked during the application and has a
779 780 mechanism to check that no push race occurred between the creation of the
780 781 bundle and its application.
781 782
782 783 If the push was raced, a PushRaced exception is raised."""
783 784 r = 0
784 785 # need a transaction when processing a bundle2 stream
785 786 tr = None
786 787 lock = repo.lock()
787 788 try:
788 789 check_heads(repo, heads, 'uploading changes')
789 790 # push can proceed
790 791 if util.safehasattr(cg, 'params'):
791 792 try:
792 793 tr = repo.transaction('unbundle')
793 794 tr.hookargs['bundle2-exp'] = '1'
794 795 r = bundle2.processbundle(repo, cg, lambda: tr).reply
795 796 cl = repo.unfiltered().changelog
796 797 p = cl.writepending() and repo.root or ""
797 798 repo.hook('b2x-pretransactionclose', throw=True, source=source,
798 799 url=url, pending=p, **tr.hookargs)
799 800 tr.close()
800 801 repo.hook('b2x-transactionclose', source=source, url=url,
801 802 **tr.hookargs)
802 803 except Exception, exc:
803 804 exc.duringunbundle2 = True
804 805 raise
805 806 else:
806 807 r = changegroup.addchangegroup(repo, cg, source, url)
807 808 finally:
808 809 if tr is not None:
809 810 tr.release()
810 811 lock.release()
811 812 return r
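Both reply handlers above resolve the server's answer by part id: _pushb2ctx keeps cgpart.id and later reads op.records.getreplies(cgpart.id), and the phase synchronisation records (part.id, newremotehead) pairs in part2node before checking each pushkey result. The miniature below is a hypothetical illustration of that keyed-records idea, not the real bundle2 API.

    # Hypothetical container mapping part id -> {category -> [results]}.
    class replyrecords(object):
        def __init__(self):
            self._byid = {}

        def add(self, partid, category, result):
            self._byid.setdefault(partid, {}).setdefault(category, []).append(result)

        def getreplies(self, partid):
            return self._byid.get(partid, {})

    records = replyrecords()
    records.add(0, 'changegroup', {'return': 1})
    records.add(1, 'pushkey', {'return': True})

    # a changegroup reply handler looks up the id of the part it sent
    assert records.getreplies(0)['changegroup'][0]['return'] == 1
    # a pushkey reply handler does the same for each (partid, node) pair queued
    assert records.getreplies(1)['pushkey'][0]['return'] is True

The second hunk, which follows, is the bundle2 test script updated by this changeset.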
@@ -1,1107 +1,1106
1 1
2 2 Create an extension to test bundle2 API
3 3
4 4 $ cat > bundle2.py << EOF
5 5 > """A small extension to test bundle2 implementation
6 6 >
7 7 > The current bundle2 implementation is far too limited to be used in any core
8 8 > code. We still need to be able to test it while it grows up.
9 9 > """
10 10 >
11 11 > try:
12 12 > import msvcrt
13 13 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
14 14 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
15 15 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
16 16 > except ImportError:
17 17 > pass
18 18 >
19 19 > import sys
20 20 > from mercurial import cmdutil
21 21 > from mercurial import util
22 22 > from mercurial import bundle2
23 23 > from mercurial import scmutil
24 24 > from mercurial import discovery
25 25 > from mercurial import changegroup
26 26 > from mercurial import error
27 27 > cmdtable = {}
28 28 > command = cmdutil.command(cmdtable)
29 29 >
30 30 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
31 31 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
32 32 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
33 33 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
34 34 >
35 35 > @bundle2.parthandler('test:song')
36 36 > def songhandler(op, part):
37 37 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
38 38 > op.ui.write('The choir starts singing:\n')
39 39 > verses = 0
40 40 > for line in part.read().split('\n'):
41 41 > op.ui.write(' %s\n' % line)
42 42 > verses += 1
43 43 > op.records.add('song', {'verses': verses})
44 44 >
45 45 > @bundle2.parthandler('test:ping')
46 46 > def pinghandler(op, part):
47 47 > op.ui.write('received ping request (id %i)\n' % part.id)
48 48 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
49 49 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
50 50 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
51 51 >
52 52 > @bundle2.parthandler('test:debugreply')
53 53 > def debugreply(op, part):
54 54 > """print data about the capacity of the bundle reply"""
55 55 > if op.reply is None:
56 56 > op.ui.write('debugreply: no reply\n')
57 57 > else:
58 58 > op.ui.write('debugreply: capabilities:\n')
59 59 > for cap in sorted(op.reply.capabilities):
60 60 > op.ui.write('debugreply: %r\n' % cap)
61 61 > for val in op.reply.capabilities[cap]:
62 62 > op.ui.write('debugreply: %r\n' % val)
63 63 >
64 64 > @command('bundle2',
65 65 > [('', 'param', [], 'stream level parameter'),
66 66 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
67 67 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
68 68 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
69 69 > ('', 'reply', False, 'produce a reply bundle'),
70 70 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
71 71 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
72 72 > '[OUTPUTFILE]')
73 73 > def cmdbundle2(ui, repo, path=None, **opts):
74 74 > """write a bundle2 container on standard ouput"""
75 75 > bundler = bundle2.bundle20(ui)
76 76 > for p in opts['param']:
77 77 > p = p.split('=', 1)
78 78 > try:
79 79 > bundler.addparam(*p)
80 80 > except ValueError, exc:
81 81 > raise util.Abort('%s' % exc)
82 82 >
83 83 > if opts['reply']:
84 84 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
85 85 > bundler.newpart('b2x:replycaps', data=capsstring)
86 86 >
87 87 > if opts['pushrace']:
88 88 > # also serves to test the assignment of data outside of init
89 89 > part = bundler.newpart('b2x:check:heads')
90 90 > part.data = '01234567890123456789'
91 91 >
92 92 > revs = opts['rev']
93 93 > if 'rev' in opts:
94 94 > revs = scmutil.revrange(repo, opts['rev'])
95 95 > if revs:
96 96 > # very crude version of a changegroup part creation
97 97 > bundled = repo.revs('%ld::%ld', revs, revs)
98 98 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
99 99 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
100 100 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
101 101 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
102 102 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
103 103 >
104 104 > if opts['parts']:
105 105 > bundler.newpart('test:empty')
106 106 > # add a second one to make sure we handle multiple parts
107 107 > bundler.newpart('test:empty')
108 108 > bundler.newpart('test:song', data=ELEPHANTSSONG)
109 109 > bundler.newpart('test:debugreply')
110 110 > mathpart = bundler.newpart('test:math')
111 111 > mathpart.addparam('pi', '3.14')
112 112 > mathpart.addparam('e', '2.72')
113 113 > mathpart.addparam('cooking', 'raw', mandatory=False)
114 114 > mathpart.data = '42'
115 115 > # advisory known part with unknown mandatory param
116 116 > bundler.newpart('test:song', [('randomparam','')])
117 117 > if opts['unknown']:
118 118 > bundler.newpart('test:UNKNOWN', data='some random content')
119 119 > if opts['unknownparams']:
120 120 > bundler.newpart('test:SONG', [('randomparams', '')])
121 121 > if opts['parts']:
122 122 > bundler.newpart('test:ping')
123 123 >
124 124 > if path is None:
125 125 > file = sys.stdout
126 126 > else:
127 127 > file = open(path, 'w')
128 128 >
129 129 > for chunk in bundler.getchunks():
130 130 > file.write(chunk)
131 131 >
132 132 > @command('unbundle2', [], '')
133 133 > def cmdunbundle2(ui, repo, replypath=None):
134 134 > """process a bundle2 stream from stdin on the current repo"""
135 135 > try:
136 136 > tr = None
137 137 > lock = repo.lock()
138 138 > tr = repo.transaction('processbundle')
139 139 > try:
140 140 > unbundler = bundle2.unbundle20(ui, sys.stdin)
141 141 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
142 142 > tr.close()
143 143 > except error.BundleValueError, exc:
144 144 > raise util.Abort('missing support for %s' % exc)
145 145 > except error.PushRaced, exc:
146 146 > raise util.Abort('push race: %s' % exc)
147 147 > finally:
148 148 > if tr is not None:
149 149 > tr.release()
150 150 > lock.release()
151 151 > remains = sys.stdin.read()
152 152 > ui.write('%i unread bytes\n' % len(remains))
153 153 > if op.records['song']:
154 154 > totalverses = sum(r['verses'] for r in op.records['song'])
155 155 > ui.write('%i total verses sung\n' % totalverses)
156 156 > for rec in op.records['changegroup']:
157 157 > ui.write('addchangegroup return: %i\n' % rec['return'])
158 158 > if op.reply is not None and replypath is not None:
159 159 > file = open(replypath, 'w')
160 160 > for chunk in op.reply.getchunks():
161 161 > file.write(chunk)
162 162 >
163 163 > @command('statbundle2', [], '')
164 164 > def cmdstatbundle2(ui, repo):
165 165 > """print statistic on the bundle2 container read from stdin"""
166 166 > unbundler = bundle2.unbundle20(ui, sys.stdin)
167 167 > try:
168 168 > params = unbundler.params
169 169 > except error.BundleValueError, exc:
170 170 > raise util.Abort('unknown parameters: %s' % exc)
171 171 > ui.write('options count: %i\n' % len(params))
172 172 > for key in sorted(params):
173 173 > ui.write('- %s\n' % key)
174 174 > value = params[key]
175 175 > if value is not None:
176 176 > ui.write(' %s\n' % value)
177 177 > count = 0
178 178 > for p in unbundler.iterparts():
179 179 > count += 1
180 180 > ui.write(' :%s:\n' % p.type)
181 181 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
182 182 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
183 183 > ui.write(' payload: %i bytes\n' % len(p.read()))
184 184 > ui.write('parts count: %i\n' % count)
185 185 > EOF
186 186 $ cat >> $HGRCPATH << EOF
187 187 > [extensions]
188 188 > bundle2=$TESTTMP/bundle2.py
189 189 > [experimental]
190 190 > bundle2-exp=True
191 191 > [ui]
192 192 > ssh=python "$TESTDIR/dummyssh"
193 193 > logtemplate={rev}:{node|short} {phase} {author} {desc|firstline}
194 194 > [web]
195 195 > push_ssl = false
196 196 > allow_push = *
197 197 > [phases]
198 198 > publish=False
199 199 > EOF
200 200
201 201 The extension requires a repo (currently unused)
202 202
203 203 $ hg init main
204 204 $ cd main
205 205 $ touch a
206 206 $ hg add a
207 207 $ hg commit -m 'a'
208 208
209 209
210 210 Empty bundle
211 211 =================
212 212
213 213 - no option
214 214 - no parts
215 215
216 216 Test bundling
217 217
218 218 $ hg bundle2
219 219 HG2X\x00\x00\x00\x00 (no-eol) (esc)
220 220
221 221 Test unbundling
222 222
223 223 $ hg bundle2 | hg statbundle2
224 224 options count: 0
225 225 parts count: 0
226 226
227 227 Test old style bundles are detected and refused
228 228
229 229 $ hg bundle --all ../bundle.hg
230 230 1 changesets found
231 231 $ hg statbundle2 < ../bundle.hg
232 232 abort: unknown bundle version 10
233 233 [255]
234 234
235 235 Test parameters
236 236 =================
237 237
238 238 - some options
239 239 - no parts
240 240
241 241 advisory parameters, no value
242 242 -------------------------------
243 243
244 244 Simplest possible parameters form
245 245
246 246 Test generation simple option
247 247
248 248 $ hg bundle2 --param 'caution'
249 249 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
250 250
251 251 Test unbundling
252 252
253 253 $ hg bundle2 --param 'caution' | hg statbundle2
254 254 options count: 1
255 255 - caution
256 256 parts count: 0
257 257
258 258 Test generation multiple option
259 259
260 260 $ hg bundle2 --param 'caution' --param 'meal'
261 261 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
262 262
263 263 Test unbundling
264 264
265 265 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
266 266 options count: 2
267 267 - caution
268 268 - meal
269 269 parts count: 0
270 270
271 271 advisory parameters, with value
272 272 -------------------------------
273 273
274 274 Test generation
275 275
276 276 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
277 277 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
278 278
279 279 Test unbundling
280 280
281 281 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
282 282 options count: 3
283 283 - caution
284 284 - elephants
285 285 - meal
286 286 vegan
287 287 parts count: 0
288 288
289 289 parameter with special char in value
290 290 ---------------------------------------------------
291 291
292 292 Test generation
293 293
294 294 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
295 295 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
296 296
297 297 Test unbundling
298 298
299 299 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
300 300 options count: 2
301 301 - e|! 7/
302 302 babar%#==tutu
303 303 - simple
304 304 parts count: 0
305 305
306 306 Test unknown mandatory option
307 307 ---------------------------------------------------
308 308
309 309 $ hg bundle2 --param 'Gravity' | hg statbundle2
310 310 abort: unknown parameters: Stream Parameter - Gravity
311 311 [255]
312 312
313 313 Test debug output
314 314 ---------------------------------------------------
315 315
316 316 bundling debug
317 317
318 318 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
319 319 start emission of HG2X stream
320 320 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
321 321 start of parts
322 322 end of bundle
323 323
324 324 file content is ok
325 325
326 326 $ cat ../out.hg2
327 327 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
328 328
329 329 unbundling debug
330 330
331 331 $ hg statbundle2 --debug < ../out.hg2
332 332 start processing of HG2X stream
333 333 reading bundle2 stream parameters
334 334 ignoring unknown parameter 'e|! 7/'
335 335 ignoring unknown parameter 'simple'
336 336 options count: 2
337 337 - e|! 7/
338 338 babar%#==tutu
339 339 - simple
340 340 start extraction of bundle2 parts
341 341 part header size: 0
342 342 end of bundle2 stream
343 343 parts count: 0
344 344
345 345
346 346 Test buggy input
347 347 ---------------------------------------------------
348 348
349 349 empty parameter name
350 350
351 351 $ hg bundle2 --param '' --quiet
352 352 abort: empty parameter name
353 353 [255]
354 354
355 355 bad parameter name
356 356
357 357 $ hg bundle2 --param 42babar
358 358 abort: non letter first character: '42babar'
359 359 [255]
360 360
361 361
362 362 Test part
363 363 =================
364 364
365 365 $ hg bundle2 --parts ../parts.hg2 --debug
366 366 start emission of HG2X stream
367 367 bundle parameter:
368 368 start of parts
369 369 bundle part: "test:empty"
370 370 bundle part: "test:empty"
371 371 bundle part: "test:song"
372 372 bundle part: "test:debugreply"
373 373 bundle part: "test:math"
374 374 bundle part: "test:song"
375 375 bundle part: "test:ping"
376 376 end of bundle
377 377
378 378 $ cat ../parts.hg2
379 379 HG2X\x00\x00\x00\x11 (esc)
380 380 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
381 381 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
382 382 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
383 383 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
384 384
385 385
386 386 $ hg statbundle2 < ../parts.hg2
387 387 options count: 0
388 388 :test:empty:
389 389 mandatory: 0
390 390 advisory: 0
391 391 payload: 0 bytes
392 392 :test:empty:
393 393 mandatory: 0
394 394 advisory: 0
395 395 payload: 0 bytes
396 396 :test:song:
397 397 mandatory: 0
398 398 advisory: 0
399 399 payload: 178 bytes
400 400 :test:debugreply:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 :test:math:
405 405 mandatory: 2
406 406 advisory: 1
407 407 payload: 2 bytes
408 408 :test:song:
409 409 mandatory: 1
410 410 advisory: 0
411 411 payload: 0 bytes
412 412 :test:ping:
413 413 mandatory: 0
414 414 advisory: 0
415 415 payload: 0 bytes
416 416 parts count: 7
417 417
418 418 $ hg statbundle2 --debug < ../parts.hg2
419 419 start processing of HG2X stream
420 420 reading bundle2 stream parameters
421 421 options count: 0
422 422 start extraction of bundle2 parts
423 423 part header size: 17
424 424 part type: "test:empty"
425 425 part id: "0"
426 426 part parameters: 0
427 427 :test:empty:
428 428 mandatory: 0
429 429 advisory: 0
430 430 payload chunk size: 0
431 431 payload: 0 bytes
432 432 part header size: 17
433 433 part type: "test:empty"
434 434 part id: "1"
435 435 part parameters: 0
436 436 :test:empty:
437 437 mandatory: 0
438 438 advisory: 0
439 439 payload chunk size: 0
440 440 payload: 0 bytes
441 441 part header size: 16
442 442 part type: "test:song"
443 443 part id: "2"
444 444 part parameters: 0
445 445 :test:song:
446 446 mandatory: 0
447 447 advisory: 0
448 448 payload chunk size: 178
449 449 payload chunk size: 0
450 450 payload: 178 bytes
451 451 part header size: 22
452 452 part type: "test:debugreply"
453 453 part id: "3"
454 454 part parameters: 0
455 455 :test:debugreply:
456 456 mandatory: 0
457 457 advisory: 0
458 458 payload chunk size: 0
459 459 payload: 0 bytes
460 460 part header size: 43
461 461 part type: "test:math"
462 462 part id: "4"
463 463 part parameters: 3
464 464 :test:math:
465 465 mandatory: 2
466 466 advisory: 1
467 467 payload chunk size: 2
468 468 payload chunk size: 0
469 469 payload: 2 bytes
470 470 part header size: 29
471 471 part type: "test:song"
472 472 part id: "5"
473 473 part parameters: 1
474 474 :test:song:
475 475 mandatory: 1
476 476 advisory: 0
477 477 payload chunk size: 0
478 478 payload: 0 bytes
479 479 part header size: 16
480 480 part type: "test:ping"
481 481 part id: "6"
482 482 part parameters: 0
483 483 :test:ping:
484 484 mandatory: 0
485 485 advisory: 0
486 486 payload chunk size: 0
487 487 payload: 0 bytes
488 488 part header size: 0
489 489 end of bundle2 stream
490 490 parts count: 7
491 491
492 492 Test actual unbundling of test part
493 493 =======================================
494 494
495 495 Process the bundle
496 496
497 497 $ hg unbundle2 --debug < ../parts.hg2
498 498 start processing of HG2X stream
499 499 reading bundle2 stream parameters
500 500 start extraction of bundle2 parts
501 501 part header size: 17
502 502 part type: "test:empty"
503 503 part id: "0"
504 504 part parameters: 0
505 505 ignoring unsupported advisory part test:empty
506 506 payload chunk size: 0
507 507 part header size: 17
508 508 part type: "test:empty"
509 509 part id: "1"
510 510 part parameters: 0
511 511 ignoring unsupported advisory part test:empty
512 512 payload chunk size: 0
513 513 part header size: 16
514 514 part type: "test:song"
515 515 part id: "2"
516 516 part parameters: 0
517 517 found a handler for part 'test:song'
518 518 The choir starts singing:
519 519 payload chunk size: 178
520 520 payload chunk size: 0
521 521 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
522 522 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
523 523 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
524 524 part header size: 22
525 525 part type: "test:debugreply"
526 526 part id: "3"
527 527 part parameters: 0
528 528 found a handler for part 'test:debugreply'
529 529 debugreply: no reply
530 530 payload chunk size: 0
531 531 part header size: 43
532 532 part type: "test:math"
533 533 part id: "4"
534 534 part parameters: 3
535 535 ignoring unsupported advisory part test:math
536 536 payload chunk size: 2
537 537 payload chunk size: 0
538 538 part header size: 29
539 539 part type: "test:song"
540 540 part id: "5"
541 541 part parameters: 1
542 542 found a handler for part 'test:song'
543 543 ignoring unsupported advisory part test:song - randomparam
544 544 payload chunk size: 0
545 545 part header size: 16
546 546 part type: "test:ping"
547 547 part id: "6"
548 548 part parameters: 0
549 549 found a handler for part 'test:ping'
550 550 received ping request (id 6)
551 551 payload chunk size: 0
552 552 part header size: 0
553 553 end of bundle2 stream
554 554 0 unread bytes
555 555 3 total verses sung
556 556
557 557 Unbundle with an unknown mandatory part
558 558 (should abort)
559 559
560 560 $ hg bundle2 --parts --unknown ../unknown.hg2
561 561
562 562 $ hg unbundle2 < ../unknown.hg2
563 563 The choir starts singing:
564 564 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
565 565 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
566 566 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
567 567 debugreply: no reply
568 568 0 unread bytes
569 569 abort: missing support for test:unknown
570 570 [255]
571 571
572 572 Unbundle with unknown mandatory part parameters
573 573 (should abort)
574 574
575 575 $ hg bundle2 --unknownparams ../unknown.hg2
576 576
577 577 $ hg unbundle2 < ../unknown.hg2
578 578 0 unread bytes
579 579 abort: missing support for test:song - randomparams
580 580 [255]
581 581
582 582 unbundle with a reply
583 583
584 584 $ hg bundle2 --parts --reply ../parts-reply.hg2
585 585 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
586 586 0 unread bytes
587 587 3 total verses sung
588 588
589 589 The reply is a bundle
590 590
591 591 $ cat ../reply.hg2
592 592 HG2X\x00\x00\x00\x1f (esc)
593 593 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
594 594 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
595 595 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
596 596 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
597 597 \x00\x00\x00\x00\x00\x1f (esc)
598 598 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
599 599 debugreply: 'city=!'
600 600 debugreply: 'celeste,ville'
601 601 debugreply: 'elephants'
602 602 debugreply: 'babar'
603 603 debugreply: 'celeste'
604 604 debugreply: 'ping-pong'
605 605 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
606 606 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
607 607 replying to ping request (id 7)
608 608 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
609 609
610 610 The reply is valid
611 611
612 612 $ hg statbundle2 < ../reply.hg2
613 613 options count: 0
614 614 :b2x:output:
615 615 mandatory: 0
616 616 advisory: 1
617 617 payload: 217 bytes
618 618 :b2x:output:
619 619 mandatory: 0
620 620 advisory: 1
621 621 payload: 201 bytes
622 622 :test:pong:
623 623 mandatory: 1
624 624 advisory: 0
625 625 payload: 0 bytes
626 626 :b2x:output:
627 627 mandatory: 0
628 628 advisory: 1
629 629 payload: 61 bytes
630 630 parts count: 4
631 631
632 632 Unbundle the reply to get the output:
633 633
634 634 $ hg unbundle2 < ../reply.hg2
635 635 remote: The choir starts singing:
636 636 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
637 637 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
638 638 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
639 639 remote: debugreply: capabilities:
640 640 remote: debugreply: 'city=!'
641 641 remote: debugreply: 'celeste,ville'
642 642 remote: debugreply: 'elephants'
643 643 remote: debugreply: 'babar'
644 644 remote: debugreply: 'celeste'
645 645 remote: debugreply: 'ping-pong'
646 646 remote: received ping request (id 7)
647 647 remote: replying to ping request (id 7)
648 648 0 unread bytes
649 649
650 650 Test push race detection
651 651
652 652 $ hg bundle2 --pushrace ../part-race.hg2
653 653
654 654 $ hg unbundle2 < ../part-race.hg2
655 655 0 unread bytes
656 656 abort: push race: repository changed while pushing - please try again
657 657 [255]
658 658
659 659 Support for changegroup
660 660 ===================================
661 661
662 662 $ hg unbundle $TESTDIR/bundles/rebase.hg
663 663 adding changesets
664 664 adding manifests
665 665 adding file changes
666 666 added 8 changesets with 7 changes to 7 files (+3 heads)
667 667 (run 'hg heads' to see heads, 'hg merge' to merge)
668 668
669 669 $ hg log -G
670 670 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
671 671 |
672 672 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
673 673 |/|
674 674 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
675 675 | |
676 676 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
677 677 |/
678 678 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
679 679 | |
680 680 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
681 681 | |
682 682 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
683 683 |/
684 684 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
685 685
686 686 @ 0:3903775176ed draft test a
687 687
688 688
689 689 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
690 690 4 changesets found
691 691 list of changesets:
692 692 32af7686d403cf45b5d95f2d70cebea587ac806a
693 693 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
694 694 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
695 695 02de42196ebee42ef284b6780a87cdc96e8eaab6
696 696 start emission of HG2X stream
697 697 bundle parameter:
698 698 start of parts
699 699 bundle part: "b2x:changegroup"
700 700 bundling: 1/4 changesets (25.00%)
701 701 bundling: 2/4 changesets (50.00%)
702 702 bundling: 3/4 changesets (75.00%)
703 703 bundling: 4/4 changesets (100.00%)
704 704 bundling: 1/4 manifests (25.00%)
705 705 bundling: 2/4 manifests (50.00%)
706 706 bundling: 3/4 manifests (75.00%)
707 707 bundling: 4/4 manifests (100.00%)
708 708 bundling: D 1/3 files (33.33%)
709 709 bundling: E 2/3 files (66.67%)
710 710 bundling: H 3/3 files (100.00%)
711 711 end of bundle
712 712
713 713 $ cat ../rev.hg2
714 714 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
715 715 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
716 716 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
717 717 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
718 718 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
719 719 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
720 720 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
721 721 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
722 722 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
723 723 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
724 724 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
725 725 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
726 726 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
727 727 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
728 728 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
729 729 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
730 730 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
731 731 l\r (no-eol) (esc)
732 732 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
733 733 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
734 734 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
735 735 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
736 736
737 737 $ hg unbundle2 < ../rev.hg2
738 738 adding changesets
739 739 adding manifests
740 740 adding file changes
741 741 added 0 changesets with 0 changes to 3 files
742 742 0 unread bytes
743 743 addchangegroup return: 1
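
As the debug output and the hex dump show, the `b2x:changegroup` part payload is an ordinary changegroup stream carried inside a single bundle2 part. A hedged sketch of producing such a bundle with the same part API the failpush extension uses further down; `bundle2.bundle20` is an assumption about this version's internal API, not a stable interface:

    from mercurial import bundle2

    def wrapchangegroup(ui, cgdata):
        # 'cgdata' is assumed to be raw HG10 changegroup bytes, already
        # generated for the revisions being bundled (e.g. '8+7+5+4' above)
        bundler = bundle2.bundle20(ui)
        bundler.newpart('b2x:changegroup', data=cgdata)
        return bundler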
744 744
745 745 with reply
746 746
747 747 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
748 748 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
749 749 0 unread bytes
750 750 addchangegroup return: 1
751 751
752 752 $ cat ../rev-reply.hg2
753 753 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
754 754 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
755 755 adding manifests
756 756 adding file changes
757 757 added 0 changesets with 0 changes to 3 files
758 758 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
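
The `b2x:reply:changegroup` part above carries an `in-reply-to` parameter naming the part it answers and a `return` parameter with the addchangegroup() result. On the pushing side, a bundle2 parts generator (same `(pushop, bundler)` shape as the failpush extension below) can ask to see that answer by returning a callable; non-callable return values are ignored, which is why the dummy return is dropped from the test extension in this change. A hedged sketch of the convention; the changegroup data is a placeholder and the records access is only suggested in comments:

    def _pushbundle2changegroup(pushop, bundler):
        cgdata = b''  # placeholder: the real generator builds an HG10
                      # changegroup stream from pushop.outgoing here
        bundler.newpart('b2x:changegroup', data=cgdata)

        def handlereply(op):
            # called once the reply bundle has been processed; the 'return'
            # values recorded for this part can be read from 'op' here and
            # turned into the overall push result
            pass

        return handlereply  # only a callable return is used as reply handler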
759 759
760 760 Real world exchange
761 761 =====================
762 762
763 763
764 764 clone --pull
765 765
766 766 $ cd ..
767 767 $ hg -R main phase --public cd010b8cd998
768 768 $ hg clone main other --pull --rev 9520eea781bc
769 769 adding changesets
770 770 adding manifests
771 771 adding file changes
772 772 added 2 changesets with 2 changes to 2 files
773 773 updating to branch default
774 774 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
775 775 $ hg -R other log -G
776 776 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
777 777 |
778 778 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
779 779
780 780
781 781 pull
782 782
783 783 $ hg -R main phase --public 9520eea781bc
784 784 $ hg -R other pull -r 24b6387c8c8c
785 785 pulling from $TESTTMP/main (glob)
786 786 searching for changes
787 787 adding changesets
788 788 adding manifests
789 789 adding file changes
790 790 added 1 changesets with 1 changes to 1 files (+1 heads)
791 791 (run 'hg heads' to see heads, 'hg merge' to merge)
792 792 $ hg -R other log -G
793 793 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
794 794 |
795 795 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
796 796 |/
797 797 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
798 798
799 799
800 800 pull empty (with phase movement)
801 801
802 802 $ hg -R main phase --public 24b6387c8c8c
803 803 $ hg -R other pull -r 24b6387c8c8c
804 804 pulling from $TESTTMP/main (glob)
805 805 no changes found
806 806 $ hg -R other log -G
807 807 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
808 808 |
809 809 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
810 810 |/
811 811 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
812 812
813 813 pull empty
814 814
815 815 $ hg -R other pull -r 24b6387c8c8c
816 816 pulling from $TESTTMP/main (glob)
817 817 no changes found
818 818 $ hg -R other log -G
819 819 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
820 820 |
821 821 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
822 822 |/
823 823 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
824 824
825 825
826 826 push
827 827
828 828 $ hg -R main phase --public eea13746799a
829 829 $ hg -R main push other --rev eea13746799a
830 830 pushing to other
831 831 searching for changes
832 832 remote: adding changesets
833 833 remote: adding manifests
834 834 remote: adding file changes
835 835 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
836 836 $ hg -R other log -G
837 837 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
838 838 |\
839 839 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
840 840 | |
841 841 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
842 842 |/
843 843 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
844 844
845 845
846 846 pull over ssh
847 847
848 848 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
849 849 pulling from ssh://user@dummy/main
850 850 searching for changes
851 851 adding changesets
852 852 adding manifests
853 853 adding file changes
854 854 added 1 changesets with 1 changes to 1 files (+1 heads)
855 855 (run 'hg heads' to see heads, 'hg merge' to merge)
856 856
857 857 pull over http
858 858
859 859 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
860 860 $ cat main.pid >> $DAEMON_PIDS
861 861
862 862 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
863 863 pulling from http://localhost:$HGPORT/
864 864 searching for changes
865 865 adding changesets
866 866 adding manifests
867 867 adding file changes
868 868 added 1 changesets with 1 changes to 1 files (+1 heads)
869 869 (run 'hg heads .' to see heads, 'hg merge' to merge)
870 870 $ cat main-error.log
871 871
872 872 push over ssh
873 873
874 874 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
875 875 pushing to ssh://user@dummy/other
876 876 searching for changes
877 877 remote: adding changesets
878 878 remote: adding manifests
879 879 remote: adding file changes
880 880 remote: added 1 changesets with 1 changes to 1 files
881 881 $ hg -R other log -G
882 882 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
883 883 |
884 884 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
885 885 |
886 886 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
887 887 | |
888 888 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
889 889 | |/|
890 890 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
891 891 |/ /
892 892 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
893 893 |/
894 894 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
895 895
896 896
897 897 push over http
898 898
899 899 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
900 900 $ cat other.pid >> $DAEMON_PIDS
901 901
902 902 $ hg -R main phase --public 32af7686d403
903 903 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
904 904 pushing to http://localhost:$HGPORT2/
905 905 searching for changes
906 906 remote: adding changesets
907 907 remote: adding manifests
908 908 remote: adding file changes
909 909 remote: added 1 changesets with 1 changes to 1 files
910 910 $ cat other-error.log
911 911
912 912 Check final content.
913 913
914 914 $ hg -R other log -G
915 915 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
916 916 |
917 917 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
918 918 |
919 919 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
920 920 |
921 921 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
922 922 | |
923 923 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
924 924 | |/|
925 925 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
926 926 |/ /
927 927 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
928 928 |/
929 929 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
930 930
931 931
932 932 Error Handling
933 933 ==============
934 934
935 935 Check that errors are properly returned to the client during push.
936 936
937 937 Setting up
938 938
939 939 $ cat > failpush.py << EOF
940 940 > """A small extension that makes push fails when using bundle2
941 941 >
942 942 > used to test error handling in bundle2
943 943 > """
944 944 >
945 945 > from mercurial import util
946 946 > from mercurial import bundle2
947 947 > from mercurial import exchange
948 948 > from mercurial import extensions
949 949 >
950 950 > def _pushbundle2failpart(pushop, bundler):
951 951 > reason = pushop.ui.config('failpush', 'reason', None)
952 952 > part = None
953 953 > if reason == 'abort':
954 954 > bundler.newpart('test:abort')
955 955 > if reason == 'unknown':
956 956 > bundler.newpart('TEST:UNKNOWN')
957 957 > if reason == 'race':
958 958 > # 20 Bytes of crap
959 959 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
960 > return lambda op: None
961 960 >
962 961 > @bundle2.parthandler("test:abort")
963 962 > def handleabort(op, part):
964 963 > raise util.Abort('Abandon ship!', hint="don't panic")
965 964 >
966 965 > def uisetup(ui):
967 966 > exchange.bundle2partsgenerators.insert(0, _pushbundle2failpart)
968 967 >
969 968 > EOF
970 969
971 970 $ cd main
972 971 $ hg up tip
973 972 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
974 973 $ echo 'I' > I
975 974 $ hg add I
976 975 $ hg ci -m 'I'
977 976 $ hg id
978 977 e7ec4e813ba6 tip
979 978 $ cd ..
980 979
981 980 $ cat << EOF >> $HGRCPATH
982 981 > [extensions]
983 982 > failpush=$TESTTMP/failpush.py
984 983 > EOF
985 984
986 985 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
987 986 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
988 987 $ cat other.pid >> $DAEMON_PIDS
989 988
990 989 Doing the actual push: Abort error
991 990
992 991 $ cat << EOF >> $HGRCPATH
993 992 > [failpush]
994 993 > reason = abort
995 994 > EOF
996 995
997 996 $ hg -R main push other -r e7ec4e813ba6
998 997 pushing to other
999 998 searching for changes
1000 999 abort: Abandon ship!
1001 1000 (don't panic)
1002 1001 [255]
1003 1002
1004 1003 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1005 1004 pushing to ssh://user@dummy/other
1006 1005 searching for changes
1007 1006 abort: Abandon ship!
1008 1007 (don't panic)
1009 1008 [255]
1010 1009
1011 1010 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1012 1011 pushing to http://localhost:$HGPORT2/
1013 1012 searching for changes
1014 1013 abort: Abandon ship!
1015 1014 (don't panic)
1016 1015 [255]
1017 1016
1018 1017
1019 1018 Doing the actual push: unknown mandatory parts
1020 1019
1021 1020 $ cat << EOF >> $HGRCPATH
1022 1021 > [failpush]
1023 1022 > reason = unknown
1024 1023 > EOF
1025 1024
1026 1025 $ hg -R main push other -r e7ec4e813ba6
1027 1026 pushing to other
1028 1027 searching for changes
1029 1028 abort: missing support for test:unknown
1030 1029 [255]
1031 1030
1032 1031 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1033 1032 pushing to ssh://user@dummy/other
1034 1033 searching for changes
1035 1034 abort: missing support for test:unknown
1036 1035 [255]
1037 1036
1038 1037 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1039 1038 pushing to http://localhost:$HGPORT2/
1040 1039 searching for changes
1041 1040 abort: missing support for test:unknown
1042 1041 [255]
1043 1042
1044 1043 Doing the actual push: race
1045 1044
1046 1045 $ cat << EOF >> $HGRCPATH
1047 1046 > [failpush]
1048 1047 > reason = race
1049 1048 > EOF
1050 1049
1051 1050 $ hg -R main push other -r e7ec4e813ba6
1052 1051 pushing to other
1053 1052 searching for changes
1054 1053 abort: push failed:
1055 1054 'repository changed while pushing - please try again'
1056 1055 [255]
1057 1056
1058 1057 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1059 1058 pushing to ssh://user@dummy/other
1060 1059 searching for changes
1061 1060 abort: push failed:
1062 1061 'repository changed while pushing - please try again'
1063 1062 [255]
1064 1063
1065 1064 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1066 1065 pushing to http://localhost:$HGPORT2/
1067 1066 searching for changes
1068 1067 abort: push failed:
1069 1068 'repository changed while pushing - please try again'
1070 1069 [255]
1071 1070
1072 1071 Doing the actual push: hook abort
1073 1072
1074 1073 $ cat << EOF >> $HGRCPATH
1075 1074 > [failpush]
1076 1075 > reason =
1077 1076 > [hooks]
1078 1077 > b2x-pretransactionclose.failpush = false
1079 1078 > EOF
1080 1079
1081 1080 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1082 1081 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1083 1082 $ cat other.pid >> $DAEMON_PIDS
1084 1083
1085 1084 $ hg -R main push other -r e7ec4e813ba6
1086 1085 pushing to other
1087 1086 searching for changes
1088 1087 transaction abort!
1089 1088 rollback completed
1090 1089 abort: b2x-pretransactionclose.failpush hook exited with status 1
1091 1090 [255]
1092 1091
1093 1092 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1094 1093 pushing to ssh://user@dummy/other
1095 1094 searching for changes
1096 1095 abort: b2x-pretransactionclose.failpush hook exited with status 1
1097 1096 remote: transaction abort!
1098 1097 remote: rollback completed
1099 1098 [255]
1100 1099
1101 1100 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1102 1101 pushing to http://localhost:$HGPORT2/
1103 1102 searching for changes
1104 1103 abort: b2x-pretransactionclose.failpush hook exited with status 1
1105 1104 [255]
1106 1105
1107 1106