push: rework the bundle2partsgenerators logic...
Pierre-Yves David
r22017:7986e99b default
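Below is a minimal sketch (not part of this changeset) of what the new registry enables: instead of appending a function to the old bundle2partsgenerators list, an extension registers an extra bundle2 part generator through the b2partsgenerator decorator added here, and _pushbundle2 then walks b2partsgenorder and looks each step up in b2partsgenmapping. The step name 'example-token', the part type 'x-example:token', and the payload are made up for illustration; the sketch assumes a Mercurial build that includes this change.

    # Hypothetical extension code, assuming a Mercurial build with this change.
    from mercurial import exchange

    @exchange.b2partsgenerator('example-token')
    def _pushb2token(pushop, bundler):
        """add a made-up advisory part to outgoing bundle2 pushes"""
        if 'example-token' in pushop.stepsdone:
            # some other code already performed this step
            return
        pushop.stepsdone.add('example-token')
        # an all-lowercase part type is advisory, so a server that does not
        # know it can simply ignore it
        bundler.newpart('x-example:token', data='hello')
        # returning a callable would register it as a reply handler in
        # _pushbundle2; returning None means no reply processing is needed

Because the generator is appended to b2partsgenorder at decoration time, it runs after the built-in 'changeset' step, and its return value, if callable, is invoked with the processed bundle2 reply.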
@@ -1,824 +1,848
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13 13
14 14 def readbundle(ui, fh, fname, vfs=None):
15 15 header = changegroup.readexactly(fh, 4)
16 16
17 17 alg = None
18 18 if not fname:
19 19 fname = "stream"
20 20 if not header.startswith('HG') and header.startswith('\0'):
21 21 fh = changegroup.headerlessfixup(fh, header)
22 22 header = "HG10"
23 23 alg = 'UN'
24 24 elif vfs:
25 25 fname = vfs.join(fname)
26 26
27 27 magic, version = header[0:2], header[2:4]
28 28
29 29 if magic != 'HG':
30 30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 31 if version == '10':
32 32 if alg is None:
33 33 alg = changegroup.readexactly(fh, 2)
34 34 return changegroup.unbundle10(fh, alg)
35 35 elif version == '2X':
36 36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 37 else:
38 38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
40 40
41 41 class pushoperation(object):
42 42 """A object that represent a single push operation
43 43
44 44 It purpose is to carry push related state and very common operation.
45 45
46 46 A new should be created at the beginning of each push and discarded
47 47 afterward.
48 48 """
49 49
50 50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 51 # repo we push from
52 52 self.repo = repo
53 53 self.ui = repo.ui
54 54 # repo we push to
55 55 self.remote = remote
56 56 # force option provided
57 57 self.force = force
58 58 # revs to be pushed (None is "all")
59 59 self.revs = revs
60 60 # allow push of new branch
61 61 self.newbranch = newbranch
62 62 # did a local lock get acquired?
63 63 self.locallocked = None
64 64 # step already performed
65 65 # (used to check what steps have been already performed through bundle2)
66 66 self.stepsdone = set()
67 67 # Integer version of the push result
68 68 # - None means nothing to push
69 69 # - 0 means HTTP error
70 70 # - 1 means we pushed and remote head count is unchanged *or*
71 71 # we have outgoing changesets but refused to push
72 72 # - other values as described by addchangegroup()
73 73 self.ret = None
74 74 # discover.outgoing object (contains common and outgoing data)
75 75 self.outgoing = None
76 76 # all remote heads before the push
77 77 self.remoteheads = None
78 78 # testable as a boolean indicating if any nodes are missing locally.
79 79 self.incoming = None
80 80
81 81 @util.propertycache
82 82 def futureheads(self):
83 83 """future remote heads if the changeset push succeeds"""
84 84 return self.outgoing.missingheads
85 85
86 86 @util.propertycache
87 87 def fallbackheads(self):
88 88 """future remote heads if the changeset push fails"""
89 89 if self.revs is None:
90 90 # no target to push, all common heads are relevant
91 91 return self.outgoing.commonheads
92 92 unfi = self.repo.unfiltered()
93 93 # I want cheads = heads(::missingheads and ::commonheads)
94 94 # (missingheads is revs with secret changeset filtered out)
95 95 #
96 96 # This can be expressed as:
97 97 # cheads = ( (missingheads and ::commonheads)
98 98 # + (commonheads and ::missingheads))
99 99 # )
100 100 #
101 101 # while trying to push we already computed the following:
102 102 # common = (::commonheads)
103 103 # missing = ((commonheads::missingheads) - commonheads)
104 104 #
105 105 # We can pick:
106 106 # * missingheads part of common (::commonheads)
107 107 common = set(self.outgoing.common)
108 108 nm = self.repo.changelog.nodemap
109 109 cheads = [node for node in self.revs if nm[node] in common]
110 110 # and
111 111 # * commonheads parents on missing
112 112 revset = unfi.set('%ln and parents(roots(%ln))',
113 113 self.outgoing.commonheads,
114 114 self.outgoing.missing)
115 115 cheads.extend(c.node() for c in revset)
116 116 return cheads
117 117
118 118 @property
119 119 def commonheads(self):
120 120 """set of all common heads after changeset bundle push"""
121 121 if self.ret:
122 122 return self.futureheads
123 123 else:
124 124 return self.fallbackheads
125 125
126 126 def push(repo, remote, force=False, revs=None, newbranch=False):
127 127 '''Push outgoing changesets (limited by revs) from a local
128 128 repository to remote. Return an integer:
129 129 - None means nothing to push
130 130 - 0 means HTTP error
131 131 - 1 means we pushed and remote head count is unchanged *or*
132 132 we have outgoing changesets but refused to push
133 133 - other values as described by addchangegroup()
134 134 '''
135 135 pushop = pushoperation(repo, remote, force, revs, newbranch)
136 136 if pushop.remote.local():
137 137 missing = (set(pushop.repo.requirements)
138 138 - pushop.remote.local().supported)
139 139 if missing:
140 140 msg = _("required features are not"
141 141 " supported in the destination:"
142 142 " %s") % (', '.join(sorted(missing)))
143 143 raise util.Abort(msg)
144 144
145 145 # there are two ways to push to remote repo:
146 146 #
147 147 # addchangegroup assumes local user can lock remote
148 148 # repo (local filesystem, old ssh servers).
149 149 #
150 150 # unbundle assumes local user cannot lock remote repo (new ssh
151 151 # servers, http servers).
152 152
153 153 if not pushop.remote.canpush():
154 154 raise util.Abort(_("destination does not support push"))
155 155 # get local lock as we might write phase data
156 156 locallock = None
157 157 try:
158 158 locallock = pushop.repo.lock()
159 159 pushop.locallocked = True
160 160 except IOError, err:
161 161 pushop.locallocked = False
162 162 if err.errno != errno.EACCES:
163 163 raise
164 164 # source repo cannot be locked.
165 165 # We do not abort the push, but just disable the local phase
166 166 # synchronisation.
167 167 msg = 'cannot lock source repository: %s\n' % err
168 168 pushop.ui.debug(msg)
169 169 try:
170 170 pushop.repo.checkpush(pushop)
171 171 lock = None
172 172 unbundle = pushop.remote.capable('unbundle')
173 173 if not unbundle:
174 174 lock = pushop.remote.lock()
175 175 try:
176 176 _pushdiscovery(pushop)
177 177 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
178 178 False)
179 179 and pushop.remote.capable('bundle2-exp')):
180 180 _pushbundle2(pushop)
181 181 _pushchangeset(pushop)
182 182 _pushsyncphase(pushop)
183 183 _pushobsolete(pushop)
184 184 finally:
185 185 if lock is not None:
186 186 lock.release()
187 187 finally:
188 188 if locallock is not None:
189 189 locallock.release()
190 190
191 191 _pushbookmark(pushop)
192 192 return pushop.ret
193 193
194 194 def _pushdiscovery(pushop):
195 195 # discovery
196 196 unfi = pushop.repo.unfiltered()
197 197 fci = discovery.findcommonincoming
198 198 commoninc = fci(unfi, pushop.remote, force=pushop.force)
199 199 common, inc, remoteheads = commoninc
200 200 fco = discovery.findcommonoutgoing
201 201 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
202 202 commoninc=commoninc, force=pushop.force)
203 203 pushop.outgoing = outgoing
204 204 pushop.remoteheads = remoteheads
205 205 pushop.incoming = inc
206 206
207 207 def _pushcheckoutgoing(pushop):
208 208 outgoing = pushop.outgoing
209 209 unfi = pushop.repo.unfiltered()
210 210 if not outgoing.missing:
211 211 # nothing to push
212 212 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
213 213 return False
214 214 # something to push
215 215 if not pushop.force:
216 216 # if repo.obsstore == False --> no obsolete
217 217 # then, save the iteration
218 218 if unfi.obsstore:
219 219 # these messages are here for 80-char-limit reasons
220 220 mso = _("push includes obsolete changeset: %s!")
221 221 mst = "push includes %s changeset: %s!"
222 222 # plain versions for i18n tool to detect them
223 223 _("push includes unstable changeset: %s!")
224 224 _("push includes bumped changeset: %s!")
225 225 _("push includes divergent changeset: %s!")
226 226 # If we are to push and there is at least one
227 227 # obsolete or unstable changeset in missing, at
228 228 # least one of the missingheads will be obsolete or
229 229 # unstable. So checking heads only is ok
230 230 for node in outgoing.missingheads:
231 231 ctx = unfi[node]
232 232 if ctx.obsolete():
233 233 raise util.Abort(mso % ctx)
234 234 elif ctx.troubled():
235 235 raise util.Abort(_(mst)
236 236 % (ctx.troubles()[0],
237 237 ctx))
238 238 newbm = pushop.ui.configlist('bookmarks', 'pushing')
239 239 discovery.checkheads(unfi, pushop.remote, outgoing,
240 240 pushop.remoteheads,
241 241 pushop.newbranch,
242 242 bool(pushop.incoming),
243 243 newbm)
244 244 return True
245 245
246 # List of names of steps to perform for an outgoing bundle2, order matters.
247 b2partsgenorder = []
248
249 # Mapping between step name and function
250 #
251 # This exists to help extensions wrap steps if necessary
252 b2partsgenmapping = {}
253
254 def b2partsgenerator(stepname):
255 """decorator for function generating bundle2 part
256
257 The function is added to the step -> function mapping and appended to the
258 list of steps. Beware that decorated functions will be added in order
259 (this may matter).
260
261 You can only use this decorator for new steps; if you want to wrap a step
262 from an extension, modify the b2partsgenmapping dictionary directly."""
263 def dec(func):
264 assert stepname not in b2partsgenmapping
265 b2partsgenmapping[stepname] = func
266 b2partsgenorder.append(stepname)
267 return func
268 return dec
269
270 @b2partsgenerator('changeset')
246 271 def _pushb2ctx(pushop, bundler):
247 272 """handle changegroup push through bundle2
248 273
249 274 addchangegroup result is stored in the ``pushop.ret`` attribute.
250 275 """
251 276 if 'changesets' in pushop.stepsdone:
252 277 return
253 278 pushop.stepsdone.add('changesets')
254 279 # Send known heads to the server for race detection.
255 280 pushop.stepsdone.add('changesets')
256 281 if not _pushcheckoutgoing(pushop):
257 282 return
258 283 pushop.repo.prepushoutgoinghooks(pushop.repo,
259 284 pushop.remote,
260 285 pushop.outgoing)
261 286 if not pushop.force:
262 287 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
263 288 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
264 289 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
265 290 def handlereply(op):
266 291 """extract addchangroup returns from server reply"""
267 292 cgreplies = op.records.getreplies(cgpart.id)
268 293 assert len(cgreplies['changegroup']) == 1
269 294 pushop.ret = cgreplies['changegroup'][0]['return']
270 295 return handlereply
271 296
272 # list of function that may decide to add parts to an outgoing bundle2
273 bundle2partsgenerators = [_pushb2ctx]
274 297
275 298 def _pushbundle2(pushop):
276 299 """push data to the remote using bundle2
277 300
278 301 The only currently supported type of data is changegroup but this will
279 302 evolve in the future."""
280 303 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
281 304 # create reply capability
282 305 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
283 306 bundler.newpart('b2x:replycaps', data=capsblob)
284 307 replyhandlers = []
285 for partgen in bundle2partsgenerators:
308 for partgenname in b2partsgenorder:
309 partgen = b2partsgenmapping[partgenname]
286 310 ret = partgen(pushop, bundler)
287 311 if callable(ret):
288 312 replyhandlers.append(ret)
289 313 # do not push if nothing to push
290 314 if bundler.nbparts <= 1:
291 315 return
292 316 stream = util.chunkbuffer(bundler.getchunks())
293 317 try:
294 318 reply = pushop.remote.unbundle(stream, ['force'], 'push')
295 319 except error.BundleValueError, exc:
296 320 raise util.Abort('missing support for %s' % exc)
297 321 try:
298 322 op = bundle2.processbundle(pushop.repo, reply)
299 323 except error.BundleValueError, exc:
300 324 raise util.Abort('missing support for %s' % exc)
301 325 for rephand in replyhandlers:
302 326 rephand(op)
303 327
304 328 def _pushchangeset(pushop):
305 329 """Make the actual push of changeset bundle to remote repo"""
306 330 if 'changesets' in pushop.stepsdone:
307 331 return
308 332 pushop.stepsdone.add('changesets')
309 333 if not _pushcheckoutgoing(pushop):
310 334 return
311 335 pushop.repo.prepushoutgoinghooks(pushop.repo,
312 336 pushop.remote,
313 337 pushop.outgoing)
314 338 outgoing = pushop.outgoing
315 339 unbundle = pushop.remote.capable('unbundle')
316 340 # TODO: get bundlecaps from remote
317 341 bundlecaps = None
318 342 # create a changegroup from local
319 343 if pushop.revs is None and not (outgoing.excluded
320 344 or pushop.repo.changelog.filteredrevs):
321 345 # push everything,
322 346 # use the fast path, no race possible on push
323 347 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
324 348 cg = changegroup.getsubset(pushop.repo,
325 349 outgoing,
326 350 bundler,
327 351 'push',
328 352 fastpath=True)
329 353 else:
330 354 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
331 355 bundlecaps)
332 356
333 357 # apply changegroup to remote
334 358 if unbundle:
335 359 # local repo finds heads on server, finds out what
336 360 # revs it must push. once revs transferred, if server
337 361 # finds it has different heads (someone else won
338 362 # commit/push race), server aborts.
339 363 if pushop.force:
340 364 remoteheads = ['force']
341 365 else:
342 366 remoteheads = pushop.remoteheads
343 367 # ssh: return remote's addchangegroup()
344 368 # http: return remote's addchangegroup() or 0 for error
345 369 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
346 370 pushop.repo.url())
347 371 else:
348 372 # we return an integer indicating remote head count
349 373 # change
350 374 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
351 375
352 376 def _pushsyncphase(pushop):
353 377 """synchronise phase information locally and remotely"""
354 378 unfi = pushop.repo.unfiltered()
355 379 cheads = pushop.commonheads
356 380 # even when we don't push, exchanging phase data is useful
357 381 remotephases = pushop.remote.listkeys('phases')
358 382 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
359 383 and remotephases # server supports phases
360 384 and pushop.ret is None # nothing was pushed
361 385 and remotephases.get('publishing', False)):
362 386 # When:
363 387 # - this is a subrepo push
364 388 # - and remote support phase
365 389 # - and no changeset was pushed
366 390 # - and remote is publishing
367 391 # We may be in issue 3871 case!
368 392 # We drop the possible phase synchronisation done by
369 393 # courtesy to publish changesets possibly locally draft
370 394 # on the remote.
371 395 remotephases = {'publishing': 'True'}
372 396 if not remotephases: # old server or public only reply from non-publishing
373 397 _localphasemove(pushop, cheads)
374 398 # don't push any phase data as there is nothing to push
375 399 else:
376 400 ana = phases.analyzeremotephases(pushop.repo, cheads,
377 401 remotephases)
378 402 pheads, droots = ana
379 403 ### Apply remote phase on local
380 404 if remotephases.get('publishing', False):
381 405 _localphasemove(pushop, cheads)
382 406 else: # publish = False
383 407 _localphasemove(pushop, pheads)
384 408 _localphasemove(pushop, cheads, phases.draft)
385 409 ### Apply local phase on remote
386 410
387 411 # Get the list of all revs draft on remote by public here.
388 412 # XXX Beware that revset break if droots is not strictly
389 413 # XXX root we may want to ensure it is but it is costly
390 414 outdated = unfi.set('heads((%ln::%ln) and public())',
391 415 droots, cheads)
392 416
393 417 b2caps = bundle2.bundle2caps(pushop.remote)
394 418 if 'b2x:pushkey' in b2caps:
395 419 # server supports bundle2, let's do a batched push through it
396 420 #
397 421 # This will eventually be unified with the changesets bundle2 push
398 422 bundler = bundle2.bundle20(pushop.ui, b2caps)
399 423 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
400 424 bundler.newpart('b2x:replycaps', data=capsblob)
401 425 part2node = []
402 426 enc = pushkey.encode
403 427 for newremotehead in outdated:
404 428 part = bundler.newpart('b2x:pushkey')
405 429 part.addparam('namespace', enc('phases'))
406 430 part.addparam('key', enc(newremotehead.hex()))
407 431 part.addparam('old', enc(str(phases.draft)))
408 432 part.addparam('new', enc(str(phases.public)))
409 433 part2node.append((part.id, newremotehead))
410 434 stream = util.chunkbuffer(bundler.getchunks())
411 435 try:
412 436 reply = pushop.remote.unbundle(stream, ['force'], 'push')
413 437 op = bundle2.processbundle(pushop.repo, reply)
414 438 except error.BundleValueError, exc:
415 439 raise util.Abort('missing support for %s' % exc)
416 440 for partid, node in part2node:
417 441 partrep = op.records.getreplies(partid)
418 442 results = partrep['pushkey']
419 443 assert len(results) <= 1
420 444 msg = None
421 445 if not results:
422 446 msg = _('server ignored update of %s to public!\n') % node
423 447 elif not int(results[0]['return']):
424 448 msg = _('updating %s to public failed!\n') % node
425 449 if msg is not None:
426 450 pushop.ui.warn(msg)
427 451
428 452 else:
429 453 # fall back to independent pushkey command
430 454 for newremotehead in outdated:
431 455 r = pushop.remote.pushkey('phases',
432 456 newremotehead.hex(),
433 457 str(phases.draft),
434 458 str(phases.public))
435 459 if not r:
436 460 pushop.ui.warn(_('updating %s to public failed!\n')
437 461 % newremotehead)
438 462
439 463 def _localphasemove(pushop, nodes, phase=phases.public):
440 464 """move <nodes> to <phase> in the local source repo"""
441 465 if pushop.locallocked:
442 466 phases.advanceboundary(pushop.repo, phase, nodes)
443 467 else:
444 468 # repo is not locked, do not change any phases!
445 469 # Informs the user that phases should have been moved when
446 470 # applicable.
447 471 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
448 472 phasestr = phases.phasenames[phase]
449 473 if actualmoves:
450 474 pushop.ui.status(_('cannot lock source repo, skipping '
451 475 'local %s phase update\n') % phasestr)
452 476
453 477 def _pushobsolete(pushop):
454 478 """utility function to push obsolete markers to a remote"""
455 479 pushop.ui.debug('try to push obsolete markers to remote\n')
456 480 repo = pushop.repo
457 481 remote = pushop.remote
458 482 if (obsolete._enabled and repo.obsstore and
459 483 'obsolete' in remote.listkeys('namespaces')):
460 484 rslts = []
461 485 remotedata = repo.listkeys('obsolete')
462 486 for key in sorted(remotedata, reverse=True):
463 487 # reverse sort to ensure we end with dump0
464 488 data = remotedata[key]
465 489 rslts.append(remote.pushkey('obsolete', key, '', data))
466 490 if [r for r in rslts if not r]:
467 491 msg = _('failed to push some obsolete markers!\n')
468 492 repo.ui.warn(msg)
469 493
470 494 def _pushbookmark(pushop):
471 495 """Update bookmark position on remote"""
472 496 ui = pushop.ui
473 497 repo = pushop.repo.unfiltered()
474 498 remote = pushop.remote
475 499 ui.debug("checking for updated bookmarks\n")
476 500 revnums = map(repo.changelog.rev, pushop.revs or [])
477 501 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
478 502 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
479 503 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
480 504 srchex=hex)
481 505
482 506 for b, scid, dcid in advsrc:
483 507 if ancestors and repo[scid].rev() not in ancestors:
484 508 continue
485 509 if remote.pushkey('bookmarks', b, dcid, scid):
486 510 ui.status(_("updating bookmark %s\n") % b)
487 511 else:
488 512 ui.warn(_('updating bookmark %s failed!\n') % b)
489 513
490 514 class pulloperation(object):
491 515 """A object that represent a single pull operation
492 516
493 517 It purpose is to carry push related state and very common operation.
494 518
495 519 A new should be created at the beginning of each pull and discarded
496 520 afterward.
497 521 """
498 522
499 523 def __init__(self, repo, remote, heads=None, force=False):
500 524 # repo we pull into
501 525 self.repo = repo
502 526 # repo we pull from
503 527 self.remote = remote
504 528 # revision we try to pull (None is "all")
505 529 self.heads = heads
506 530 # do we force pull?
507 531 self.force = force
508 532 # the name of the pull transaction
509 533 self._trname = 'pull\n' + util.hidepassword(remote.url())
510 534 # hold the transaction once created
511 535 self._tr = None
512 536 # set of common changeset between local and remote before pull
513 537 self.common = None
514 538 # set of pulled head
515 539 self.rheads = None
516 540 # list of missing changeset to fetch remotely
517 541 self.fetch = None
518 542 # result of changegroup pulling (used as return code by pull)
519 543 self.cgresult = None
520 544 # list of step remaining todo (related to future bundle2 usage)
521 545 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
522 546
523 547 @util.propertycache
524 548 def pulledsubset(self):
525 549 """heads of the set of changeset target by the pull"""
526 550 # compute target subset
527 551 if self.heads is None:
528 552 # We pulled every thing possible
529 553 # sync on everything common
530 554 c = set(self.common)
531 555 ret = list(self.common)
532 556 for n in self.rheads:
533 557 if n not in c:
534 558 ret.append(n)
535 559 return ret
536 560 else:
537 561 # We pulled a specific subset
538 562 # sync on this subset
539 563 return self.heads
540 564
541 565 def gettransaction(self):
542 566 """get appropriate pull transaction, creating it if needed"""
543 567 if self._tr is None:
544 568 self._tr = self.repo.transaction(self._trname)
545 569 return self._tr
546 570
547 571 def closetransaction(self):
548 572 """close transaction if created"""
549 573 if self._tr is not None:
550 574 self._tr.close()
551 575
552 576 def releasetransaction(self):
553 577 """release transaction if created"""
554 578 if self._tr is not None:
555 579 self._tr.release()
556 580
557 581 def pull(repo, remote, heads=None, force=False):
558 582 pullop = pulloperation(repo, remote, heads, force)
559 583 if pullop.remote.local():
560 584 missing = set(pullop.remote.requirements) - pullop.repo.supported
561 585 if missing:
562 586 msg = _("required features are not"
563 587 " supported in the destination:"
564 588 " %s") % (', '.join(sorted(missing)))
565 589 raise util.Abort(msg)
566 590
567 591 lock = pullop.repo.lock()
568 592 try:
569 593 _pulldiscovery(pullop)
570 594 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
571 595 and pullop.remote.capable('bundle2-exp')):
572 596 _pullbundle2(pullop)
573 597 if 'changegroup' in pullop.todosteps:
574 598 _pullchangeset(pullop)
575 599 if 'phases' in pullop.todosteps:
576 600 _pullphase(pullop)
577 601 if 'obsmarkers' in pullop.todosteps:
578 602 _pullobsolete(pullop)
579 603 pullop.closetransaction()
580 604 finally:
581 605 pullop.releasetransaction()
582 606 lock.release()
583 607
584 608 return pullop.cgresult
585 609
586 610 def _pulldiscovery(pullop):
587 611 """discovery phase for the pull
588 612
589 613 Currently handles changeset discovery only; will change to handle all
590 614 discovery at some point."""
591 615 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
592 616 pullop.remote,
593 617 heads=pullop.heads,
594 618 force=pullop.force)
595 619 pullop.common, pullop.fetch, pullop.rheads = tmp
596 620
597 621 def _pullbundle2(pullop):
598 622 """pull data using bundle2
599 623
600 624 For now, the only supported data are changegroup."""
601 625 remotecaps = bundle2.bundle2caps(pullop.remote)
602 626 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
603 627 # pulling changegroup
604 628 pullop.todosteps.remove('changegroup')
605 629
606 630 kwargs['common'] = pullop.common
607 631 kwargs['heads'] = pullop.heads or pullop.rheads
608 632 if 'b2x:listkeys' in remotecaps:
609 633 kwargs['listkeys'] = ['phase']
610 634 if not pullop.fetch:
611 635 pullop.repo.ui.status(_("no changes found\n"))
612 636 pullop.cgresult = 0
613 637 else:
614 638 if pullop.heads is None and list(pullop.common) == [nullid]:
615 639 pullop.repo.ui.status(_("requesting all changes\n"))
616 640 _pullbundle2extraprepare(pullop, kwargs)
617 641 if kwargs.keys() == ['format']:
618 642 return # nothing to pull
619 643 bundle = pullop.remote.getbundle('pull', **kwargs)
620 644 try:
621 645 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
622 646 except error.BundleValueError, exc:
623 647 raise util.Abort('missing support for %s' % exc)
624 648
625 649 if pullop.fetch:
626 650 assert len(op.records['changegroup']) == 1
627 651 pullop.cgresult = op.records['changegroup'][0]['return']
628 652
629 653 # processing phases change
630 654 for namespace, value in op.records['listkeys']:
631 655 if namespace == 'phases':
632 656 _pullapplyphases(pullop, value)
633 657
634 658 def _pullbundle2extraprepare(pullop, kwargs):
635 659 """hook function so that extensions can extend the getbundle call"""
636 660 pass
637 661
638 662 def _pullchangeset(pullop):
639 663 """pull changeset from unbundle into the local repo"""
640 664 # We delay opening the transaction as late as possible so we
641 665 # don't open a transaction for nothing and don't break future useful
642 666 # rollback calls
643 667 pullop.todosteps.remove('changegroup')
644 668 if not pullop.fetch:
645 669 pullop.repo.ui.status(_("no changes found\n"))
646 670 pullop.cgresult = 0
647 671 return
648 672 pullop.gettransaction()
649 673 if pullop.heads is None and list(pullop.common) == [nullid]:
650 674 pullop.repo.ui.status(_("requesting all changes\n"))
651 675 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
652 676 # issue1320, avoid a race if remote changed after discovery
653 677 pullop.heads = pullop.rheads
654 678
655 679 if pullop.remote.capable('getbundle'):
656 680 # TODO: get bundlecaps from remote
657 681 cg = pullop.remote.getbundle('pull', common=pullop.common,
658 682 heads=pullop.heads or pullop.rheads)
659 683 elif pullop.heads is None:
660 684 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
661 685 elif not pullop.remote.capable('changegroupsubset'):
662 686 raise util.Abort(_("partial pull cannot be done because "
663 687 "other repository doesn't support "
664 688 "changegroupsubset."))
665 689 else:
666 690 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
667 691 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
668 692 pullop.remote.url())
669 693
670 694 def _pullphase(pullop):
671 695 # Get remote phases data from remote
672 696 remotephases = pullop.remote.listkeys('phases')
673 697 _pullapplyphases(pullop, remotephases)
674 698
675 699 def _pullapplyphases(pullop, remotephases):
676 700 """apply phase movement from observed remote state"""
677 701 pullop.todosteps.remove('phases')
678 702 publishing = bool(remotephases.get('publishing', False))
679 703 if remotephases and not publishing:
680 704 # remote is new and unpublishing
681 705 pheads, _dr = phases.analyzeremotephases(pullop.repo,
682 706 pullop.pulledsubset,
683 707 remotephases)
684 708 phases.advanceboundary(pullop.repo, phases.public, pheads)
685 709 phases.advanceboundary(pullop.repo, phases.draft,
686 710 pullop.pulledsubset)
687 711 else:
688 712 # Remote is old or publishing all common changesets
689 713 # should be seen as public
690 714 phases.advanceboundary(pullop.repo, phases.public,
691 715 pullop.pulledsubset)
692 716
693 717 def _pullobsolete(pullop):
694 718 """utility function to pull obsolete markers from a remote
695 719
696 720 `gettransaction` is a function that returns the pull transaction, creating
697 721 one if necessary. We return the transaction to inform the calling code that
698 722 a new transaction has been created (when applicable).
699 723
700 724 Exists mostly to allow overriding for experimentation purposes."""
701 725 pullop.todosteps.remove('obsmarkers')
702 726 tr = None
703 727 if obsolete._enabled:
704 728 pullop.repo.ui.debug('fetching remote obsolete markers\n')
705 729 remoteobs = pullop.remote.listkeys('obsolete')
706 730 if 'dump0' in remoteobs:
707 731 tr = pullop.gettransaction()
708 732 for key in sorted(remoteobs, reverse=True):
709 733 if key.startswith('dump'):
710 734 data = base85.b85decode(remoteobs[key])
711 735 pullop.repo.obsstore.mergemarkers(tr, data)
712 736 pullop.repo.invalidatevolatilesets()
713 737 return tr
714 738
715 739 def caps20to10(repo):
716 740 """return a set with appropriate options to use bundle20 during getbundle"""
717 741 caps = set(['HG2X'])
718 742 capsblob = bundle2.encodecaps(repo.bundle2caps)
719 743 caps.add('bundle2=' + urllib.quote(capsblob))
720 744 return caps
721 745
722 746 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
723 747 **kwargs):
724 748 """return a full bundle (with potentially multiple kind of parts)
725 749
726 750 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
727 751 passed. For now, the bundle can contain only changegroup, but this will
728 752 changes when more part type will be available for bundle2.
729 753
730 754 This is different from changegroup.getbundle that only returns an HG10
731 755 changegroup bundle. They may eventually get reunited in the future when we
732 756 have a clearer idea of the API we what to query different data.
733 757
734 758 The implementation is at a very early stage and will get massive rework
735 759 when the API of bundle is refined.
736 760 """
737 761 cg = None
738 762 if kwargs.get('cg', True):
739 763 # build changegroup bundle here.
740 764 cg = changegroup.getbundle(repo, source, heads=heads,
741 765 common=common, bundlecaps=bundlecaps)
742 766 elif 'HG2X' not in bundlecaps:
743 767 raise ValueError(_('request for bundle10 must include changegroup'))
744 768 if bundlecaps is None or 'HG2X' not in bundlecaps:
745 769 if kwargs:
746 770 raise ValueError(_('unsupported getbundle arguments: %s')
747 771 % ', '.join(sorted(kwargs.keys())))
748 772 return cg
749 773 # very crude first implementation,
750 774 # the bundle API will change and the generation will be done lazily.
751 775 b2caps = {}
752 776 for bcaps in bundlecaps:
753 777 if bcaps.startswith('bundle2='):
754 778 blob = urllib.unquote(bcaps[len('bundle2='):])
755 779 b2caps.update(bundle2.decodecaps(blob))
756 780 bundler = bundle2.bundle20(repo.ui, b2caps)
757 781 if cg:
758 782 bundler.newpart('b2x:changegroup', data=cg.getchunks())
759 783 listkeys = kwargs.get('listkeys', ())
760 784 for namespace in listkeys:
761 785 part = bundler.newpart('b2x:listkeys')
762 786 part.addparam('namespace', namespace)
763 787 keys = repo.listkeys(namespace).items()
764 788 part.data = pushkey.encodekeys(keys)
765 789 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
766 790 bundlecaps=bundlecaps, **kwargs)
767 791 return util.chunkbuffer(bundler.getchunks())
768 792
769 793 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
770 794 bundlecaps=None, **kwargs):
771 795 """hook function to let extensions add parts to the requested bundle"""
772 796 pass
773 797
774 798 def check_heads(repo, their_heads, context):
775 799 """check if the heads of a repo have been modified
776 800
777 801 Used by peer for unbundling.
778 802 """
779 803 heads = repo.heads()
780 804 heads_hash = util.sha1(''.join(sorted(heads))).digest()
781 805 if not (their_heads == ['force'] or their_heads == heads or
782 806 their_heads == ['hashed', heads_hash]):
783 807 # someone else committed/pushed/unbundled while we
784 808 # were transferring data
785 809 raise error.PushRaced('repository changed while %s - '
786 810 'please try again' % context)
787 811
788 812 def unbundle(repo, cg, heads, source, url):
789 813 """Apply a bundle to a repo.
790 814
791 815 This function makes sure the repo is locked during the application and has
792 816 a mechanism to check that no push race occurred between the creation of the
793 817 bundle and its application.
794 818
795 819 If the push was raced, a PushRaced exception is raised."""
796 820 r = 0
797 821 # need a transaction when processing a bundle2 stream
798 822 tr = None
799 823 lock = repo.lock()
800 824 try:
801 825 check_heads(repo, heads, 'uploading changes')
802 826 # push can proceed
803 827 if util.safehasattr(cg, 'params'):
804 828 try:
805 829 tr = repo.transaction('unbundle')
806 830 tr.hookargs['bundle2-exp'] = '1'
807 831 r = bundle2.processbundle(repo, cg, lambda: tr).reply
808 832 cl = repo.unfiltered().changelog
809 833 p = cl.writepending() and repo.root or ""
810 834 repo.hook('b2x-pretransactionclose', throw=True, source=source,
811 835 url=url, pending=p, **tr.hookargs)
812 836 tr.close()
813 837 repo.hook('b2x-transactionclose', source=source, url=url,
814 838 **tr.hookargs)
815 839 except Exception, exc:
816 840 exc.duringunbundle2 = True
817 841 raise
818 842 else:
819 843 r = changegroup.addchangegroup(repo, cg, source, url)
820 844 finally:
821 845 if tr is not None:
822 846 tr.release()
823 847 lock.release()
824 848 return r
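As the b2partsgenerator docstring above notes, an existing step is wrapped by touching b2partsgenmapping directly rather than by registering a new step name. A minimal, hypothetical sketch of that pattern follows (the debug message is made up; it assumes a Mercurial build with this change):

    # Hypothetical extension code: wrap the built-in 'changeset' step.
    from mercurial import exchange

    origgen = exchange.b2partsgenmapping['changeset']

    def wrappedchangesetgen(pushop, bundler):
        pushop.ui.debug('generating the changeset part for bundle2 push\n')
        # delegate to the original generator and preserve its reply handler
        return origgen(pushop, bundler)

    exchange.b2partsgenmapping['changeset'] = wrappedchangesetgen

Because only the mapping entry is replaced, the step keeps its position in b2partsgenorder and the stepsdone bookkeeping performed by the original generator is unchanged.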
@@ -1,1107 +1,1108
1 1
2 2 Create an extension to test bundle2 API
3 3
4 4 $ cat > bundle2.py << EOF
5 5 > """A small extension to test bundle2 implementation
6 6 >
7 7 > Current bundle2 implementation is far too limited to be used in any core
8 8 > code. We still need to be able to test it while it grow up.
9 9 > """
10 10 >
11 11 > import sys, os
12 12 > from mercurial import cmdutil
13 13 > from mercurial import util
14 14 > from mercurial import bundle2
15 15 > from mercurial import scmutil
16 16 > from mercurial import discovery
17 17 > from mercurial import changegroup
18 18 > from mercurial import error
19 19 >
20 20 > try:
21 21 > import msvcrt
22 22 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
23 23 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
24 24 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
25 25 > except ImportError:
26 26 > pass
27 27 >
28 28 > cmdtable = {}
29 29 > command = cmdutil.command(cmdtable)
30 30 >
31 31 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
32 32 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
33 33 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
34 34 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
35 35 >
36 36 > @bundle2.parthandler('test:song')
37 37 > def songhandler(op, part):
38 38 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
39 39 > op.ui.write('The choir starts singing:\n')
40 40 > verses = 0
41 41 > for line in part.read().split('\n'):
42 42 > op.ui.write(' %s\n' % line)
43 43 > verses += 1
44 44 > op.records.add('song', {'verses': verses})
45 45 >
46 46 > @bundle2.parthandler('test:ping')
47 47 > def pinghandler(op, part):
48 48 > op.ui.write('received ping request (id %i)\n' % part.id)
49 49 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
50 50 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
51 51 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
52 52 >
53 53 > @bundle2.parthandler('test:debugreply')
54 54 > def debugreply(op, part):
55 55 > """print data about the capacity of the bundle reply"""
56 56 > if op.reply is None:
57 57 > op.ui.write('debugreply: no reply\n')
58 58 > else:
59 59 > op.ui.write('debugreply: capabilities:\n')
60 60 > for cap in sorted(op.reply.capabilities):
61 61 > op.ui.write('debugreply: %r\n' % cap)
62 62 > for val in op.reply.capabilities[cap]:
63 63 > op.ui.write('debugreply: %r\n' % val)
64 64 >
65 65 > @command('bundle2',
66 66 > [('', 'param', [], 'stream level parameter'),
67 67 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
68 68 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
69 69 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
70 70 > ('', 'reply', False, 'produce a reply bundle'),
71 71 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
72 72 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
73 73 > '[OUTPUTFILE]')
74 74 > def cmdbundle2(ui, repo, path=None, **opts):
75 75 > """write a bundle2 container on standard ouput"""
76 76 > bundler = bundle2.bundle20(ui)
77 77 > for p in opts['param']:
78 78 > p = p.split('=', 1)
79 79 > try:
80 80 > bundler.addparam(*p)
81 81 > except ValueError, exc:
82 82 > raise util.Abort('%s' % exc)
83 83 >
84 84 > if opts['reply']:
85 85 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
86 86 > bundler.newpart('b2x:replycaps', data=capsstring)
87 87 >
88 88 > if opts['pushrace']:
89 89 > # also serves to test the assignment of data outside of init
90 90 > part = bundler.newpart('b2x:check:heads')
91 91 > part.data = '01234567890123456789'
92 92 >
93 93 > revs = opts['rev']
94 94 > if 'rev' in opts:
95 95 > revs = scmutil.revrange(repo, opts['rev'])
96 96 > if revs:
97 97 > # very crude version of a changegroup part creation
98 98 > bundled = repo.revs('%ld::%ld', revs, revs)
99 99 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
100 100 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
101 101 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
102 102 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
103 103 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
104 104 >
105 105 > if opts['parts']:
106 106 > bundler.newpart('test:empty')
107 107 > # add a second one to make sure we handle multiple parts
108 108 > bundler.newpart('test:empty')
109 109 > bundler.newpart('test:song', data=ELEPHANTSSONG)
110 110 > bundler.newpart('test:debugreply')
111 111 > mathpart = bundler.newpart('test:math')
112 112 > mathpart.addparam('pi', '3.14')
113 113 > mathpart.addparam('e', '2.72')
114 114 > mathpart.addparam('cooking', 'raw', mandatory=False)
115 115 > mathpart.data = '42'
116 116 > # advisory known part with unknown mandatory param
117 117 > bundler.newpart('test:song', [('randomparam','')])
118 118 > if opts['unknown']:
119 119 > bundler.newpart('test:UNKNOWN', data='some random content')
120 120 > if opts['unknownparams']:
121 121 > bundler.newpart('test:SONG', [('randomparams', '')])
122 122 > if opts['parts']:
123 123 > bundler.newpart('test:ping')
124 124 >
125 125 > if path is None:
126 126 > file = sys.stdout
127 127 > else:
128 128 > file = open(path, 'w')
129 129 >
130 130 > for chunk in bundler.getchunks():
131 131 > file.write(chunk)
132 132 >
133 133 > @command('unbundle2', [], '')
134 134 > def cmdunbundle2(ui, repo, replypath=None):
135 135 > """process a bundle2 stream from stdin on the current repo"""
136 136 > try:
137 137 > tr = None
138 138 > lock = repo.lock()
139 139 > tr = repo.transaction('processbundle')
140 140 > try:
141 141 > unbundler = bundle2.unbundle20(ui, sys.stdin)
142 142 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
143 143 > tr.close()
144 144 > except error.BundleValueError, exc:
145 145 > raise util.Abort('missing support for %s' % exc)
146 146 > except error.PushRaced, exc:
147 147 > raise util.Abort('push race: %s' % exc)
148 148 > finally:
149 149 > if tr is not None:
150 150 > tr.release()
151 151 > lock.release()
152 152 > remains = sys.stdin.read()
153 153 > ui.write('%i unread bytes\n' % len(remains))
154 154 > if op.records['song']:
155 155 > totalverses = sum(r['verses'] for r in op.records['song'])
156 156 > ui.write('%i total verses sung\n' % totalverses)
157 157 > for rec in op.records['changegroup']:
158 158 > ui.write('addchangegroup return: %i\n' % rec['return'])
159 159 > if op.reply is not None and replypath is not None:
160 160 > file = open(replypath, 'w')
161 161 > for chunk in op.reply.getchunks():
162 162 > file.write(chunk)
163 163 >
164 164 > @command('statbundle2', [], '')
165 165 > def cmdstatbundle2(ui, repo):
166 166 > """print statistic on the bundle2 container read from stdin"""
167 167 > unbundler = bundle2.unbundle20(ui, sys.stdin)
168 168 > try:
169 169 > params = unbundler.params
170 170 > except error.BundleValueError, exc:
171 171 > raise util.Abort('unknown parameters: %s' % exc)
172 172 > ui.write('options count: %i\n' % len(params))
173 173 > for key in sorted(params):
174 174 > ui.write('- %s\n' % key)
175 175 > value = params[key]
176 176 > if value is not None:
177 177 > ui.write(' %s\n' % value)
178 178 > count = 0
179 179 > for p in unbundler.iterparts():
180 180 > count += 1
181 181 > ui.write(' :%s:\n' % p.type)
182 182 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
183 183 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
184 184 > ui.write(' payload: %i bytes\n' % len(p.read()))
185 185 > ui.write('parts count: %i\n' % count)
186 186 > EOF
187 187 $ cat >> $HGRCPATH << EOF
188 188 > [extensions]
189 189 > bundle2=$TESTTMP/bundle2.py
190 190 > [experimental]
191 191 > bundle2-exp=True
192 192 > [ui]
193 193 > ssh=python "$TESTDIR/dummyssh"
194 194 > logtemplate={rev}:{node|short} {phase} {author} {desc|firstline}
195 195 > [web]
196 196 > push_ssl = false
197 197 > allow_push = *
198 198 > [phases]
199 199 > publish=False
200 200 > EOF
201 201
202 202 The extension requires a repo (currently unused)
203 203
204 204 $ hg init main
205 205 $ cd main
206 206 $ touch a
207 207 $ hg add a
208 208 $ hg commit -m 'a'
209 209
210 210
211 211 Empty bundle
212 212 =================
213 213
214 214 - no option
215 215 - no parts
216 216
217 217 Test bundling
218 218
219 219 $ hg bundle2
220 220 HG2X\x00\x00\x00\x00 (no-eol) (esc)
221 221
222 222 Test unbundling
223 223
224 224 $ hg bundle2 | hg statbundle2
225 225 options count: 0
226 226 parts count: 0
227 227
228 228 Test that old-style bundles are detected and refused
229 229
230 230 $ hg bundle --all ../bundle.hg
231 231 1 changesets found
232 232 $ hg statbundle2 < ../bundle.hg
233 233 abort: unknown bundle version 10
234 234 [255]
235 235
236 236 Test parameters
237 237 =================
238 238
239 239 - some options
240 240 - no parts
241 241
242 242 advisory parameters, no value
243 243 -------------------------------
244 244
245 245 Simplest possible parameters form
246 246
247 247 Test generation simple option
248 248
249 249 $ hg bundle2 --param 'caution'
250 250 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
251 251
252 252 Test unbundling
253 253
254 254 $ hg bundle2 --param 'caution' | hg statbundle2
255 255 options count: 1
256 256 - caution
257 257 parts count: 0
258 258
259 259 Test generation multiple option
260 260
261 261 $ hg bundle2 --param 'caution' --param 'meal'
262 262 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
263 263
264 264 Test unbundling
265 265
266 266 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
267 267 options count: 2
268 268 - caution
269 269 - meal
270 270 parts count: 0
271 271
272 272 advisory parameters, with value
273 273 -------------------------------
274 274
275 275 Test generation
276 276
277 277 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
278 278 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
279 279
280 280 Test unbundling
281 281
282 282 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
283 283 options count: 3
284 284 - caution
285 285 - elephants
286 286 - meal
287 287 vegan
288 288 parts count: 0
289 289
290 290 parameter with special char in value
291 291 ---------------------------------------------------
292 292
293 293 Test generation
294 294
295 295 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
296 296 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
297 297
298 298 Test unbundling
299 299
300 300 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
301 301 options count: 2
302 302 - e|! 7/
303 303 babar%#==tutu
304 304 - simple
305 305 parts count: 0
306 306
307 307 Test unknown mandatory option
308 308 ---------------------------------------------------
309 309
310 310 $ hg bundle2 --param 'Gravity' | hg statbundle2
311 311 abort: unknown parameters: Stream Parameter - Gravity
312 312 [255]
313 313
314 314 Test debug output
315 315 ---------------------------------------------------
316 316
317 317 bundling debug
318 318
319 319 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
320 320 start emission of HG2X stream
321 321 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
322 322 start of parts
323 323 end of bundle
324 324
325 325 file content is ok
326 326
327 327 $ cat ../out.hg2
328 328 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
329 329
330 330 unbundling debug
331 331
332 332 $ hg statbundle2 --debug < ../out.hg2
333 333 start processing of HG2X stream
334 334 reading bundle2 stream parameters
335 335 ignoring unknown parameter 'e|! 7/'
336 336 ignoring unknown parameter 'simple'
337 337 options count: 2
338 338 - e|! 7/
339 339 babar%#==tutu
340 340 - simple
341 341 start extraction of bundle2 parts
342 342 part header size: 0
343 343 end of bundle2 stream
344 344 parts count: 0
345 345
346 346
347 347 Test buggy input
348 348 ---------------------------------------------------
349 349
350 350 empty parameter name
351 351
352 352 $ hg bundle2 --param '' --quiet
353 353 abort: empty parameter name
354 354 [255]
355 355
356 356 bad parameter name
357 357
358 358 $ hg bundle2 --param 42babar
359 359 abort: non letter first character: '42babar'
360 360 [255]
361 361
362 362
363 363 Test part
364 364 =================
365 365
366 366 $ hg bundle2 --parts ../parts.hg2 --debug
367 367 start emission of HG2X stream
368 368 bundle parameter:
369 369 start of parts
370 370 bundle part: "test:empty"
371 371 bundle part: "test:empty"
372 372 bundle part: "test:song"
373 373 bundle part: "test:debugreply"
374 374 bundle part: "test:math"
375 375 bundle part: "test:song"
376 376 bundle part: "test:ping"
377 377 end of bundle
378 378
379 379 $ cat ../parts.hg2
380 380 HG2X\x00\x00\x00\x11 (esc)
381 381 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
382 382 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
383 383 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
384 384 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
385 385
386 386
387 387 $ hg statbundle2 < ../parts.hg2
388 388 options count: 0
389 389 :test:empty:
390 390 mandatory: 0
391 391 advisory: 0
392 392 payload: 0 bytes
393 393 :test:empty:
394 394 mandatory: 0
395 395 advisory: 0
396 396 payload: 0 bytes
397 397 :test:song:
398 398 mandatory: 0
399 399 advisory: 0
400 400 payload: 178 bytes
401 401 :test:debugreply:
402 402 mandatory: 0
403 403 advisory: 0
404 404 payload: 0 bytes
405 405 :test:math:
406 406 mandatory: 2
407 407 advisory: 1
408 408 payload: 2 bytes
409 409 :test:song:
410 410 mandatory: 1
411 411 advisory: 0
412 412 payload: 0 bytes
413 413 :test:ping:
414 414 mandatory: 0
415 415 advisory: 0
416 416 payload: 0 bytes
417 417 parts count: 7
418 418
419 419 $ hg statbundle2 --debug < ../parts.hg2
420 420 start processing of HG2X stream
421 421 reading bundle2 stream parameters
422 422 options count: 0
423 423 start extraction of bundle2 parts
424 424 part header size: 17
425 425 part type: "test:empty"
426 426 part id: "0"
427 427 part parameters: 0
428 428 :test:empty:
429 429 mandatory: 0
430 430 advisory: 0
431 431 payload chunk size: 0
432 432 payload: 0 bytes
433 433 part header size: 17
434 434 part type: "test:empty"
435 435 part id: "1"
436 436 part parameters: 0
437 437 :test:empty:
438 438 mandatory: 0
439 439 advisory: 0
440 440 payload chunk size: 0
441 441 payload: 0 bytes
442 442 part header size: 16
443 443 part type: "test:song"
444 444 part id: "2"
445 445 part parameters: 0
446 446 :test:song:
447 447 mandatory: 0
448 448 advisory: 0
449 449 payload chunk size: 178
450 450 payload chunk size: 0
451 451 payload: 178 bytes
452 452 part header size: 22
453 453 part type: "test:debugreply"
454 454 part id: "3"
455 455 part parameters: 0
456 456 :test:debugreply:
457 457 mandatory: 0
458 458 advisory: 0
459 459 payload chunk size: 0
460 460 payload: 0 bytes
461 461 part header size: 43
462 462 part type: "test:math"
463 463 part id: "4"
464 464 part parameters: 3
465 465 :test:math:
466 466 mandatory: 2
467 467 advisory: 1
468 468 payload chunk size: 2
469 469 payload chunk size: 0
470 470 payload: 2 bytes
471 471 part header size: 29
472 472 part type: "test:song"
473 473 part id: "5"
474 474 part parameters: 1
475 475 :test:song:
476 476 mandatory: 1
477 477 advisory: 0
478 478 payload chunk size: 0
479 479 payload: 0 bytes
480 480 part header size: 16
481 481 part type: "test:ping"
482 482 part id: "6"
483 483 part parameters: 0
484 484 :test:ping:
485 485 mandatory: 0
486 486 advisory: 0
487 487 payload chunk size: 0
488 488 payload: 0 bytes
489 489 part header size: 0
490 490 end of bundle2 stream
491 491 parts count: 7
492 492
493 493 Test actual unbundling of test part
494 494 =======================================
495 495
496 496 Process the bundle
497 497
498 498 $ hg unbundle2 --debug < ../parts.hg2
499 499 start processing of HG2X stream
500 500 reading bundle2 stream parameters
501 501 start extraction of bundle2 parts
502 502 part header size: 17
503 503 part type: "test:empty"
504 504 part id: "0"
505 505 part parameters: 0
506 506 ignoring unsupported advisory part test:empty
507 507 payload chunk size: 0
508 508 part header size: 17
509 509 part type: "test:empty"
510 510 part id: "1"
511 511 part parameters: 0
512 512 ignoring unsupported advisory part test:empty
513 513 payload chunk size: 0
514 514 part header size: 16
515 515 part type: "test:song"
516 516 part id: "2"
517 517 part parameters: 0
518 518 found a handler for part 'test:song'
519 519 The choir starts singing:
520 520 payload chunk size: 178
521 521 payload chunk size: 0
522 522 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
523 523 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
524 524 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
525 525 part header size: 22
526 526 part type: "test:debugreply"
527 527 part id: "3"
528 528 part parameters: 0
529 529 found a handler for part 'test:debugreply'
530 530 debugreply: no reply
531 531 payload chunk size: 0
532 532 part header size: 43
533 533 part type: "test:math"
534 534 part id: "4"
535 535 part parameters: 3
536 536 ignoring unsupported advisory part test:math
537 537 payload chunk size: 2
538 538 payload chunk size: 0
539 539 part header size: 29
540 540 part type: "test:song"
541 541 part id: "5"
542 542 part parameters: 1
543 543 found a handler for part 'test:song'
544 544 ignoring unsupported advisory part test:song - randomparam
545 545 payload chunk size: 0
546 546 part header size: 16
547 547 part type: "test:ping"
548 548 part id: "6"
549 549 part parameters: 0
550 550 found a handler for part 'test:ping'
551 551 received ping request (id 6)
552 552 payload chunk size: 0
553 553 part header size: 0
554 554 end of bundle2 stream
555 555 0 unread bytes
556 556 3 total verses sung
557 557
558 558 Unbundle with an unknown mandatory part
559 559 (should abort)
560 560
561 561 $ hg bundle2 --parts --unknown ../unknown.hg2
562 562
563 563 $ hg unbundle2 < ../unknown.hg2
564 564 The choir starts singing:
565 565 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
566 566 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
567 567 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
568 568 debugreply: no reply
569 569 0 unread bytes
570 570 abort: missing support for test:unknown
571 571 [255]
572 572
573 573 Unbundle with an unknown mandatory part parameters
574 574 (should abort)
575 575
576 576 $ hg bundle2 --unknownparams ../unknown.hg2
577 577
578 578 $ hg unbundle2 < ../unknown.hg2
579 579 0 unread bytes
580 580 abort: missing support for test:song - randomparams
581 581 [255]
582 582
583 583 unbundle with a reply
584 584
585 585 $ hg bundle2 --parts --reply ../parts-reply.hg2
586 586 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
587 587 0 unread bytes
588 588 3 total verses sung
589 589
590 590 The reply is a bundle
591 591
592 592 $ cat ../reply.hg2
593 593 HG2X\x00\x00\x00\x1f (esc)
594 594 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
595 595 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
596 596 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
597 597 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
598 598 \x00\x00\x00\x00\x00\x1f (esc)
599 599 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
600 600 debugreply: 'city=!'
601 601 debugreply: 'celeste,ville'
602 602 debugreply: 'elephants'
603 603 debugreply: 'babar'
604 604 debugreply: 'celeste'
605 605 debugreply: 'ping-pong'
606 606 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
607 607 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
608 608 replying to ping request (id 7)
609 609 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
610 610
611 611 The reply is valid
612 612
613 613 $ hg statbundle2 < ../reply.hg2
614 614 options count: 0
615 615 :b2x:output:
616 616 mandatory: 0
617 617 advisory: 1
618 618 payload: 217 bytes
619 619 :b2x:output:
620 620 mandatory: 0
621 621 advisory: 1
622 622 payload: 201 bytes
623 623 :test:pong:
624 624 mandatory: 1
625 625 advisory: 0
626 626 payload: 0 bytes
627 627 :b2x:output:
628 628 mandatory: 0
629 629 advisory: 1
630 630 payload: 61 bytes
631 631 parts count: 4
632 632
633 633 Unbundle the reply to get the output:
634 634
635 635 $ hg unbundle2 < ../reply.hg2
636 636 remote: The choir starts singing:
637 637 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
638 638 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
639 639 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
640 640 remote: debugreply: capabilities:
641 641 remote: debugreply: 'city=!'
642 642 remote: debugreply: 'celeste,ville'
643 643 remote: debugreply: 'elephants'
644 644 remote: debugreply: 'babar'
645 645 remote: debugreply: 'celeste'
646 646 remote: debugreply: 'ping-pong'
647 647 remote: received ping request (id 7)
648 648 remote: replying to ping request (id 7)
649 649 0 unread bytes
650 650
651 651 Test push race detection
652 652
653 653 $ hg bundle2 --pushrace ../part-race.hg2
654 654
655 655 $ hg unbundle2 < ../part-race.hg2
656 656 0 unread bytes
657 657 abort: push race: repository changed while pushing - please try again
658 658 [255]
659 659
660 660 Support for changegroup
661 661 ===================================
662 662
663 663 $ hg unbundle $TESTDIR/bundles/rebase.hg
664 664 adding changesets
665 665 adding manifests
666 666 adding file changes
667 667 added 8 changesets with 7 changes to 7 files (+3 heads)
668 668 (run 'hg heads' to see heads, 'hg merge' to merge)
669 669
670 670 $ hg log -G
671 671 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
672 672 |
673 673 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
674 674 |/|
675 675 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
676 676 | |
677 677 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
678 678 |/
679 679 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
680 680 | |
681 681 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
682 682 | |
683 683 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
684 684 |/
685 685 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
686 686
687 687 @ 0:3903775176ed draft test a
688 688
689 689
690 690 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
691 691 4 changesets found
692 692 list of changesets:
693 693 32af7686d403cf45b5d95f2d70cebea587ac806a
694 694 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
695 695 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
696 696 02de42196ebee42ef284b6780a87cdc96e8eaab6
697 697 start emission of HG2X stream
698 698 bundle parameter:
699 699 start of parts
700 700 bundle part: "b2x:changegroup"
701 701 bundling: 1/4 changesets (25.00%)
702 702 bundling: 2/4 changesets (50.00%)
703 703 bundling: 3/4 changesets (75.00%)
704 704 bundling: 4/4 changesets (100.00%)
705 705 bundling: 1/4 manifests (25.00%)
706 706 bundling: 2/4 manifests (50.00%)
707 707 bundling: 3/4 manifests (75.00%)
708 708 bundling: 4/4 manifests (100.00%)
709 709 bundling: D 1/3 files (33.33%)
710 710 bundling: E 2/3 files (66.67%)
711 711 bundling: H 3/3 files (100.00%)
712 712 end of bundle
713 713
714 714 $ cat ../rev.hg2
715 715 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
716 716 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
717 717 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
718 718 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
719 719 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
720 720 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
721 721 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
722 722 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
723 723 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
724 724 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
725 725 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
726 726 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
727 727 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 728 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
729 729 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
730 730 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
731 731 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
732 732 l\r (no-eol) (esc)
733 733 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
734 734 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
735 735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
736 736 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
737 737
738 738 $ hg unbundle2 < ../rev.hg2
739 739 adding changesets
740 740 adding manifests
741 741 adding file changes
742 742 added 0 changesets with 0 changes to 3 files
743 743 0 unread bytes
744 744 addchangegroup return: 1
745 745
746 746 with reply
747 747
748 748 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
749 749 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
750 750 0 unread bytes
751 751 addchangegroup return: 1
752 752
753 753 $ cat ../rev-reply.hg2
754 754 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
755 755 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
756 756 adding manifests
757 757 adding file changes
758 758 added 0 changesets with 0 changes to 3 files
759 759 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
760 760
761 761 Real world exchange
762 762 =====================
763 763
764 764
765 765 clone --pull
766 766
767 767 $ cd ..
768 768 $ hg -R main phase --public cd010b8cd998
769 769 $ hg clone main other --pull --rev 9520eea781bc
770 770 adding changesets
771 771 adding manifests
772 772 adding file changes
773 773 added 2 changesets with 2 changes to 2 files
774 774 updating to branch default
775 775 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
776 776 $ hg -R other log -G
777 777 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
778 778 |
779 779 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
780 780
781 781
782 782 pull
783 783
784 784 $ hg -R main phase --public 9520eea781bc
785 785 $ hg -R other pull -r 24b6387c8c8c
786 786 pulling from $TESTTMP/main (glob)
787 787 searching for changes
788 788 adding changesets
789 789 adding manifests
790 790 adding file changes
791 791 added 1 changesets with 1 changes to 1 files (+1 heads)
792 792 (run 'hg heads' to see heads, 'hg merge' to merge)
793 793 $ hg -R other log -G
794 794 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
795 795 |
796 796 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
797 797 |/
798 798 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
799 799
800 800
801 801 pull empty (with phase movement)
802 802
803 803 $ hg -R main phase --public 24b6387c8c8c
804 804 $ hg -R other pull -r 24b6387c8c8c
805 805 pulling from $TESTTMP/main (glob)
806 806 no changes found
807 807 $ hg -R other log -G
808 808 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
809 809 |
810 810 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
811 811 |/
812 812 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
813 813
814 814 pull empty
815 815
816 816 $ hg -R other pull -r 24b6387c8c8c
817 817 pulling from $TESTTMP/main (glob)
818 818 no changes found
819 819 $ hg -R other log -G
820 820 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
821 821 |
822 822 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
823 823 |/
824 824 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
825 825
826 826
827 827 push
828 828
829 829 $ hg -R main phase --public eea13746799a
830 830 $ hg -R main push other --rev eea13746799a
831 831 pushing to other
832 832 searching for changes
833 833 remote: adding changesets
834 834 remote: adding manifests
835 835 remote: adding file changes
836 836 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
837 837 $ hg -R other log -G
838 838 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
839 839 |\
840 840 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
841 841 | |
842 842 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
843 843 |/
844 844 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
845 845
846 846
847 847 pull over ssh
848 848
849 849 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
850 850 pulling from ssh://user@dummy/main
851 851 searching for changes
852 852 adding changesets
853 853 adding manifests
854 854 adding file changes
855 855 added 1 changesets with 1 changes to 1 files (+1 heads)
856 856 (run 'hg heads' to see heads, 'hg merge' to merge)
857 857
858 858 pull over http
859 859
860 860 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
861 861 $ cat main.pid >> $DAEMON_PIDS
862 862
863 863 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
864 864 pulling from http://localhost:$HGPORT/
865 865 searching for changes
866 866 adding changesets
867 867 adding manifests
868 868 adding file changes
869 869 added 1 changesets with 1 changes to 1 files (+1 heads)
870 870 (run 'hg heads .' to see heads, 'hg merge' to merge)
871 871 $ cat main-error.log
872 872
873 873 push over ssh
874 874
875 875 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
876 876 pushing to ssh://user@dummy/other
877 877 searching for changes
878 878 remote: adding changesets
879 879 remote: adding manifests
880 880 remote: adding file changes
881 881 remote: added 1 changesets with 1 changes to 1 files
882 882 $ hg -R other log -G
883 883 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
884 884 |
885 885 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
886 886 |
887 887 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
888 888 | |
889 889 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
890 890 | |/|
891 891 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
892 892 |/ /
893 893 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
894 894 |/
895 895 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
896 896
897 897
898 898 push over http
899 899
900 900 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
901 901 $ cat other.pid >> $DAEMON_PIDS
902 902
903 903 $ hg -R main phase --public 32af7686d403
904 904 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
905 905 pushing to http://localhost:$HGPORT2/
906 906 searching for changes
907 907 remote: adding changesets
908 908 remote: adding manifests
909 909 remote: adding file changes
910 910 remote: added 1 changesets with 1 changes to 1 files
911 911 $ cat other-error.log
912 912
913 913 Check final content.
914 914
915 915 $ hg -R other log -G
916 916 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
917 917 |
918 918 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
919 919 |
920 920 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
921 921 |
922 922 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
923 923 | |
924 924 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
925 925 | |/|
926 926 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
927 927 |/ /
928 928 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
929 929 |/
930 930 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
931 931
932 932
933 933 Error Handling
934 934 ==============
935 935
936 936 Check that errors are properly returned to the client during push.
937 937
938 938 Setting up
939 939
940 940 $ cat > failpush.py << EOF
941 941 > """A small extension that makes push fails when using bundle2
942 942 >
943 943 > used to test error handling in bundle2
944 944 > """
945 945 >
946 946 > from mercurial import util
947 947 > from mercurial import bundle2
948 948 > from mercurial import exchange
949 949 > from mercurial import extensions
950 950 >
951 951 > def _pushbundle2failpart(pushop, bundler):
952 952 > reason = pushop.ui.config('failpush', 'reason', None)
953 953 > part = None
954 954 > if reason == 'abort':
955 955 > bundler.newpart('test:abort')
956 956 > if reason == 'unknown':
957 957 > bundler.newpart('TEST:UNKNOWN')
958 958 > if reason == 'race':
959 959 > # 20 Bytes of crap
960 960 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
961 961 >
962 962 > @bundle2.parthandler("test:abort")
963 963 > def handleabort(op, part):
964 964 > raise util.Abort('Abandon ship!', hint="don't panic")
965 965 >
966 966 > def uisetup(ui):
967 > exchange.bundle2partsgenerators.insert(0, _pushbundle2failpart)
967 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
968 > exchange.b2partsgenorder.insert(0, 'failpart')
968 969 >
969 970 > EOF
970 971
971 972 $ cd main
972 973 $ hg up tip
973 974 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
974 975 $ echo 'I' > I
975 976 $ hg add I
976 977 $ hg ci -m 'I'
977 978 $ hg id
978 979 e7ec4e813ba6 tip
979 980 $ cd ..
980 981
981 982 $ cat << EOF >> $HGRCPATH
982 983 > [extensions]
983 984 > failpush=$TESTTMP/failpush.py
984 985 > EOF
985 986
986 987 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
987 988 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
988 989 $ cat other.pid >> $DAEMON_PIDS
989 990
990 991 Doing the actual push: Abort error
991 992
992 993 $ cat << EOF >> $HGRCPATH
993 994 > [failpush]
994 995 > reason = abort
995 996 > EOF
996 997
997 998 $ hg -R main push other -r e7ec4e813ba6
998 999 pushing to other
999 1000 searching for changes
1000 1001 abort: Abandon ship!
1001 1002 (don't panic)
1002 1003 [255]
1003 1004
1004 1005 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1005 1006 pushing to ssh://user@dummy/other
1006 1007 searching for changes
1007 1008 abort: Abandon ship!
1008 1009 (don't panic)
1009 1010 [255]
1010 1011
1011 1012 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1012 1013 pushing to http://localhost:$HGPORT2/
1013 1014 searching for changes
1014 1015 abort: Abandon ship!
1015 1016 (don't panic)
1016 1017 [255]
1017 1018
1018 1019
1019 1020 Doing the actual push: unknown mandatory parts
1020 1021
1021 1022 $ cat << EOF >> $HGRCPATH
1022 1023 > [failpush]
1023 1024 > reason = unknown
1024 1025 > EOF
1025 1026
1026 1027 $ hg -R main push other -r e7ec4e813ba6
1027 1028 pushing to other
1028 1029 searching for changes
1029 1030 abort: missing support for test:unknown
1030 1031 [255]
1031 1032
1032 1033 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1033 1034 pushing to ssh://user@dummy/other
1034 1035 searching for changes
1035 1036 abort: missing support for test:unknown
1036 1037 [255]
1037 1038
1038 1039 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1039 1040 pushing to http://localhost:$HGPORT2/
1040 1041 searching for changes
1041 1042 abort: missing support for test:unknown
1042 1043 [255]
1043 1044
1044 1045 Doing the actual push: race
1045 1046
1046 1047 $ cat << EOF >> $HGRCPATH
1047 1048 > [failpush]
1048 1049 > reason = race
1049 1050 > EOF
1050 1051
1051 1052 $ hg -R main push other -r e7ec4e813ba6
1052 1053 pushing to other
1053 1054 searching for changes
1054 1055 abort: push failed:
1055 1056 'repository changed while pushing - please try again'
1056 1057 [255]
1057 1058
1058 1059 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1059 1060 pushing to ssh://user@dummy/other
1060 1061 searching for changes
1061 1062 abort: push failed:
1062 1063 'repository changed while pushing - please try again'
1063 1064 [255]
1064 1065
1065 1066 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1066 1067 pushing to http://localhost:$HGPORT2/
1067 1068 searching for changes
1068 1069 abort: push failed:
1069 1070 'repository changed while pushing - please try again'
1070 1071 [255]
1071 1072
1072 1073 Doing the actual push: hook abort
1073 1074
1074 1075 $ cat << EOF >> $HGRCPATH
1075 1076 > [failpush]
1076 1077 > reason =
1077 1078 > [hooks]
1078 1079 > b2x-pretransactionclose.failpush = false
1079 1080 > EOF
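
The shell hook false above is the simplest way to make the b2x-pretransactionclose hook fail. For reference, an in-process equivalent could look like the sketch below; it only assumes the standard Mercurial Python hook convention (ui, repo, hooktype, **kwargs, with a true return value marking failure), and every name except the hook name itself is illustrative:

    # failhook.py, illustrative only
    def fail(ui, repo, hooktype, **kwargs):
        ui.warn('refusing to let the push transaction close\n')
        return True  # a true return value fails the hook, like `false` does

wired up in the configuration as something like:

    [hooks]
    b2x-pretransactionclose.failpush = python:$TESTTMP/failhook.py:fail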
1080 1081
1081 1082 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1082 1083 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1083 1084 $ cat other.pid >> $DAEMON_PIDS
1084 1085
1085 1086 $ hg -R main push other -r e7ec4e813ba6
1086 1087 pushing to other
1087 1088 searching for changes
1088 1089 transaction abort!
1089 1090 rollback completed
1090 1091 abort: b2x-pretransactionclose.failpush hook exited with status 1
1091 1092 [255]
1092 1093
1093 1094 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1094 1095 pushing to ssh://user@dummy/other
1095 1096 searching for changes
1096 1097 abort: b2x-pretransactionclose.failpush hook exited with status 1
1097 1098 remote: transaction abort!
1098 1099 remote: rollback completed
1099 1100 [255]
1100 1101
1101 1102 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1102 1103 pushing to http://localhost:$HGPORT2/
1103 1104 searching for changes
1104 1105 abort: b2x-pretransactionclose.failpush hook exited with status 1
1105 1106 [255]
1106 1107
1107 1108