##// END OF EJS Templates
bundle2: gracefully handle hook abort...
Pierre-Yves David -
r21187:bcfd44ab stable
parent child Browse files
Show More
@@ -1,757 +1,761 b''
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2
13 13
14 14 def readbundle(ui, fh, fname, vfs=None):
15 15 header = changegroup.readexactly(fh, 4)
16 16
17 17 alg = None
18 18 if not fname:
19 19 fname = "stream"
20 20 if not header.startswith('HG') and header.startswith('\0'):
21 21 fh = changegroup.headerlessfixup(fh, header)
22 22 header = "HG10"
23 23 alg = 'UN'
24 24 elif vfs:
25 25 fname = vfs.join(fname)
26 26
27 27 magic, version = header[0:2], header[2:4]
28 28
29 29 if magic != 'HG':
30 30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 31 if version == '10':
32 32 if alg is None:
33 33 alg = changegroup.readexactly(fh, 2)
34 34 return changegroup.unbundle10(fh, alg)
35 35 elif version == '2X':
36 36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 37 else:
38 38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
40 40
41 41 class pushoperation(object):
42 42 """A object that represent a single push operation
43 43
44 44 It purpose is to carry push related state and very common operation.
45 45
46 46 A new should be created at the beginning of each push and discarded
47 47 afterward.
48 48 """
49 49
50 50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 51 # repo we push from
52 52 self.repo = repo
53 53 self.ui = repo.ui
54 54 # repo we push to
55 55 self.remote = remote
56 56 # force option provided
57 57 self.force = force
58 58 # revs to be pushed (None is "all")
59 59 self.revs = revs
60 60 # allow push of new branch
61 61 self.newbranch = newbranch
62 62 # did a local lock get acquired?
63 63 self.locallocked = None
64 64 # Integer version of the push result
65 65 # - None means nothing to push
66 66 # - 0 means HTTP error
67 67 # - 1 means we pushed and remote head count is unchanged *or*
68 68 # we have outgoing changesets but refused to push
69 69 # - other values as described by addchangegroup()
70 70 self.ret = None
71 71 # discover.outgoing object (contains common and outgoing data)
72 72 self.outgoing = None
73 73 # all remote heads before the push
74 74 self.remoteheads = None
75 75 # testable as a boolean indicating if any nodes are missing locally.
76 76 self.incoming = None
77 77 # set of all heads common after changeset bundle push
78 78 self.commonheads = None
79 79
80 80 def push(repo, remote, force=False, revs=None, newbranch=False):
81 81 '''Push outgoing changesets (limited by revs) from a local
82 82 repository to remote. Return an integer:
83 83 - None means nothing to push
84 84 - 0 means HTTP error
85 85 - 1 means we pushed and remote head count is unchanged *or*
86 86 we have outgoing changesets but refused to push
87 87 - other values as described by addchangegroup()
88 88 '''
89 89 pushop = pushoperation(repo, remote, force, revs, newbranch)
90 90 if pushop.remote.local():
91 91 missing = (set(pushop.repo.requirements)
92 92 - pushop.remote.local().supported)
93 93 if missing:
94 94 msg = _("required features are not"
95 95 " supported in the destination:"
96 96 " %s") % (', '.join(sorted(missing)))
97 97 raise util.Abort(msg)
98 98
99 99 # there are two ways to push to remote repo:
100 100 #
101 101 # addchangegroup assumes local user can lock remote
102 102 # repo (local filesystem, old ssh servers).
103 103 #
104 104 # unbundle assumes local user cannot lock remote repo (new ssh
105 105 # servers, http servers).
106 106
107 107 if not pushop.remote.canpush():
108 108 raise util.Abort(_("destination does not support push"))
109 109 # get local lock as we might write phase data
110 110 locallock = None
111 111 try:
112 112 locallock = pushop.repo.lock()
113 113 pushop.locallocked = True
114 114 except IOError, err:
115 115 pushop.locallocked = False
116 116 if err.errno != errno.EACCES:
117 117 raise
118 118 # source repo cannot be locked.
119 119 # We do not abort the push, but just disable the local phase
120 120 # synchronisation.
121 121 msg = 'cannot lock source repository: %s\n' % err
122 122 pushop.ui.debug(msg)
123 123 try:
124 124 pushop.repo.checkpush(pushop)
125 125 lock = None
126 126 unbundle = pushop.remote.capable('unbundle')
127 127 if not unbundle:
128 128 lock = pushop.remote.lock()
129 129 try:
130 130 _pushdiscovery(pushop)
131 131 if _pushcheckoutgoing(pushop):
132 132 pushop.repo.prepushoutgoinghooks(pushop.repo,
133 133 pushop.remote,
134 134 pushop.outgoing)
135 135 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
136 136 False)
137 137 and pushop.remote.capable('bundle2-exp')):
138 138 _pushbundle2(pushop)
139 139 else:
140 140 _pushchangeset(pushop)
141 141 _pushcomputecommonheads(pushop)
142 142 _pushsyncphase(pushop)
143 143 _pushobsolete(pushop)
144 144 finally:
145 145 if lock is not None:
146 146 lock.release()
147 147 finally:
148 148 if locallock is not None:
149 149 locallock.release()
150 150
151 151 _pushbookmark(pushop)
152 152 return pushop.ret
153 153
154 154 def _pushdiscovery(pushop):
155 155 # discovery
156 156 unfi = pushop.repo.unfiltered()
157 157 fci = discovery.findcommonincoming
158 158 commoninc = fci(unfi, pushop.remote, force=pushop.force)
159 159 common, inc, remoteheads = commoninc
160 160 fco = discovery.findcommonoutgoing
161 161 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
162 162 commoninc=commoninc, force=pushop.force)
163 163 pushop.outgoing = outgoing
164 164 pushop.remoteheads = remoteheads
165 165 pushop.incoming = inc
166 166
167 167 def _pushcheckoutgoing(pushop):
168 168 outgoing = pushop.outgoing
169 169 unfi = pushop.repo.unfiltered()
170 170 if not outgoing.missing:
171 171 # nothing to push
172 172 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
173 173 return False
174 174 # something to push
175 175 if not pushop.force:
176 176 # if repo.obsstore == False --> no obsolete
177 177 # then, save the iteration
178 178 if unfi.obsstore:
179 179 # this message are here for 80 char limit reason
180 180 mso = _("push includes obsolete changeset: %s!")
181 181 mst = "push includes %s changeset: %s!"
182 182 # plain versions for i18n tool to detect them
183 183 _("push includes unstable changeset: %s!")
184 184 _("push includes bumped changeset: %s!")
185 185 _("push includes divergent changeset: %s!")
186 186 # If we are to push if there is at least one
187 187 # obsolete or unstable changeset in missing, at
188 188 # least one of the missinghead will be obsolete or
189 189 # unstable. So checking heads only is ok
190 190 for node in outgoing.missingheads:
191 191 ctx = unfi[node]
192 192 if ctx.obsolete():
193 193 raise util.Abort(mso % ctx)
194 194 elif ctx.troubled():
195 195 raise util.Abort(_(mst)
196 196 % (ctx.troubles()[0],
197 197 ctx))
198 198 newbm = pushop.ui.configlist('bookmarks', 'pushing')
199 199 discovery.checkheads(unfi, pushop.remote, outgoing,
200 200 pushop.remoteheads,
201 201 pushop.newbranch,
202 202 bool(pushop.incoming),
203 203 newbm)
204 204 return True
205 205
206 206 def _pushbundle2(pushop):
207 207 """push data to the remote using bundle2
208 208
209 209 The only currently supported type of data is changegroup but this will
210 210 evolve in the future."""
211 211 # Send known head to the server for race detection.
212 212 capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
213 213 caps = bundle2.decodecaps(capsblob)
214 214 bundler = bundle2.bundle20(pushop.ui, caps)
215 215 # create reply capability
216 216 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
217 217 bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
218 218 if not pushop.force:
219 219 part = bundle2.bundlepart('B2X:CHECK:HEADS',
220 220 data=iter(pushop.remoteheads))
221 221 bundler.addpart(part)
222 222 extrainfo = _pushbundle2extraparts(pushop, bundler)
223 223 # add the changegroup bundle
224 224 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
225 225 cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
226 226 bundler.addpart(cgpart)
227 227 stream = util.chunkbuffer(bundler.getchunks())
228 228 try:
229 229 reply = pushop.remote.unbundle(stream, ['force'], 'push')
230 230 except bundle2.UnknownPartError, exc:
231 231 raise util.Abort('missing support for %s' % exc)
232 232 try:
233 233 op = bundle2.processbundle(pushop.repo, reply)
234 234 except bundle2.UnknownPartError, exc:
235 235 raise util.Abort('missing support for %s' % exc)
236 236 cgreplies = op.records.getreplies(cgpart.id)
237 237 assert len(cgreplies['changegroup']) == 1
238 238 pushop.ret = cgreplies['changegroup'][0]['return']
239 239 _pushbundle2extrareply(pushop, op, extrainfo)
240 240
241 241 def _pushbundle2extraparts(pushop, bundler):
242 242 """hook function to let extensions add parts
243 243
244 244 Return a dict to let extensions pass data to the reply processing.
245 245 """
246 246 return {}
247 247
248 248 def _pushbundle2extrareply(pushop, op, extrainfo):
249 249 """hook function to let extensions react to part replies
250 250
251 251 The dict from _pushbundle2extrareply is fed to this function.
252 252 """
253 253 pass
254 254
255 255 def _pushchangeset(pushop):
256 256 """Make the actual push of changeset bundle to remote repo"""
257 257 outgoing = pushop.outgoing
258 258 unbundle = pushop.remote.capable('unbundle')
259 259 # TODO: get bundlecaps from remote
260 260 bundlecaps = None
261 261 # create a changegroup from local
262 262 if pushop.revs is None and not (outgoing.excluded
263 263 or pushop.repo.changelog.filteredrevs):
264 264 # push everything,
265 265 # use the fast path, no race possible on push
266 266 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
267 267 cg = changegroup.getsubset(pushop.repo,
268 268 outgoing,
269 269 bundler,
270 270 'push',
271 271 fastpath=True)
272 272 else:
273 273 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
274 274 bundlecaps)
275 275
276 276 # apply changegroup to remote
277 277 if unbundle:
278 278 # local repo finds heads on server, finds out what
279 279 # revs it must push. once revs transferred, if server
280 280 # finds it has different heads (someone else won
281 281 # commit/push race), server aborts.
282 282 if pushop.force:
283 283 remoteheads = ['force']
284 284 else:
285 285 remoteheads = pushop.remoteheads
286 286 # ssh: return remote's addchangegroup()
287 287 # http: return remote's addchangegroup() or 0 for error
288 288 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
289 289 'push')
290 290 else:
291 291 # we return an integer indicating remote head count
292 292 # change
293 293 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
294 294
295 295 def _pushcomputecommonheads(pushop):
296 296 unfi = pushop.repo.unfiltered()
297 297 if pushop.ret:
298 298 # push succeed, synchronize target of the push
299 299 cheads = pushop.outgoing.missingheads
300 300 elif pushop.revs is None:
301 301 # All out push fails. synchronize all common
302 302 cheads = pushop.outgoing.commonheads
303 303 else:
304 304 # I want cheads = heads(::missingheads and ::commonheads)
305 305 # (missingheads is revs with secret changeset filtered out)
306 306 #
307 307 # This can be expressed as:
308 308 # cheads = ( (missingheads and ::commonheads)
309 309 # + (commonheads and ::missingheads))"
310 310 # )
311 311 #
312 312 # while trying to push we already computed the following:
313 313 # common = (::commonheads)
314 314 # missing = ((commonheads::missingheads) - commonheads)
315 315 #
316 316 # We can pick:
317 317 # * missingheads part of common (::commonheads)
318 318 common = set(pushop.outgoing.common)
319 319 nm = pushop.repo.changelog.nodemap
320 320 cheads = [node for node in pushop.revs if nm[node] in common]
321 321 # and
322 322 # * commonheads parents on missing
323 323 revset = unfi.set('%ln and parents(roots(%ln))',
324 324 pushop.outgoing.commonheads,
325 325 pushop.outgoing.missing)
326 326 cheads.extend(c.node() for c in revset)
327 327 pushop.commonheads = cheads
328 328
329 329 def _pushsyncphase(pushop):
330 330 """synchronise phase information locally and remotely"""
331 331 unfi = pushop.repo.unfiltered()
332 332 cheads = pushop.commonheads
333 333 if pushop.ret:
334 334 # push succeed, synchronize target of the push
335 335 cheads = pushop.outgoing.missingheads
336 336 elif pushop.revs is None:
337 337 # All out push fails. synchronize all common
338 338 cheads = pushop.outgoing.commonheads
339 339 else:
340 340 # I want cheads = heads(::missingheads and ::commonheads)
341 341 # (missingheads is revs with secret changeset filtered out)
342 342 #
343 343 # This can be expressed as:
344 344 # cheads = ( (missingheads and ::commonheads)
345 345 # + (commonheads and ::missingheads))"
346 346 # )
347 347 #
348 348 # while trying to push we already computed the following:
349 349 # common = (::commonheads)
350 350 # missing = ((commonheads::missingheads) - commonheads)
351 351 #
352 352 # We can pick:
353 353 # * missingheads part of common (::commonheads)
354 354 common = set(pushop.outgoing.common)
355 355 nm = pushop.repo.changelog.nodemap
356 356 cheads = [node for node in pushop.revs if nm[node] in common]
357 357 # and
358 358 # * commonheads parents on missing
359 359 revset = unfi.set('%ln and parents(roots(%ln))',
360 360 pushop.outgoing.commonheads,
361 361 pushop.outgoing.missing)
362 362 cheads.extend(c.node() for c in revset)
363 363 pushop.commonheads = cheads
364 364 # even when we don't push, exchanging phase data is useful
365 365 remotephases = pushop.remote.listkeys('phases')
366 366 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
367 367 and remotephases # server supports phases
368 368 and pushop.ret is None # nothing was pushed
369 369 and remotephases.get('publishing', False)):
370 370 # When:
371 371 # - this is a subrepo push
372 372 # - and remote support phase
373 373 # - and no changeset was pushed
374 374 # - and remote is publishing
375 375 # We may be in issue 3871 case!
376 376 # We drop the possible phase synchronisation done by
377 377 # courtesy to publish changesets possibly locally draft
378 378 # on the remote.
379 379 remotephases = {'publishing': 'True'}
380 380 if not remotephases: # old server or public only reply from non-publishing
381 381 _localphasemove(pushop, cheads)
382 382 # don't push any phase data as there is nothing to push
383 383 else:
384 384 ana = phases.analyzeremotephases(pushop.repo, cheads,
385 385 remotephases)
386 386 pheads, droots = ana
387 387 ### Apply remote phase on local
388 388 if remotephases.get('publishing', False):
389 389 _localphasemove(pushop, cheads)
390 390 else: # publish = False
391 391 _localphasemove(pushop, pheads)
392 392 _localphasemove(pushop, cheads, phases.draft)
393 393 ### Apply local phase on remote
394 394
395 395 # Get the list of all revs draft on remote by public here.
396 396 # XXX Beware that revset break if droots is not strictly
397 397 # XXX root we may want to ensure it is but it is costly
398 398 outdated = unfi.set('heads((%ln::%ln) and public())',
399 399 droots, cheads)
400 400 for newremotehead in outdated:
401 401 r = pushop.remote.pushkey('phases',
402 402 newremotehead.hex(),
403 403 str(phases.draft),
404 404 str(phases.public))
405 405 if not r:
406 406 pushop.ui.warn(_('updating %s to public failed!\n')
407 407 % newremotehead)
408 408
409 409 def _localphasemove(pushop, nodes, phase=phases.public):
410 410 """move <nodes> to <phase> in the local source repo"""
411 411 if pushop.locallocked:
412 412 phases.advanceboundary(pushop.repo, phase, nodes)
413 413 else:
414 414 # repo is not locked, do not change any phases!
415 415 # Informs the user that phases should have been moved when
416 416 # applicable.
417 417 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
418 418 phasestr = phases.phasenames[phase]
419 419 if actualmoves:
420 420 pushop.ui.status(_('cannot lock source repo, skipping '
421 421 'local %s phase update\n') % phasestr)
422 422
423 423 def _pushobsolete(pushop):
424 424 """utility function to push obsolete markers to a remote"""
425 425 pushop.ui.debug('try to push obsolete markers to remote\n')
426 426 repo = pushop.repo
427 427 remote = pushop.remote
428 428 if (obsolete._enabled and repo.obsstore and
429 429 'obsolete' in remote.listkeys('namespaces')):
430 430 rslts = []
431 431 remotedata = repo.listkeys('obsolete')
432 432 for key in sorted(remotedata, reverse=True):
433 433 # reverse sort to ensure we end with dump0
434 434 data = remotedata[key]
435 435 rslts.append(remote.pushkey('obsolete', key, '', data))
436 436 if [r for r in rslts if not r]:
437 437 msg = _('failed to push some obsolete markers!\n')
438 438 repo.ui.warn(msg)
439 439
440 440 def _pushbookmark(pushop):
441 441 """Update bookmark position on remote"""
442 442 ui = pushop.ui
443 443 repo = pushop.repo.unfiltered()
444 444 remote = pushop.remote
445 445 ui.debug("checking for updated bookmarks\n")
446 446 revnums = map(repo.changelog.rev, pushop.revs or [])
447 447 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
448 448 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
449 449 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
450 450 srchex=hex)
451 451
452 452 for b, scid, dcid in advsrc:
453 453 if ancestors and repo[scid].rev() not in ancestors:
454 454 continue
455 455 if remote.pushkey('bookmarks', b, dcid, scid):
456 456 ui.status(_("updating bookmark %s\n") % b)
457 457 else:
458 458 ui.warn(_('updating bookmark %s failed!\n') % b)
459 459
460 460 class pulloperation(object):
461 461 """A object that represent a single pull operation
462 462
463 463 It purpose is to carry push related state and very common operation.
464 464
465 465 A new should be created at the beginning of each pull and discarded
466 466 afterward.
467 467 """
468 468
469 469 def __init__(self, repo, remote, heads=None, force=False):
470 470 # repo we pull into
471 471 self.repo = repo
472 472 # repo we pull from
473 473 self.remote = remote
474 474 # revision we try to pull (None is "all")
475 475 self.heads = heads
476 476 # do we force pull?
477 477 self.force = force
478 478 # the name the pull transaction
479 479 self._trname = 'pull\n' + util.hidepassword(remote.url())
480 480 # hold the transaction once created
481 481 self._tr = None
482 482 # set of common changeset between local and remote before pull
483 483 self.common = None
484 484 # set of pulled head
485 485 self.rheads = None
486 486 # list of missing changeset to fetch remotely
487 487 self.fetch = None
488 488 # result of changegroup pulling (used as return code by pull)
489 489 self.cgresult = None
490 490 # list of step remaining todo (related to future bundle2 usage)
491 491 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
492 492
493 493 @util.propertycache
494 494 def pulledsubset(self):
495 495 """heads of the set of changeset target by the pull"""
496 496 # compute target subset
497 497 if self.heads is None:
498 498 # We pulled every thing possible
499 499 # sync on everything common
500 500 c = set(self.common)
501 501 ret = list(self.common)
502 502 for n in self.rheads:
503 503 if n not in c:
504 504 ret.append(n)
505 505 return ret
506 506 else:
507 507 # We pulled a specific subset
508 508 # sync on this subset
509 509 return self.heads
510 510
511 511 def gettransaction(self):
512 512 """get appropriate pull transaction, creating it if needed"""
513 513 if self._tr is None:
514 514 self._tr = self.repo.transaction(self._trname)
515 515 return self._tr
516 516
517 517 def closetransaction(self):
518 518 """close transaction if created"""
519 519 if self._tr is not None:
520 520 self._tr.close()
521 521
522 522 def releasetransaction(self):
523 523 """release transaction if created"""
524 524 if self._tr is not None:
525 525 self._tr.release()
526 526
527 527 def pull(repo, remote, heads=None, force=False):
528 528 pullop = pulloperation(repo, remote, heads, force)
529 529 if pullop.remote.local():
530 530 missing = set(pullop.remote.requirements) - pullop.repo.supported
531 531 if missing:
532 532 msg = _("required features are not"
533 533 " supported in the destination:"
534 534 " %s") % (', '.join(sorted(missing)))
535 535 raise util.Abort(msg)
536 536
537 537 lock = pullop.repo.lock()
538 538 try:
539 539 _pulldiscovery(pullop)
540 540 if (pullop.repo.ui.configbool('server', 'bundle2', False)
541 541 and pullop.remote.capable('bundle2-exp')):
542 542 _pullbundle2(pullop)
543 543 if 'changegroup' in pullop.todosteps:
544 544 _pullchangeset(pullop)
545 545 if 'phases' in pullop.todosteps:
546 546 _pullphase(pullop)
547 547 if 'obsmarkers' in pullop.todosteps:
548 548 _pullobsolete(pullop)
549 549 pullop.closetransaction()
550 550 finally:
551 551 pullop.releasetransaction()
552 552 lock.release()
553 553
554 554 return pullop.cgresult
555 555
556 556 def _pulldiscovery(pullop):
557 557 """discovery phase for the pull
558 558
559 559 Current handle changeset discovery only, will change handle all discovery
560 560 at some point."""
561 561 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
562 562 pullop.remote,
563 563 heads=pullop.heads,
564 564 force=pullop.force)
565 565 pullop.common, pullop.fetch, pullop.rheads = tmp
566 566
567 567 def _pullbundle2(pullop):
568 568 """pull data using bundle2
569 569
570 570 For now, the only supported data are changegroup."""
571 571 kwargs = {'bundlecaps': set(['HG2X'])}
572 572 capsblob = bundle2.encodecaps(pullop.repo.bundle2caps)
573 573 kwargs['bundlecaps'].add('bundle2=' + urllib.quote(capsblob))
574 574 # pulling changegroup
575 575 pullop.todosteps.remove('changegroup')
576 576 if not pullop.fetch:
577 577 pullop.repo.ui.status(_("no changes found\n"))
578 578 pullop.cgresult = 0
579 579 else:
580 580 kwargs['common'] = pullop.common
581 581 kwargs['heads'] = pullop.heads or pullop.rheads
582 582 if pullop.heads is None and list(pullop.common) == [nullid]:
583 583 pullop.repo.ui.status(_("requesting all changes\n"))
584 584 _pullbundle2extraprepare(pullop, kwargs)
585 585 if kwargs.keys() == ['format']:
586 586 return # nothing to pull
587 587 bundle = pullop.remote.getbundle('pull', **kwargs)
588 588 try:
589 589 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
590 590         except bundle2.UnknownPartError, exc:
591 591 raise util.Abort('missing support for %s' % exc)
592 592 assert len(op.records['changegroup']) == 1
593 593 pullop.cgresult = op.records['changegroup'][0]['return']
594 594
595 595 def _pullbundle2extraprepare(pullop, kwargs):
596 596 """hook function so that extensions can extend the getbundle call"""
597 597 pass
598 598
599 599 def _pullchangeset(pullop):
600 600 """pull changeset from unbundle into the local repo"""
601 601 # We delay the open of the transaction as late as possible so we
602 602 # don't open transaction for nothing or you break future useful
603 603 # rollback call
604 604 pullop.todosteps.remove('changegroup')
605 605 if not pullop.fetch:
606 606 pullop.repo.ui.status(_("no changes found\n"))
607 607 pullop.cgresult = 0
608 608 return
609 609 pullop.gettransaction()
610 610 if pullop.heads is None and list(pullop.common) == [nullid]:
611 611 pullop.repo.ui.status(_("requesting all changes\n"))
612 612 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
613 613 # issue1320, avoid a race if remote changed after discovery
614 614 pullop.heads = pullop.rheads
615 615
616 616 if pullop.remote.capable('getbundle'):
617 617 # TODO: get bundlecaps from remote
618 618 cg = pullop.remote.getbundle('pull', common=pullop.common,
619 619 heads=pullop.heads or pullop.rheads)
620 620 elif pullop.heads is None:
621 621 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
622 622 elif not pullop.remote.capable('changegroupsubset'):
623 623 raise util.Abort(_("partial pull cannot be done because "
624 624 "other repository doesn't support "
625 625 "changegroupsubset."))
626 626 else:
627 627 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
628 628 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
629 629 pullop.remote.url())
630 630
631 631 def _pullphase(pullop):
632 632 # Get remote phases data from remote
633 633 pullop.todosteps.remove('phases')
634 634 remotephases = pullop.remote.listkeys('phases')
635 635 publishing = bool(remotephases.get('publishing', False))
636 636 if remotephases and not publishing:
637 637 # remote is new and unpublishing
638 638 pheads, _dr = phases.analyzeremotephases(pullop.repo,
639 639 pullop.pulledsubset,
640 640 remotephases)
641 641 phases.advanceboundary(pullop.repo, phases.public, pheads)
642 642 phases.advanceboundary(pullop.repo, phases.draft,
643 643 pullop.pulledsubset)
644 644 else:
645 645 # Remote is old or publishing all common changesets
646 646 # should be seen as public
647 647 phases.advanceboundary(pullop.repo, phases.public,
648 648 pullop.pulledsubset)
649 649
650 650 def _pullobsolete(pullop):
651 651 """utility function to pull obsolete markers from a remote
652 652
653 653 The `gettransaction` is function that return the pull transaction, creating
654 654 one if necessary. We return the transaction to inform the calling code that
655 655 a new transaction have been created (when applicable).
656 656
657 657 Exists mostly to allow overriding for experimentation purpose"""
658 658 pullop.todosteps.remove('obsmarkers')
659 659 tr = None
660 660 if obsolete._enabled:
661 661 pullop.repo.ui.debug('fetching remote obsolete markers\n')
662 662 remoteobs = pullop.remote.listkeys('obsolete')
663 663 if 'dump0' in remoteobs:
664 664 tr = pullop.gettransaction()
665 665 for key in sorted(remoteobs, reverse=True):
666 666 if key.startswith('dump'):
667 667 data = base85.b85decode(remoteobs[key])
668 668 pullop.repo.obsstore.mergemarkers(tr, data)
669 669 pullop.repo.invalidatevolatilesets()
670 670 return tr
671 671
672 672 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
673 673 **kwargs):
674 674 """return a full bundle (with potentially multiple kind of parts)
675 675
676 676 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
677 677 passed. For now, the bundle can contain only changegroup, but this will
678 678 changes when more part type will be available for bundle2.
679 679
680 680 This is different from changegroup.getbundle that only returns an HG10
681 681 changegroup bundle. They may eventually get reunited in the future when we
682 682 have a clearer idea of the API we what to query different data.
683 683
684 684 The implementation is at a very early stage and will get massive rework
685 685 when the API of bundle is refined.
686 686 """
687 687 # build bundle here.
688 688 cg = changegroup.getbundle(repo, source, heads=heads,
689 689 common=common, bundlecaps=bundlecaps)
690 690 if bundlecaps is None or 'HG2X' not in bundlecaps:
691 691 return cg
692 692 # very crude first implementation,
693 693 # the bundle API will change and the generation will be done lazily.
694 694 b2caps = {}
695 695 for bcaps in bundlecaps:
696 696 if bcaps.startswith('bundle2='):
697 697 blob = urllib.unquote(bcaps[len('bundle2='):])
698 698 b2caps.update(bundle2.decodecaps(blob))
699 699 bundler = bundle2.bundle20(repo.ui, b2caps)
700 700 part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
701 701 bundler.addpart(part)
702 702 _getbundleextrapart(bundler, repo, source, heads=None, common=None,
703 703 bundlecaps=None, **kwargs)
704 704 return util.chunkbuffer(bundler.getchunks())
705 705
706 706 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
707 707 bundlecaps=None, **kwargs):
708 708 """hook function to let extensions add parts to the requested bundle"""
709 709 pass
710 710
711 711 def check_heads(repo, their_heads, context):
712 712 """check if the heads of a repo have been modified
713 713
714 714 Used by peer for unbundling.
715 715 """
716 716 heads = repo.heads()
717 717 heads_hash = util.sha1(''.join(sorted(heads))).digest()
718 718 if not (their_heads == ['force'] or their_heads == heads or
719 719 their_heads == ['hashed', heads_hash]):
720 720 # someone else committed/pushed/unbundled while we
721 721 # were transferring data
722 722 raise error.PushRaced('repository changed while %s - '
723 723 'please try again' % context)
724 724
725 725 def unbundle(repo, cg, heads, source, url):
726 726 """Apply a bundle to a repo.
727 727
728 728 this function makes sure the repo is locked during the application and have
729 729 mechanism to check that no push race occurred between the creation of the
730 730 bundle and its application.
731 731
732 732 If the push was raced as PushRaced exception is raised."""
733 733 r = 0
734 734 # need a transaction when processing a bundle2 stream
735 735 tr = None
736 736 lock = repo.lock()
737 737 try:
738 738 check_heads(repo, heads, 'uploading changes')
739 739 # push can proceed
740 740 if util.safehasattr(cg, 'params'):
741 tr = repo.transaction('unbundle')
742 tr.hookargs['bundle2-exp'] = '1'
743 r = bundle2.processbundle(repo, cg, lambda: tr).reply
744 cl = repo.unfiltered().changelog
745 p = cl.writepending() and repo.root or ""
746 repo.hook('b2x-pretransactionclose', throw=True, source=source,
747 url=url, pending=p, **tr.hookargs)
748 tr.close()
749 repo.hook('b2x-transactionclose', source=source, url=url,
750 **tr.hookargs)
741 try:
742 tr = repo.transaction('unbundle')
743 tr.hookargs['bundle2-exp'] = '1'
744 r = bundle2.processbundle(repo, cg, lambda: tr).reply
745 cl = repo.unfiltered().changelog
746 p = cl.writepending() and repo.root or ""
747 repo.hook('b2x-pretransactionclose', throw=True, source=source,
748 url=url, pending=p, **tr.hookargs)
749 tr.close()
750 repo.hook('b2x-transactionclose', source=source, url=url,
751 **tr.hookargs)
752 except Exception, exc:
753 exc.duringunbundle2 = True
754 raise
751 755 else:
752 756 r = changegroup.addchangegroup(repo, cg, source, url)
753 757 finally:
754 758 if tr is not None:
755 759 tr.release()
756 760 lock.release()
757 761 return r
@@ -1,1045 +1,1082 b''
1 1
2 2 Create an extension to test bundle2 API
3 3
4 4 $ cat > bundle2.py << EOF
5 5 > """A small extension to test bundle2 implementation
6 6 >
7 7 > Current bundle2 implementation is far too limited to be used in any core
8 8 > code. We still need to be able to test it while it grow up.
9 9 > """
10 10 >
11 11 > import sys
12 12 > from mercurial import cmdutil
13 13 > from mercurial import util
14 14 > from mercurial import bundle2
15 15 > from mercurial import scmutil
16 16 > from mercurial import discovery
17 17 > from mercurial import changegroup
18 18 > from mercurial import error
19 19 > cmdtable = {}
20 20 > command = cmdutil.command(cmdtable)
21 21 >
22 22 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
23 23 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
24 24 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
25 25 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
26 26 >
27 27 > @bundle2.parthandler('test:song')
28 28 > def songhandler(op, part):
29 29 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
30 30 > op.ui.write('The choir starts singing:\n')
31 31 > verses = 0
32 32 > for line in part.read().split('\n'):
33 33 > op.ui.write(' %s\n' % line)
34 34 > verses += 1
35 35 > op.records.add('song', {'verses': verses})
36 36 >
37 37 > @bundle2.parthandler('test:ping')
38 38 > def pinghandler(op, part):
39 39 > op.ui.write('received ping request (id %i)\n' % part.id)
40 40 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
41 41 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
42 42 > rpart = bundle2.bundlepart('test:pong',
43 43 > [('in-reply-to', str(part.id))])
44 44 > op.reply.addpart(rpart)
45 45 >
46 46 > @bundle2.parthandler('test:debugreply')
47 47 > def debugreply(op, part):
48 48 > """print data about the capacity of the bundle reply"""
49 49 > if op.reply is None:
50 50 > op.ui.write('debugreply: no reply\n')
51 51 > else:
52 52 > op.ui.write('debugreply: capabilities:\n')
53 53 > for cap in sorted(op.reply.capabilities):
54 54 > op.ui.write('debugreply: %r\n' % cap)
55 55 > for val in op.reply.capabilities[cap]:
56 56 > op.ui.write('debugreply: %r\n' % val)
57 57 >
58 58 > @command('bundle2',
59 59 > [('', 'param', [], 'stream level parameter'),
60 60 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
61 61 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
62 62 > ('', 'reply', False, 'produce a reply bundle'),
63 63 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
64 64 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
65 65 > '[OUTPUTFILE]')
66 66 > def cmdbundle2(ui, repo, path=None, **opts):
67 67 > """write a bundle2 container on standard ouput"""
68 68 > bundler = bundle2.bundle20(ui)
69 69 > for p in opts['param']:
70 70 > p = p.split('=', 1)
71 71 > try:
72 72 > bundler.addparam(*p)
73 73 > except ValueError, exc:
74 74 > raise util.Abort('%s' % exc)
75 75 >
76 76 > if opts['reply']:
77 77 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
78 78 > bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsstring))
79 79 >
80 80 > if opts['pushrace']:
81 81 > dummynode = '01234567890123456789'
82 82 > bundler.addpart(bundle2.bundlepart('b2x:check:heads', data=dummynode))
83 83 >
84 84 > revs = opts['rev']
85 85 > if 'rev' in opts:
86 86 > revs = scmutil.revrange(repo, opts['rev'])
87 87 > if revs:
88 88 > # very crude version of a changegroup part creation
89 89 > bundled = repo.revs('%ld::%ld', revs, revs)
90 90 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
91 91 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
92 92 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
93 93 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
94 94 > part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
95 95 > bundler.addpart(part)
96 96 >
97 97 > if opts['parts']:
98 98 > part = bundle2.bundlepart('test:empty')
99 99 > bundler.addpart(part)
100 100 > # add a second one to make sure we handle multiple parts
101 101 > part = bundle2.bundlepart('test:empty')
102 102 > bundler.addpart(part)
103 103 > part = bundle2.bundlepart('test:song', data=ELEPHANTSSONG)
104 104 > bundler.addpart(part)
105 105 > part = bundle2.bundlepart('test:debugreply')
106 106 > bundler.addpart(part)
107 107 > part = bundle2.bundlepart('test:math',
108 108 > [('pi', '3.14'), ('e', '2.72')],
109 109 > [('cooking', 'raw')],
110 110 > '42')
111 111 > bundler.addpart(part)
112 112 > if opts['unknown']:
113 113 > part = bundle2.bundlepart('test:UNKNOWN',
114 114 > data='some random content')
115 115 > bundler.addpart(part)
116 116 > if opts['parts']:
117 117 > part = bundle2.bundlepart('test:ping')
118 118 > bundler.addpart(part)
119 119 >
120 120 > if path is None:
121 121 > file = sys.stdout
122 122 > else:
123 123 > file = open(path, 'w')
124 124 >
125 125 > for chunk in bundler.getchunks():
126 126 > file.write(chunk)
127 127 >
128 128 > @command('unbundle2', [], '')
129 129 > def cmdunbundle2(ui, repo, replypath=None):
130 130 > """process a bundle2 stream from stdin on the current repo"""
131 131 > try:
132 132 > tr = None
133 133 > lock = repo.lock()
134 134 > tr = repo.transaction('processbundle')
135 135 > try:
136 136 > unbundler = bundle2.unbundle20(ui, sys.stdin)
137 137 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
138 138 > tr.close()
139 139 > except KeyError, exc:
140 140 > raise util.Abort('missing support for %s' % exc)
141 141 > except error.PushRaced, exc:
142 142 > raise util.Abort('push race: %s' % exc)
143 143 > finally:
144 144 > if tr is not None:
145 145 > tr.release()
146 146 > lock.release()
147 147 > remains = sys.stdin.read()
148 148 > ui.write('%i unread bytes\n' % len(remains))
149 149 > if op.records['song']:
150 150 > totalverses = sum(r['verses'] for r in op.records['song'])
151 151 > ui.write('%i total verses sung\n' % totalverses)
152 152 > for rec in op.records['changegroup']:
153 153 > ui.write('addchangegroup return: %i\n' % rec['return'])
154 154 > if op.reply is not None and replypath is not None:
155 155 > file = open(replypath, 'w')
156 156 > for chunk in op.reply.getchunks():
157 157 > file.write(chunk)
158 158 >
159 159 > @command('statbundle2', [], '')
160 160 > def cmdstatbundle2(ui, repo):
161 161 > """print statistic on the bundle2 container read from stdin"""
162 162 > unbundler = bundle2.unbundle20(ui, sys.stdin)
163 163 > try:
164 164 > params = unbundler.params
165 165 > except KeyError, exc:
166 166 > raise util.Abort('unknown parameters: %s' % exc)
167 167 > ui.write('options count: %i\n' % len(params))
168 168 > for key in sorted(params):
169 169 > ui.write('- %s\n' % key)
170 170 > value = params[key]
171 171 > if value is not None:
172 172 > ui.write(' %s\n' % value)
173 173 > count = 0
174 174 > for p in unbundler.iterparts():
175 175 > count += 1
176 176 > ui.write(' :%s:\n' % p.type)
177 177 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
178 178 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
179 179 > ui.write(' payload: %i bytes\n' % len(p.read()))
180 180 > ui.write('parts count: %i\n' % count)
181 181 > EOF
182 182 $ cat >> $HGRCPATH << EOF
183 183 > [extensions]
184 184 > bundle2=$TESTTMP/bundle2.py
185 185 > [experimental]
186 186 > bundle2-exp=True
187 187 > [ui]
188 188 > ssh=python "$TESTDIR/dummyssh"
189 189 > [web]
190 190 > push_ssl = false
191 191 > allow_push = *
192 192 > EOF
193 193
194 194 The extension requires a repo (currently unused)
195 195
196 196 $ hg init main
197 197 $ cd main
198 198 $ touch a
199 199 $ hg add a
200 200 $ hg commit -m 'a'
201 201
202 202
203 203 Empty bundle
204 204 =================
205 205
206 206 - no option
207 207 - no parts
208 208
209 209 Test bundling
210 210
211 211 $ hg bundle2
212 212 HG2X\x00\x00\x00\x00 (no-eol) (esc)
213 213
214 214 Test unbundling
215 215
216 216 $ hg bundle2 | hg statbundle2
217 217 options count: 0
218 218 parts count: 0
219 219
220 220 Test old style bundle are detected and refused
221 221
222 222 $ hg bundle --all ../bundle.hg
223 223 1 changesets found
224 224 $ hg statbundle2 < ../bundle.hg
225 225 abort: unknown bundle version 10
226 226 [255]
227 227
228 228 Test parameters
229 229 =================
230 230
231 231 - some options
232 232 - no parts
233 233
234 234 advisory parameters, no value
235 235 -------------------------------
236 236
237 237 Simplest possible parameters form
238 238
239 239 Test generation simple option
240 240
241 241 $ hg bundle2 --param 'caution'
242 242 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
243 243
244 244 Test unbundling
245 245
246 246 $ hg bundle2 --param 'caution' | hg statbundle2
247 247 options count: 1
248 248 - caution
249 249 parts count: 0
250 250
251 251 Test generation multiple option
252 252
253 253 $ hg bundle2 --param 'caution' --param 'meal'
254 254 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
255 255
256 256 Test unbundling
257 257
258 258 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
259 259 options count: 2
260 260 - caution
261 261 - meal
262 262 parts count: 0
263 263
264 264 advisory parameters, with value
265 265 -------------------------------
266 266
267 267 Test generation
268 268
269 269 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
270 270 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
271 271
272 272 Test unbundling
273 273
274 274 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
275 275 options count: 3
276 276 - caution
277 277 - elephants
278 278 - meal
279 279 vegan
280 280 parts count: 0
281 281
282 282 parameter with special char in value
283 283 ---------------------------------------------------
284 284
285 285 Test generation
286 286
287 287 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
288 288 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
289 289
290 290 Test unbundling
291 291
292 292 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
293 293 options count: 2
294 294 - e|! 7/
295 295 babar%#==tutu
296 296 - simple
297 297 parts count: 0
298 298
299 299 Test unknown mandatory option
300 300 ---------------------------------------------------
301 301
302 302 $ hg bundle2 --param 'Gravity' | hg statbundle2
303 303 abort: unknown parameters: 'Gravity'
304 304 [255]
305 305
306 306 Test debug output
307 307 ---------------------------------------------------
308 308
309 309 bundling debug
310 310
311 311 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
312 312 start emission of HG2X stream
313 313 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
314 314 start of parts
315 315 end of bundle
316 316
317 317 file content is ok
318 318
319 319 $ cat ../out.hg2
320 320 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
321 321
322 322 unbundling debug
323 323
324 324 $ hg statbundle2 --debug < ../out.hg2
325 325 start processing of HG2X stream
326 326 reading bundle2 stream parameters
327 327 ignoring unknown parameter 'e|! 7/'
328 328 ignoring unknown parameter 'simple'
329 329 options count: 2
330 330 - e|! 7/
331 331 babar%#==tutu
332 332 - simple
333 333 start extraction of bundle2 parts
334 334 part header size: 0
335 335 end of bundle2 stream
336 336 parts count: 0
337 337
338 338
339 339 Test buggy input
340 340 ---------------------------------------------------
341 341
342 342 empty parameter name
343 343
344 344 $ hg bundle2 --param '' --quiet
345 345 abort: empty parameter name
346 346 [255]
347 347
348 348 bad parameter name
349 349
350 350 $ hg bundle2 --param 42babar
351 351 abort: non letter first character: '42babar'
352 352 [255]
353 353
354 354
355 355 Test part
356 356 =================
357 357
358 358 $ hg bundle2 --parts ../parts.hg2 --debug
359 359 start emission of HG2X stream
360 360 bundle parameter:
361 361 start of parts
362 362 bundle part: "test:empty"
363 363 bundle part: "test:empty"
364 364 bundle part: "test:song"
365 365 bundle part: "test:debugreply"
366 366 bundle part: "test:math"
367 367 bundle part: "test:ping"
368 368 end of bundle
369 369
370 370 $ cat ../parts.hg2
371 371 HG2X\x00\x00\x00\x11 (esc)
372 372 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
373 373 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
374 374 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
375 375 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
376 376
377 377
378 378 $ hg statbundle2 < ../parts.hg2
379 379 options count: 0
380 380 :test:empty:
381 381 mandatory: 0
382 382 advisory: 0
383 383 payload: 0 bytes
384 384 :test:empty:
385 385 mandatory: 0
386 386 advisory: 0
387 387 payload: 0 bytes
388 388 :test:song:
389 389 mandatory: 0
390 390 advisory: 0
391 391 payload: 178 bytes
392 392 :test:debugreply:
393 393 mandatory: 0
394 394 advisory: 0
395 395 payload: 0 bytes
396 396 :test:math:
397 397 mandatory: 2
398 398 advisory: 1
399 399 payload: 2 bytes
400 400 :test:ping:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 parts count: 6
405 405
406 406 $ hg statbundle2 --debug < ../parts.hg2
407 407 start processing of HG2X stream
408 408 reading bundle2 stream parameters
409 409 options count: 0
410 410 start extraction of bundle2 parts
411 411 part header size: 17
412 412 part type: "test:empty"
413 413 part id: "0"
414 414 part parameters: 0
415 415 :test:empty:
416 416 mandatory: 0
417 417 advisory: 0
418 418 payload chunk size: 0
419 419 payload: 0 bytes
420 420 part header size: 17
421 421 part type: "test:empty"
422 422 part id: "1"
423 423 part parameters: 0
424 424 :test:empty:
425 425 mandatory: 0
426 426 advisory: 0
427 427 payload chunk size: 0
428 428 payload: 0 bytes
429 429 part header size: 16
430 430 part type: "test:song"
431 431 part id: "2"
432 432 part parameters: 0
433 433 :test:song:
434 434 mandatory: 0
435 435 advisory: 0
436 436 payload chunk size: 178
437 437 payload chunk size: 0
438 438 payload: 178 bytes
439 439 part header size: 22
440 440 part type: "test:debugreply"
441 441 part id: "3"
442 442 part parameters: 0
443 443 :test:debugreply:
444 444 mandatory: 0
445 445 advisory: 0
446 446 payload chunk size: 0
447 447 payload: 0 bytes
448 448 part header size: 43
449 449 part type: "test:math"
450 450 part id: "4"
451 451 part parameters: 3
452 452 :test:math:
453 453 mandatory: 2
454 454 advisory: 1
455 455 payload chunk size: 2
456 456 payload chunk size: 0
457 457 payload: 2 bytes
458 458 part header size: 16
459 459 part type: "test:ping"
460 460 part id: "5"
461 461 part parameters: 0
462 462 :test:ping:
463 463 mandatory: 0
464 464 advisory: 0
465 465 payload chunk size: 0
466 466 payload: 0 bytes
467 467 part header size: 0
468 468 end of bundle2 stream
469 469 parts count: 6
470 470
471 471 Test actual unbundling of test part
472 472 =======================================
473 473
474 474 Process the bundle
475 475
476 476 $ hg unbundle2 --debug < ../parts.hg2
477 477 start processing of HG2X stream
478 478 reading bundle2 stream parameters
479 479 start extraction of bundle2 parts
480 480 part header size: 17
481 481 part type: "test:empty"
482 482 part id: "0"
483 483 part parameters: 0
484 484 ignoring unknown advisory part 'test:empty'
485 485 payload chunk size: 0
486 486 part header size: 17
487 487 part type: "test:empty"
488 488 part id: "1"
489 489 part parameters: 0
490 490 ignoring unknown advisory part 'test:empty'
491 491 payload chunk size: 0
492 492 part header size: 16
493 493 part type: "test:song"
494 494 part id: "2"
495 495 part parameters: 0
496 496 found a handler for part 'test:song'
497 497 The choir starts singing:
498 498 payload chunk size: 178
499 499 payload chunk size: 0
500 500 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
501 501 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
502 502 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
503 503 part header size: 22
504 504 part type: "test:debugreply"
505 505 part id: "3"
506 506 part parameters: 0
507 507 found a handler for part 'test:debugreply'
508 508 debugreply: no reply
509 509 payload chunk size: 0
510 510 part header size: 43
511 511 part type: "test:math"
512 512 part id: "4"
513 513 part parameters: 3
514 514 ignoring unknown advisory part 'test:math'
515 515 payload chunk size: 2
516 516 payload chunk size: 0
517 517 part header size: 16
518 518 part type: "test:ping"
519 519 part id: "5"
520 520 part parameters: 0
521 521 found a handler for part 'test:ping'
522 522 received ping request (id 5)
523 523 payload chunk size: 0
524 524 part header size: 0
525 525 end of bundle2 stream
526 526 0 unread bytes
527 527 3 total verses sung
528 528
529 529 Unbundle with an unknown mandatory part
530 530 (should abort)
531 531
532 532 $ hg bundle2 --parts --unknown ../unknown.hg2
533 533
534 534 $ hg unbundle2 < ../unknown.hg2
535 535 The choir starts singing:
536 536 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
537 537 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
538 538 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
539 539 debugreply: no reply
540 540 0 unread bytes
541 541 abort: missing support for 'test:unknown'
542 542 [255]
543 543
544 544 unbundle with a reply
545 545
546 546 $ hg bundle2 --parts --reply ../parts-reply.hg2
547 547 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
548 548 0 unread bytes
549 549 3 total verses sung
550 550
551 551 The reply is a bundle
552 552
553 553 $ cat ../reply.hg2
554 554 HG2X\x00\x00\x00\x1f (esc)
555 555 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
556 556 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
557 557 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
558 558 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
559 559 \x00\x00\x00\x00\x00\x1f (esc)
560 560 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
561 561 debugreply: 'city=!'
562 562 debugreply: 'celeste,ville'
563 563 debugreply: 'elephants'
564 564 debugreply: 'babar'
565 565 debugreply: 'celeste'
566 566 debugreply: 'ping-pong'
567 567 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to6\x00\x00\x00\x00\x00\x1f (esc)
568 568 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to6\x00\x00\x00=received ping request (id 6) (esc)
569 569 replying to ping request (id 6)
570 570 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
571 571
572 572 The reply is valid
573 573
574 574 $ hg statbundle2 < ../reply.hg2
575 575 options count: 0
576 576 :b2x:output:
577 577 mandatory: 0
578 578 advisory: 1
579 579 payload: 217 bytes
580 580 :b2x:output:
581 581 mandatory: 0
582 582 advisory: 1
583 583 payload: 201 bytes
584 584 :test:pong:
585 585 mandatory: 1
586 586 advisory: 0
587 587 payload: 0 bytes
588 588 :b2x:output:
589 589 mandatory: 0
590 590 advisory: 1
591 591 payload: 61 bytes
592 592 parts count: 4
593 593
594 594 Unbundle the reply to get the output:
595 595
596 596 $ hg unbundle2 < ../reply.hg2
597 597 remote: The choir starts singing:
598 598 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
599 599 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
600 600 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
601 601 remote: debugreply: capabilities:
602 602 remote: debugreply: 'city=!'
603 603 remote: debugreply: 'celeste,ville'
604 604 remote: debugreply: 'elephants'
605 605 remote: debugreply: 'babar'
606 606 remote: debugreply: 'celeste'
607 607 remote: debugreply: 'ping-pong'
608 608 remote: received ping request (id 6)
609 609 remote: replying to ping request (id 6)
610 610 0 unread bytes
611 611
612 612 Test push race detection
613 613
614 614 $ hg bundle2 --pushrace ../part-race.hg2
615 615
616 616 $ hg unbundle2 < ../part-race.hg2
617 617 0 unread bytes
618 618 abort: push race: repository changed while pushing - please try again
619 619 [255]
620 620
621 621 Support for changegroup
622 622 ===================================
623 623
624 624 $ hg unbundle $TESTDIR/bundles/rebase.hg
625 625 adding changesets
626 626 adding manifests
627 627 adding file changes
628 628 added 8 changesets with 7 changes to 7 files (+3 heads)
629 629 (run 'hg heads' to see heads, 'hg merge' to merge)
630 630
631 631 $ hg log -G
632 632 o changeset: 8:02de42196ebe
633 633 | tag: tip
634 634 | parent: 6:24b6387c8c8c
635 635 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
636 636 | date: Sat Apr 30 15:24:48 2011 +0200
637 637 | summary: H
638 638 |
639 639 | o changeset: 7:eea13746799a
640 640 |/| parent: 6:24b6387c8c8c
641 641 | | parent: 5:9520eea781bc
642 642 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
643 643 | | date: Sat Apr 30 15:24:48 2011 +0200
644 644 | | summary: G
645 645 | |
646 646 o | changeset: 6:24b6387c8c8c
647 647 | | parent: 1:cd010b8cd998
648 648 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
649 649 | | date: Sat Apr 30 15:24:48 2011 +0200
650 650 | | summary: F
651 651 | |
652 652 | o changeset: 5:9520eea781bc
653 653 |/ parent: 1:cd010b8cd998
654 654 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
655 655 | date: Sat Apr 30 15:24:48 2011 +0200
656 656 | summary: E
657 657 |
658 658 | o changeset: 4:32af7686d403
659 659 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
660 660 | | date: Sat Apr 30 15:24:48 2011 +0200
661 661 | | summary: D
662 662 | |
663 663 | o changeset: 3:5fddd98957c8
664 664 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
665 665 | | date: Sat Apr 30 15:24:48 2011 +0200
666 666 | | summary: C
667 667 | |
668 668 | o changeset: 2:42ccdea3bb16
669 669 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
670 670 | date: Sat Apr 30 15:24:48 2011 +0200
671 671 | summary: B
672 672 |
673 673 o changeset: 1:cd010b8cd998
674 674 parent: -1:000000000000
675 675 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
676 676 date: Sat Apr 30 15:24:48 2011 +0200
677 677 summary: A
678 678
679 679 @ changeset: 0:3903775176ed
680 680 user: test
681 681 date: Thu Jan 01 00:00:00 1970 +0000
682 682 summary: a
683 683
684 684
685 685 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
686 686 4 changesets found
687 687 list of changesets:
688 688 32af7686d403cf45b5d95f2d70cebea587ac806a
689 689 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
690 690 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
691 691 02de42196ebee42ef284b6780a87cdc96e8eaab6
692 692 start emission of HG2X stream
693 693 bundle parameter:
694 694 start of parts
695 695 bundle part: "b2x:changegroup"
696 696 bundling: 1/4 changesets (25.00%)
697 697 bundling: 2/4 changesets (50.00%)
698 698 bundling: 3/4 changesets (75.00%)
699 699 bundling: 4/4 changesets (100.00%)
700 700 bundling: 1/4 manifests (25.00%)
701 701 bundling: 2/4 manifests (50.00%)
702 702 bundling: 3/4 manifests (75.00%)
703 703 bundling: 4/4 manifests (100.00%)
704 704 bundling: D 1/3 files (33.33%)
705 705 bundling: E 2/3 files (66.67%)
706 706 bundling: H 3/3 files (100.00%)
707 707 end of bundle
708 708
709 709 $ cat ../rev.hg2
710 710 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
711 711 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
712 712 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
713 713 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
714 714 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
715 715 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
716 716 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
717 717 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
718 718 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
719 719 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
720 720 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
721 721 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
722 722 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
723 723 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
724 724 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
725 725 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
726 726 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
727 727 l\r (no-eol) (esc)
728 728 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
729 729 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
730 730 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
731 731 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
732 732
733 733 $ hg unbundle2 < ../rev.hg2
734 734 adding changesets
735 735 adding manifests
736 736 adding file changes
737 737 added 0 changesets with 0 changes to 3 files
738 738 0 unread bytes
739 739 addchangegroup return: 1
740 740
741 741 with reply
742 742
743 743 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
744 744 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
745 745 0 unread bytes
746 746 addchangegroup return: 1
747 747
748 748 $ cat ../rev-reply.hg2
749 749 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
750 750 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
751 751 adding manifests
752 752 adding file changes
753 753 added 0 changesets with 0 changes to 3 files
754 754 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
755 755
756 756 Real world exchange
757 757 =====================
758 758
759 759
760 760 clone --pull
761 761
762 762 $ cd ..
763 763 $ hg clone main other --pull --rev 9520eea781bc
764 764 adding changesets
765 765 adding manifests
766 766 adding file changes
767 767 added 2 changesets with 2 changes to 2 files
768 768 updating to branch default
769 769 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
770 770 $ hg -R other log -G
771 771 @ changeset: 1:9520eea781bc
772 772 | tag: tip
773 773 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
774 774 | date: Sat Apr 30 15:24:48 2011 +0200
775 775 | summary: E
776 776 |
777 777 o changeset: 0:cd010b8cd998
778 778 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
779 779 date: Sat Apr 30 15:24:48 2011 +0200
780 780 summary: A
781 781
782 782
783 783 pull
784 784
785 785 $ hg -R other pull -r 24b6387c8c8c
786 786 pulling from $TESTTMP/main (glob)
787 787 searching for changes
788 788 adding changesets
789 789 adding manifests
790 790 adding file changes
791 791 added 1 changesets with 1 changes to 1 files (+1 heads)
792 792 (run 'hg heads' to see heads, 'hg merge' to merge)
793 793
794 794 push
795 795
796 796 $ hg -R main push other --rev eea13746799a
797 797 pushing to other
798 798 searching for changes
799 799 remote: adding changesets
800 800 remote: adding manifests
801 801 remote: adding file changes
802 802 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
803 803
804 804 pull over ssh
805 805
806 806 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
807 807 pulling from ssh://user@dummy/main
808 808 searching for changes
809 809 adding changesets
810 810 adding manifests
811 811 adding file changes
812 812 added 1 changesets with 1 changes to 1 files (+1 heads)
813 813 (run 'hg heads' to see heads, 'hg merge' to merge)
814 814
815 815 pull over http
816 816
817 817 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
818 818 $ cat main.pid >> $DAEMON_PIDS
819 819
820 820 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
821 821 pulling from http://localhost:$HGPORT/
822 822 searching for changes
823 823 adding changesets
824 824 adding manifests
825 825 adding file changes
826 826 added 1 changesets with 1 changes to 1 files (+1 heads)
827 827 (run 'hg heads .' to see heads, 'hg merge' to merge)
828 828 $ cat main-error.log
829 829
830 830 push over ssh
831 831
832 832 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
833 833 pushing to ssh://user@dummy/other
834 834 searching for changes
835 835 remote: adding changesets
836 836 remote: adding manifests
837 837 remote: adding file changes
838 838 remote: added 1 changesets with 1 changes to 1 files
839 839
840 840 push over http
841 841
842 842 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
843 843 $ cat other.pid >> $DAEMON_PIDS
844 844
845 845 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
846 846 pushing to http://localhost:$HGPORT2/
847 847 searching for changes
848 848 remote: adding changesets
849 849 remote: adding manifests
850 850 remote: adding file changes
851 851 remote: added 1 changesets with 1 changes to 1 files
852 852 $ cat other-error.log
853 853
854 854 Check final content.
855 855
856 856 $ hg -R other log -G
857 857 o changeset: 7:32af7686d403
858 858 | tag: tip
859 859 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
860 860 | date: Sat Apr 30 15:24:48 2011 +0200
861 861 | summary: D
862 862 |
863 863 o changeset: 6:5fddd98957c8
864 864 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
865 865 | date: Sat Apr 30 15:24:48 2011 +0200
866 866 | summary: C
867 867 |
868 868 o changeset: 5:42ccdea3bb16
869 869 | parent: 0:cd010b8cd998
870 870 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
871 871 | date: Sat Apr 30 15:24:48 2011 +0200
872 872 | summary: B
873 873 |
874 874 | o changeset: 4:02de42196ebe
875 875 | | parent: 2:24b6387c8c8c
876 876 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
877 877 | | date: Sat Apr 30 15:24:48 2011 +0200
878 878 | | summary: H
879 879 | |
880 880 | | o changeset: 3:eea13746799a
881 881 | |/| parent: 2:24b6387c8c8c
882 882 | | | parent: 1:9520eea781bc
883 883 | | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
884 884 | | | date: Sat Apr 30 15:24:48 2011 +0200
885 885 | | | summary: G
886 886 | | |
887 887 | o | changeset: 2:24b6387c8c8c
888 888 |/ / parent: 0:cd010b8cd998
889 889 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
890 890 | | date: Sat Apr 30 15:24:48 2011 +0200
891 891 | | summary: F
892 892 | |
893 893 | @ changeset: 1:9520eea781bc
894 894 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
895 895 | date: Sat Apr 30 15:24:48 2011 +0200
896 896 | summary: E
897 897 |
898 898 o changeset: 0:cd010b8cd998
899 899 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
900 900 date: Sat Apr 30 15:24:48 2011 +0200
901 901 summary: A
902 902
903 903
904 904 Error Handling
905 905 ==============
906 906
907 907 Check that errors are properly returned to the client during push.
908 908
909 909 Setting up
910 910
911 911 $ cat > failpush.py << EOF
912 912 > """A small extension that makes push fails when using bundle2
913 913 >
914 914 > used to test error handling in bundle2
915 915 > """
916 916 >
917 917 > from mercurial import util
918 918 > from mercurial import bundle2
919 919 > from mercurial import exchange
920 920 > from mercurial import extensions
921 921 >
922 922 > def _pushbundle2failpart(orig, pushop, bundler):
923 923 > extradata = orig(pushop, bundler)
924 924 > reason = pushop.ui.config('failpush', 'reason', None)
925 925 > part = None
926 926 > if reason == 'abort':
927 927 > part = bundle2.bundlepart('test:abort')
928 928 > if reason == 'unknown':
929 929 > part = bundle2.bundlepart('TEST:UNKNOWN')
930 930 > if reason == 'race':
931 931 > # 20 Bytes of crap
932 932 > part = bundle2.bundlepart('b2x:check:heads', data='01234567890123456789')
933 933 > if part is not None:
934 934 > bundler.addpart(part)
935 935 > return extradata
936 936 >
937 937 > @bundle2.parthandler("test:abort")
938 938 > def handleabort(op, part):
939 939 > raise util.Abort('Abandon ship!', hint="don't panic")
940 940 >
941 941 > def uisetup(ui):
942 942 > extensions.wrapfunction(exchange, '_pushbundle2extraparts', _pushbundle2failpart)
943 943 >
944 944 > EOF
945 945
946 946 $ cd main
947 947 $ hg up tip
948 948 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
949 949 $ echo 'I' > I
950 950 $ hg add I
951 951 $ hg ci -m 'I'
952 952 $ hg id
953 953 e7ec4e813ba6 tip
954 954 $ cd ..
955 955
956 956 $ cat << EOF >> $HGRCPATH
957 957 > [extensions]
958 958 > failpush=$TESTTMP/failpush.py
959 959 > EOF
960 960
961 961 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
962 962 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
963 963 $ cat other.pid >> $DAEMON_PIDS
964 964
965 965 Doing the actual push: Abort error
966 966
967 967 $ cat << EOF >> $HGRCPATH
968 968 > [failpush]
969 969 > reason = abort
970 970 > EOF
971 971
972 972 $ hg -R main push other -r e7ec4e813ba6
973 973 pushing to other
974 974 searching for changes
975 975 abort: Abandon ship!
976 976 (don't panic)
977 977 [255]
978 978
979 979 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
980 980 pushing to ssh://user@dummy/other
981 981 searching for changes
982 982 abort: Abandon ship!
983 983 (don't panic)
984 984 [255]
985 985
986 986 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
987 987 pushing to http://localhost:$HGPORT2/
988 988 searching for changes
989 989 abort: Abandon ship!
990 990 (don't panic)
991 991 [255]
992 992
993 993
994 994 Doing the actual push: unknown mandatory parts
995 995
996 996 $ cat << EOF >> $HGRCPATH
997 997 > [failpush]
998 998 > reason = unknown
999 999 > EOF
1000 1000
1001 1001 $ hg -R main push other -r e7ec4e813ba6
1002 1002 pushing to other
1003 1003 searching for changes
1004 1004 abort: missing support for 'test:unknown'
1005 1005 [255]
1006 1006
1007 1007 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1008 1008 pushing to ssh://user@dummy/other
1009 1009 searching for changes
1010 1010 abort: missing support for "'test:unknown'"
1011 1011 [255]
1012 1012
1013 1013 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1014 1014 pushing to http://localhost:$HGPORT2/
1015 1015 searching for changes
1016 1016 abort: missing support for "'test:unknown'"
1017 1017 [255]
1018 1018
1019 1019 Doing the actual push: race
1020 1020
1021 1021 $ cat << EOF >> $HGRCPATH
1022 1022 > [failpush]
1023 1023 > reason = race
1024 1024 > EOF
1025 1025
1026 1026 $ hg -R main push other -r e7ec4e813ba6
1027 1027 pushing to other
1028 1028 searching for changes
1029 1029 abort: push failed:
1030 1030 'repository changed while pushing - please try again'
1031 1031 [255]
1032 1032
1033 1033 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1034 1034 pushing to ssh://user@dummy/other
1035 1035 searching for changes
1036 1036 abort: push failed:
1037 1037 'repository changed while pushing - please try again'
1038 1038 [255]
1039 1039
1040 1040 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1041 1041 pushing to http://localhost:$HGPORT2/
1042 1042 searching for changes
1043 1043 abort: push failed:
1044 1044 'repository changed while pushing - please try again'
1045 1045 [255]
1046
1047 Doing the actual push: hook abort
1048
1049 $ cat << EOF >> $HGRCPATH
1050 > [failpush]
1051 > reason =
1052 > [hooks]
1053 > b2x-pretransactionclose.failpush = false
1054 > EOF
1055
1056 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1057 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1058 $ cat other.pid >> $DAEMON_PIDS
1059
1060 $ hg -R main push other -r e7ec4e813ba6
1061 pushing to other
1062 searching for changes
1063 transaction abort!
1064 rollback completed
1065 abort: b2x-pretransactionclose.failpush hook exited with status 1
1066 [255]
1067
1068 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1069 pushing to ssh://user@dummy/other
1070 searching for changes
1071 abort: b2x-pretransactionclose.failpush hook exited with status 1
1072 remote: transaction abort!
1073 remote: rollback completed
1074 [255]
1075
1076 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1077 pushing to http://localhost:$HGPORT2/
1078 searching for changes
1079 abort: b2x-pretransactionclose.failpush hook exited with status 1
1080 [255]
1081
1082
General Comments 0
You need to be logged in to leave comments. Login now