##// END OF EJS Templates
bundle2: fix bundle2 pulling all revs on empty pulls...
Durham Goode -
r21259:ab5040cd default
parent child Browse files
Show More
@@ -1,761 +1,765
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2
13 13
def readbundle(ui, fh, fname, vfs=None):
    """Read a bundle header from ``fh`` and return a matching unbundler.

    ``fname`` is only used for error messages (``"stream"`` when empty);
    when ``vfs`` is given the name is resolved against it.  Raises
    ``util.Abort`` for a non-Mercurial stream or an unknown version.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
    if not header.startswith('HG') and header.startswith('\0'):
        # headerless bundle: push the already-consumed bytes back and
        # treat the stream as an uncompressed HG10 bundle
        fh = changegroup.headerlessfixup(fh, header)
        header = "HG10"
        alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # compression algorithm is the next two bytes of the header
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    elif version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
40 40
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None
79 79
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        # pushing to a local peer: it must understand our repo format
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # remote cannot apply a bundle stream: take its lock and use
            # the legacy addchangegroup path below
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                pushop.repo.prepushoutgoinghooks(pushop.repo,
                                                 pushop.remote,
                                                 pushop.outgoing)
                if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                              False)
                    and pushop.remote.capable('bundle2-exp')):
                    _pushbundle2(pushop)
                else:
                    _pushchangeset(pushop)
            # phase/obsmarker exchange happens even when no changeset
            # was pushed
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret
153 153
def _pushdiscovery(pushop):
    """Run changeset discovery and record the results on ``pushop``.

    Fills in ``pushop.outgoing``, ``pushop.remoteheads`` and
    ``pushop.incoming``.
    """
    unfiltered = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfiltered, pushop.remote,
                                             force=pushop.force)
    pushop.outgoing = discovery.findcommonoutgoing(unfiltered, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    # commoninc is (common, incoming-boolean, remote heads)
    pushop.remoteheads = commoninc[2]
    pushop.incoming = commoninc[1]
166 166
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set before pushing it.

    Returns False when there is nothing to push.  Otherwise, unless the
    push is forced, aborts on obsolete/troubled outgoing heads and lets
    discovery.checkheads abort on new remote heads, then returns True.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
205 205
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    # negotiate capabilities advertised by the server
    capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
    caps = bundle2.decodecaps(capsblob)
    bundler = bundle2.bundle20(pushop.ui, caps)
    # create reply capability
    capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
    bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
    if not pushop.force:
        # send known heads to the server for race detection
        part = bundle2.bundlepart('B2X:CHECK:HEADS',
                                  data=iter(pushop.remoteheads))
        bundler.addpart(part)
    extrainfo = _pushbundle2extraparts(pushop, bundler)
    # add the changegroup bundle
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
    bundler.addpart(cgpart)
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except bundle2.UnknownPartError, exc:
        raise util.Abort('missing support for %s' % exc)
    try:
        # process the server's reply bundle locally
        op = bundle2.processbundle(pushop.repo, reply)
    except bundle2.UnknownPartError, exc:
        raise util.Abort('missing support for %s' % exc)
    # extract the changegroup application result from the reply records
    cgreplies = op.records.getreplies(cgpart.id)
    assert len(cgreplies['changegroup']) == 1
    pushop.ret = cgreplies['changegroup'][0]['return']
    _pushbundle2extrareply(pushop, op, extrainfo)
240 240
241 241 def _pushbundle2extraparts(pushop, bundler):
242 242 """hook function to let extensions add parts
243 243
244 244 Return a dict to let extensions pass data to the reply processing.
245 245 """
246 246 return {}
247 247
248 248 def _pushbundle2extrareply(pushop, op, extrainfo):
249 249 """hook function to let extensions react to part replies
250 250
251 251 The dict from _pushbundle2extrareply is fed to this function.
252 252 """
253 253 pass
254 254
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                            or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
294 294
def _pushcomputecommonheads(pushop):
    """Compute the heads common to both repos after the push attempt.

    The result is stored in ``pushop.commonheads`` and later used by the
    phase synchronisation step.
    """
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
328 328
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Uses ``pushop.commonheads`` as the set of heads to synchronise on.
    ``push()`` always runs ``_pushcomputecommonheads`` immediately before
    this step, so the previous in-function recomputation of the very same
    value (a verbatim copy of that helper's body) was dead code and has
    been removed.
    """
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote by public here.
        # XXX Beware that revset break if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
408 408
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
        return
    # The source repo could not be locked, so no local phase may change.
    # Inform the user whenever the move would have had an effect.
    repo = pushop.repo
    wouldmove = [n for n in nodes if phase < repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
422 422
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    # only exchange markers when both sides have obsolescence enabled
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        # local markers are exposed through the local pushkey namespace
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
439 439
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    # when specific revs were pushed, only consider bookmarks on their
    # ancestors
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)

    # only bookmarks that plainly advanced on our side are pushed
    for b, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)
459 459
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
526 526
def pull(repo, remote, heads=None, force=False):
    """Pull changesets (limited by ``heads``) from ``remote`` into ``repo``.

    Returns the changegroup result code (see addchangegroup), or None
    when no changegroup was pulled.
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # pulling from a local peer: we must understand its repo format
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # bundle2 may handle several steps at once; any step it leaves in
        # pullop.todosteps falls through to the legacy implementations
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
555 555
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote,
        heads=pullop.heads, force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
566 566
567 567 def _pullbundle2(pullop):
568 568 """pull data using bundle2
569 569
570 570 For now, the only supported data are changegroup."""
571 571 kwargs = {'bundlecaps': set(['HG2X'])}
572 572 capsblob = bundle2.encodecaps(pullop.repo.bundle2caps)
573 573 kwargs['bundlecaps'].add('bundle2=' + urllib.quote(capsblob))
574 574 # pulling changegroup
575 575 pullop.todosteps.remove('changegroup')
576
577 kwargs['common'] = pullop.common
578 kwargs['heads'] = pullop.heads or pullop.rheads
576 579 if not pullop.fetch:
577 580 pullop.repo.ui.status(_("no changes found\n"))
578 581 pullop.cgresult = 0
579 582 else:
580 kwargs['common'] = pullop.common
581 kwargs['heads'] = pullop.heads or pullop.rheads
582 583 if pullop.heads is None and list(pullop.common) == [nullid]:
583 584 pullop.repo.ui.status(_("requesting all changes\n"))
584 585 _pullbundle2extraprepare(pullop, kwargs)
585 586 if kwargs.keys() == ['format']:
586 587 return # nothing to pull
587 588 bundle = pullop.remote.getbundle('pull', **kwargs)
588 589 try:
589 590 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
590 591 except bundle2.UnknownPartError, exc:
591 592 raise util.Abort('missing support for %s' % exc)
592 assert len(op.records['changegroup']) == 1
593 pullop.cgresult = op.records['changegroup'][0]['return']
593
594 if pullop.fetch:
595 assert len(op.records['changegroup']) == 1
596 pullop.cgresult = op.records['changegroup'][0]['return']
594 597
595 598 def _pullbundle2extraprepare(pullop, kwargs):
596 599 """hook function so that extensions can extend the getbundle call"""
597 600 pass
598 601
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # old server without getbundle/changegroupsubset: full pull only
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
630 633
def _pullphase(pullop):
    """Synchronise local phase information with the remote after a pull."""
    # Get remote phases data from remote
    pullop.todosteps.remove('phases')
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and unpublishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft,
                               pullop.pulledsubset)
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)
649 652
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            # only open a transaction when there are markers to store
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr
671 674
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we want to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build changegroup bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # the client cannot handle bundle2: hand back the plain changegroup
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        # only add a changegroup part when there is something to send;
        # unconditionally adding one crashed on empty pulls (cg is None)
        part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
        bundler.addpart(part)
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
705 709
706 710 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
707 711 bundlecaps=None, **kwargs):
708 712 """hook function to let extensions add parts to the requested bundle"""
709 713 pass
710 714
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    current = repo.heads()
    fingerprint = util.sha1(''.join(sorted(current))).digest()
    unchanged = (their_heads == ['force']
                 or their_heads == current
                 or their_heads == ['hashed', fingerprint])
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
724 728
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # presence of 'params' marks a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                cl = repo.unfiltered().changelog
                # expose pending changes to the pre-close hook
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the failure so callers know it happened during
                # bundle2 processing
                exc.duringunbundle2 = True
                raise
        else:
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
@@ -1,1082 +1,1088
1 1
2 2 Create an extension to test bundle2 API
3 3
4 4 $ cat > bundle2.py << EOF
5 5 > """A small extension to test bundle2 implementation
6 6 >
7 7 > Current bundle2 implementation is far too limited to be used in any core
8 8 > code. We still need to be able to test it while it grow up.
9 9 > """
10 10 >
11 11 > import sys
12 12 > from mercurial import cmdutil
13 13 > from mercurial import util
14 14 > from mercurial import bundle2
15 15 > from mercurial import scmutil
16 16 > from mercurial import discovery
17 17 > from mercurial import changegroup
18 18 > from mercurial import error
19 19 > cmdtable = {}
20 20 > command = cmdutil.command(cmdtable)
21 21 >
22 22 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
23 23 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
24 24 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
25 25 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
26 26 >
27 27 > @bundle2.parthandler('test:song')
28 28 > def songhandler(op, part):
29 29 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
30 30 > op.ui.write('The choir starts singing:\n')
31 31 > verses = 0
32 32 > for line in part.read().split('\n'):
33 33 > op.ui.write(' %s\n' % line)
34 34 > verses += 1
35 35 > op.records.add('song', {'verses': verses})
36 36 >
37 37 > @bundle2.parthandler('test:ping')
38 38 > def pinghandler(op, part):
39 39 > op.ui.write('received ping request (id %i)\n' % part.id)
40 40 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
41 41 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
42 42 > rpart = bundle2.bundlepart('test:pong',
43 43 > [('in-reply-to', str(part.id))])
44 44 > op.reply.addpart(rpart)
45 45 >
46 46 > @bundle2.parthandler('test:debugreply')
47 47 > def debugreply(op, part):
48 48 > """print data about the capacity of the bundle reply"""
49 49 > if op.reply is None:
50 50 > op.ui.write('debugreply: no reply\n')
51 51 > else:
52 52 > op.ui.write('debugreply: capabilities:\n')
53 53 > for cap in sorted(op.reply.capabilities):
54 54 > op.ui.write('debugreply: %r\n' % cap)
55 55 > for val in op.reply.capabilities[cap]:
56 56 > op.ui.write('debugreply: %r\n' % val)
57 57 >
58 58 > @command('bundle2',
59 59 > [('', 'param', [], 'stream level parameter'),
60 60 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
61 61 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
62 62 > ('', 'reply', False, 'produce a reply bundle'),
63 63 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
64 64 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
65 65 > '[OUTPUTFILE]')
66 66 > def cmdbundle2(ui, repo, path=None, **opts):
67 67 >     """write a bundle2 container on standard output"""
68 68 > bundler = bundle2.bundle20(ui)
69 69 > for p in opts['param']:
70 70 > p = p.split('=', 1)
71 71 > try:
72 72 > bundler.addparam(*p)
73 73 > except ValueError, exc:
74 74 > raise util.Abort('%s' % exc)
75 75 >
76 76 > if opts['reply']:
77 77 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
78 78 > bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsstring))
79 79 >
80 80 > if opts['pushrace']:
81 81 > dummynode = '01234567890123456789'
82 82 > bundler.addpart(bundle2.bundlepart('b2x:check:heads', data=dummynode))
83 83 >
84 84 > revs = opts['rev']
85 85 > if 'rev' in opts:
86 86 > revs = scmutil.revrange(repo, opts['rev'])
87 87 > if revs:
88 88 > # very crude version of a changegroup part creation
89 89 > bundled = repo.revs('%ld::%ld', revs, revs)
90 90 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
91 91 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
92 92 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
93 93 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
94 94 > part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
95 95 > bundler.addpart(part)
96 96 >
97 97 > if opts['parts']:
98 98 > part = bundle2.bundlepart('test:empty')
99 99 > bundler.addpart(part)
100 100 > # add a second one to make sure we handle multiple parts
101 101 > part = bundle2.bundlepart('test:empty')
102 102 > bundler.addpart(part)
103 103 > part = bundle2.bundlepart('test:song', data=ELEPHANTSSONG)
104 104 > bundler.addpart(part)
105 105 > part = bundle2.bundlepart('test:debugreply')
106 106 > bundler.addpart(part)
107 107 > part = bundle2.bundlepart('test:math',
108 108 > [('pi', '3.14'), ('e', '2.72')],
109 109 > [('cooking', 'raw')],
110 110 > '42')
111 111 > bundler.addpart(part)
112 112 > if opts['unknown']:
113 113 > part = bundle2.bundlepart('test:UNKNOWN',
114 114 > data='some random content')
115 115 > bundler.addpart(part)
116 116 > if opts['parts']:
117 117 > part = bundle2.bundlepart('test:ping')
118 118 > bundler.addpart(part)
119 119 >
120 120 > if path is None:
121 121 > file = sys.stdout
122 122 > else:
123 123 > file = open(path, 'w')
124 124 >
125 125 > for chunk in bundler.getchunks():
126 126 > file.write(chunk)
127 127 >
128 128 > @command('unbundle2', [], '')
129 129 > def cmdunbundle2(ui, repo, replypath=None):
130 130 > """process a bundle2 stream from stdin on the current repo"""
131 131 > try:
132 132 > tr = None
133 133 > lock = repo.lock()
134 134 > tr = repo.transaction('processbundle')
135 135 > try:
136 136 > unbundler = bundle2.unbundle20(ui, sys.stdin)
137 137 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
138 138 > tr.close()
139 139 > except KeyError, exc:
140 140 > raise util.Abort('missing support for %s' % exc)
141 141 > except error.PushRaced, exc:
142 142 > raise util.Abort('push race: %s' % exc)
143 143 > finally:
144 144 > if tr is not None:
145 145 > tr.release()
146 146 > lock.release()
147 147 > remains = sys.stdin.read()
148 148 > ui.write('%i unread bytes\n' % len(remains))
149 149 > if op.records['song']:
150 150 > totalverses = sum(r['verses'] for r in op.records['song'])
151 151 > ui.write('%i total verses sung\n' % totalverses)
152 152 > for rec in op.records['changegroup']:
153 153 > ui.write('addchangegroup return: %i\n' % rec['return'])
154 154 > if op.reply is not None and replypath is not None:
155 155 > file = open(replypath, 'w')
156 156 > for chunk in op.reply.getchunks():
157 157 > file.write(chunk)
158 158 >
159 159 > @command('statbundle2', [], '')
160 160 > def cmdstatbundle2(ui, repo):
161 161 > """print statistic on the bundle2 container read from stdin"""
162 162 > unbundler = bundle2.unbundle20(ui, sys.stdin)
163 163 > try:
164 164 > params = unbundler.params
165 165 > except KeyError, exc:
166 166 > raise util.Abort('unknown parameters: %s' % exc)
167 167 > ui.write('options count: %i\n' % len(params))
168 168 > for key in sorted(params):
169 169 > ui.write('- %s\n' % key)
170 170 > value = params[key]
171 171 > if value is not None:
172 172 > ui.write(' %s\n' % value)
173 173 > count = 0
174 174 > for p in unbundler.iterparts():
175 175 > count += 1
176 176 > ui.write(' :%s:\n' % p.type)
177 177 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
178 178 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
179 179 > ui.write(' payload: %i bytes\n' % len(p.read()))
180 180 > ui.write('parts count: %i\n' % count)
181 181 > EOF
182 182 $ cat >> $HGRCPATH << EOF
183 183 > [extensions]
184 184 > bundle2=$TESTTMP/bundle2.py
185 185 > [experimental]
186 186 > bundle2-exp=True
187 187 > [ui]
188 188 > ssh=python "$TESTDIR/dummyssh"
189 189 > [web]
190 190 > push_ssl = false
191 191 > allow_push = *
192 192 > EOF
193 193
194 194 The extension requires a repo (currently unused)
195 195
196 196 $ hg init main
197 197 $ cd main
198 198 $ touch a
199 199 $ hg add a
200 200 $ hg commit -m 'a'
201 201
202 202
203 203 Empty bundle
204 204 =================
205 205
206 206 - no option
207 207 - no parts
208 208
209 209 Test bundling
210 210
211 211 $ hg bundle2
212 212 HG2X\x00\x00\x00\x00 (no-eol) (esc)
213 213
214 214 Test unbundling
215 215
216 216 $ hg bundle2 | hg statbundle2
217 217 options count: 0
218 218 parts count: 0
219 219
220 220 Test old style bundle are detected and refused
221 221
222 222 $ hg bundle --all ../bundle.hg
223 223 1 changesets found
224 224 $ hg statbundle2 < ../bundle.hg
225 225 abort: unknown bundle version 10
226 226 [255]
227 227
228 228 Test parameters
229 229 =================
230 230
231 231 - some options
232 232 - no parts
233 233
234 234 advisory parameters, no value
235 235 -------------------------------
236 236
237 237 Simplest possible parameters form
238 238
239 239 Test generation simple option
240 240
241 241 $ hg bundle2 --param 'caution'
242 242 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
243 243
244 244 Test unbundling
245 245
246 246 $ hg bundle2 --param 'caution' | hg statbundle2
247 247 options count: 1
248 248 - caution
249 249 parts count: 0
250 250
251 251 Test generation multiple option
252 252
253 253 $ hg bundle2 --param 'caution' --param 'meal'
254 254 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
255 255
256 256 Test unbundling
257 257
258 258 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
259 259 options count: 2
260 260 - caution
261 261 - meal
262 262 parts count: 0
263 263
264 264 advisory parameters, with value
265 265 -------------------------------
266 266
267 267 Test generation
268 268
269 269 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
270 270 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
271 271
272 272 Test unbundling
273 273
274 274 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
275 275 options count: 3
276 276 - caution
277 277 - elephants
278 278 - meal
279 279 vegan
280 280 parts count: 0
281 281
282 282 parameter with special char in value
283 283 ---------------------------------------------------
284 284
285 285 Test generation
286 286
287 287 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
288 288 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
289 289
290 290 Test unbundling
291 291
292 292 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
293 293 options count: 2
294 294 - e|! 7/
295 295 babar%#==tutu
296 296 - simple
297 297 parts count: 0
298 298
299 299 Test unknown mandatory option
300 300 ---------------------------------------------------
301 301
302 302 $ hg bundle2 --param 'Gravity' | hg statbundle2
303 303 abort: unknown parameters: 'Gravity'
304 304 [255]
305 305
306 306 Test debug output
307 307 ---------------------------------------------------
308 308
309 309 bundling debug
310 310
311 311 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
312 312 start emission of HG2X stream
313 313 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
314 314 start of parts
315 315 end of bundle
316 316
317 317 file content is ok
318 318
319 319 $ cat ../out.hg2
320 320 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
321 321
322 322 unbundling debug
323 323
324 324 $ hg statbundle2 --debug < ../out.hg2
325 325 start processing of HG2X stream
326 326 reading bundle2 stream parameters
327 327 ignoring unknown parameter 'e|! 7/'
328 328 ignoring unknown parameter 'simple'
329 329 options count: 2
330 330 - e|! 7/
331 331 babar%#==tutu
332 332 - simple
333 333 start extraction of bundle2 parts
334 334 part header size: 0
335 335 end of bundle2 stream
336 336 parts count: 0
337 337
338 338
339 339 Test buggy input
340 340 ---------------------------------------------------
341 341
342 342 empty parameter name
343 343
344 344 $ hg bundle2 --param '' --quiet
345 345 abort: empty parameter name
346 346 [255]
347 347
348 348 bad parameter name
349 349
350 350 $ hg bundle2 --param 42babar
351 351 abort: non letter first character: '42babar'
352 352 [255]
353 353
354 354
355 355 Test part
356 356 =================
357 357
358 358 $ hg bundle2 --parts ../parts.hg2 --debug
359 359 start emission of HG2X stream
360 360 bundle parameter:
361 361 start of parts
362 362 bundle part: "test:empty"
363 363 bundle part: "test:empty"
364 364 bundle part: "test:song"
365 365 bundle part: "test:debugreply"
366 366 bundle part: "test:math"
367 367 bundle part: "test:ping"
368 368 end of bundle
369 369
370 370 $ cat ../parts.hg2
371 371 HG2X\x00\x00\x00\x11 (esc)
372 372 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
373 373 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
374 374 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
375 375 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
376 376
377 377
378 378 $ hg statbundle2 < ../parts.hg2
379 379 options count: 0
380 380 :test:empty:
381 381 mandatory: 0
382 382 advisory: 0
383 383 payload: 0 bytes
384 384 :test:empty:
385 385 mandatory: 0
386 386 advisory: 0
387 387 payload: 0 bytes
388 388 :test:song:
389 389 mandatory: 0
390 390 advisory: 0
391 391 payload: 178 bytes
392 392 :test:debugreply:
393 393 mandatory: 0
394 394 advisory: 0
395 395 payload: 0 bytes
396 396 :test:math:
397 397 mandatory: 2
398 398 advisory: 1
399 399 payload: 2 bytes
400 400 :test:ping:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 parts count: 6
405 405
406 406 $ hg statbundle2 --debug < ../parts.hg2
407 407 start processing of HG2X stream
408 408 reading bundle2 stream parameters
409 409 options count: 0
410 410 start extraction of bundle2 parts
411 411 part header size: 17
412 412 part type: "test:empty"
413 413 part id: "0"
414 414 part parameters: 0
415 415 :test:empty:
416 416 mandatory: 0
417 417 advisory: 0
418 418 payload chunk size: 0
419 419 payload: 0 bytes
420 420 part header size: 17
421 421 part type: "test:empty"
422 422 part id: "1"
423 423 part parameters: 0
424 424 :test:empty:
425 425 mandatory: 0
426 426 advisory: 0
427 427 payload chunk size: 0
428 428 payload: 0 bytes
429 429 part header size: 16
430 430 part type: "test:song"
431 431 part id: "2"
432 432 part parameters: 0
433 433 :test:song:
434 434 mandatory: 0
435 435 advisory: 0
436 436 payload chunk size: 178
437 437 payload chunk size: 0
438 438 payload: 178 bytes
439 439 part header size: 22
440 440 part type: "test:debugreply"
441 441 part id: "3"
442 442 part parameters: 0
443 443 :test:debugreply:
444 444 mandatory: 0
445 445 advisory: 0
446 446 payload chunk size: 0
447 447 payload: 0 bytes
448 448 part header size: 43
449 449 part type: "test:math"
450 450 part id: "4"
451 451 part parameters: 3
452 452 :test:math:
453 453 mandatory: 2
454 454 advisory: 1
455 455 payload chunk size: 2
456 456 payload chunk size: 0
457 457 payload: 2 bytes
458 458 part header size: 16
459 459 part type: "test:ping"
460 460 part id: "5"
461 461 part parameters: 0
462 462 :test:ping:
463 463 mandatory: 0
464 464 advisory: 0
465 465 payload chunk size: 0
466 466 payload: 0 bytes
467 467 part header size: 0
468 468 end of bundle2 stream
469 469 parts count: 6
470 470
471 471 Test actual unbundling of test part
472 472 =======================================
473 473
474 474 Process the bundle
475 475
476 476 $ hg unbundle2 --debug < ../parts.hg2
477 477 start processing of HG2X stream
478 478 reading bundle2 stream parameters
479 479 start extraction of bundle2 parts
480 480 part header size: 17
481 481 part type: "test:empty"
482 482 part id: "0"
483 483 part parameters: 0
484 484 ignoring unknown advisory part 'test:empty'
485 485 payload chunk size: 0
486 486 part header size: 17
487 487 part type: "test:empty"
488 488 part id: "1"
489 489 part parameters: 0
490 490 ignoring unknown advisory part 'test:empty'
491 491 payload chunk size: 0
492 492 part header size: 16
493 493 part type: "test:song"
494 494 part id: "2"
495 495 part parameters: 0
496 496 found a handler for part 'test:song'
497 497 The choir starts singing:
498 498 payload chunk size: 178
499 499 payload chunk size: 0
500 500 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
501 501 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
502 502 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
503 503 part header size: 22
504 504 part type: "test:debugreply"
505 505 part id: "3"
506 506 part parameters: 0
507 507 found a handler for part 'test:debugreply'
508 508 debugreply: no reply
509 509 payload chunk size: 0
510 510 part header size: 43
511 511 part type: "test:math"
512 512 part id: "4"
513 513 part parameters: 3
514 514 ignoring unknown advisory part 'test:math'
515 515 payload chunk size: 2
516 516 payload chunk size: 0
517 517 part header size: 16
518 518 part type: "test:ping"
519 519 part id: "5"
520 520 part parameters: 0
521 521 found a handler for part 'test:ping'
522 522 received ping request (id 5)
523 523 payload chunk size: 0
524 524 part header size: 0
525 525 end of bundle2 stream
526 526 0 unread bytes
527 527 3 total verses sung
528 528
529 529 Unbundle with an unknown mandatory part
530 530 (should abort)
531 531
532 532 $ hg bundle2 --parts --unknown ../unknown.hg2
533 533
534 534 $ hg unbundle2 < ../unknown.hg2
535 535 The choir starts singing:
536 536 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
537 537 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
538 538 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
539 539 debugreply: no reply
540 540 0 unread bytes
541 541 abort: missing support for 'test:unknown'
542 542 [255]
543 543
544 544 unbundle with a reply
545 545
546 546 $ hg bundle2 --parts --reply ../parts-reply.hg2
547 547 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
548 548 0 unread bytes
549 549 3 total verses sung
550 550
551 551 The reply is a bundle
552 552
553 553 $ cat ../reply.hg2
554 554 HG2X\x00\x00\x00\x1f (esc)
555 555 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
556 556 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
557 557 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
558 558 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
559 559 \x00\x00\x00\x00\x00\x1f (esc)
560 560 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
561 561 debugreply: 'city=!'
562 562 debugreply: 'celeste,ville'
563 563 debugreply: 'elephants'
564 564 debugreply: 'babar'
565 565 debugreply: 'celeste'
566 566 debugreply: 'ping-pong'
567 567 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to6\x00\x00\x00\x00\x00\x1f (esc)
568 568 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to6\x00\x00\x00=received ping request (id 6) (esc)
569 569 replying to ping request (id 6)
570 570 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
571 571
572 572 The reply is valid
573 573
574 574 $ hg statbundle2 < ../reply.hg2
575 575 options count: 0
576 576 :b2x:output:
577 577 mandatory: 0
578 578 advisory: 1
579 579 payload: 217 bytes
580 580 :b2x:output:
581 581 mandatory: 0
582 582 advisory: 1
583 583 payload: 201 bytes
584 584 :test:pong:
585 585 mandatory: 1
586 586 advisory: 0
587 587 payload: 0 bytes
588 588 :b2x:output:
589 589 mandatory: 0
590 590 advisory: 1
591 591 payload: 61 bytes
592 592 parts count: 4
593 593
594 594 Unbundle the reply to get the output:
595 595
596 596 $ hg unbundle2 < ../reply.hg2
597 597 remote: The choir starts singing:
598 598 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
599 599 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
600 600 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
601 601 remote: debugreply: capabilities:
602 602 remote: debugreply: 'city=!'
603 603 remote: debugreply: 'celeste,ville'
604 604 remote: debugreply: 'elephants'
605 605 remote: debugreply: 'babar'
606 606 remote: debugreply: 'celeste'
607 607 remote: debugreply: 'ping-pong'
608 608 remote: received ping request (id 6)
609 609 remote: replying to ping request (id 6)
610 610 0 unread bytes
611 611
612 612 Test push race detection
613 613
614 614 $ hg bundle2 --pushrace ../part-race.hg2
615 615
616 616 $ hg unbundle2 < ../part-race.hg2
617 617 0 unread bytes
618 618 abort: push race: repository changed while pushing - please try again
619 619 [255]
620 620
621 621 Support for changegroup
622 622 ===================================
623 623
624 624 $ hg unbundle $TESTDIR/bundles/rebase.hg
625 625 adding changesets
626 626 adding manifests
627 627 adding file changes
628 628 added 8 changesets with 7 changes to 7 files (+3 heads)
629 629 (run 'hg heads' to see heads, 'hg merge' to merge)
630 630
631 631 $ hg log -G
632 632 o changeset: 8:02de42196ebe
633 633 | tag: tip
634 634 | parent: 6:24b6387c8c8c
635 635 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
636 636 | date: Sat Apr 30 15:24:48 2011 +0200
637 637 | summary: H
638 638 |
639 639 | o changeset: 7:eea13746799a
640 640 |/| parent: 6:24b6387c8c8c
641 641 | | parent: 5:9520eea781bc
642 642 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
643 643 | | date: Sat Apr 30 15:24:48 2011 +0200
644 644 | | summary: G
645 645 | |
646 646 o | changeset: 6:24b6387c8c8c
647 647 | | parent: 1:cd010b8cd998
648 648 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
649 649 | | date: Sat Apr 30 15:24:48 2011 +0200
650 650 | | summary: F
651 651 | |
652 652 | o changeset: 5:9520eea781bc
653 653 |/ parent: 1:cd010b8cd998
654 654 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
655 655 | date: Sat Apr 30 15:24:48 2011 +0200
656 656 | summary: E
657 657 |
658 658 | o changeset: 4:32af7686d403
659 659 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
660 660 | | date: Sat Apr 30 15:24:48 2011 +0200
661 661 | | summary: D
662 662 | |
663 663 | o changeset: 3:5fddd98957c8
664 664 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
665 665 | | date: Sat Apr 30 15:24:48 2011 +0200
666 666 | | summary: C
667 667 | |
668 668 | o changeset: 2:42ccdea3bb16
669 669 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
670 670 | date: Sat Apr 30 15:24:48 2011 +0200
671 671 | summary: B
672 672 |
673 673 o changeset: 1:cd010b8cd998
674 674 parent: -1:000000000000
675 675 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
676 676 date: Sat Apr 30 15:24:48 2011 +0200
677 677 summary: A
678 678
679 679 @ changeset: 0:3903775176ed
680 680 user: test
681 681 date: Thu Jan 01 00:00:00 1970 +0000
682 682 summary: a
683 683
684 684
685 685 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
686 686 4 changesets found
687 687 list of changesets:
688 688 32af7686d403cf45b5d95f2d70cebea587ac806a
689 689 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
690 690 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
691 691 02de42196ebee42ef284b6780a87cdc96e8eaab6
692 692 start emission of HG2X stream
693 693 bundle parameter:
694 694 start of parts
695 695 bundle part: "b2x:changegroup"
696 696 bundling: 1/4 changesets (25.00%)
697 697 bundling: 2/4 changesets (50.00%)
698 698 bundling: 3/4 changesets (75.00%)
699 699 bundling: 4/4 changesets (100.00%)
700 700 bundling: 1/4 manifests (25.00%)
701 701 bundling: 2/4 manifests (50.00%)
702 702 bundling: 3/4 manifests (75.00%)
703 703 bundling: 4/4 manifests (100.00%)
704 704 bundling: D 1/3 files (33.33%)
705 705 bundling: E 2/3 files (66.67%)
706 706 bundling: H 3/3 files (100.00%)
707 707 end of bundle
708 708
709 709 $ cat ../rev.hg2
710 710 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
711 711 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
712 712 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
713 713 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
714 714 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
715 715 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
716 716 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
717 717 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
718 718 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
719 719 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
720 720 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
721 721 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
722 722 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
723 723 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
724 724 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
725 725 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
726 726 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
727 727 l\r (no-eol) (esc)
728 728 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
729 729 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
730 730 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
731 731 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
732 732
733 733 $ hg unbundle2 < ../rev.hg2
734 734 adding changesets
735 735 adding manifests
736 736 adding file changes
737 737 added 0 changesets with 0 changes to 3 files
738 738 0 unread bytes
739 739 addchangegroup return: 1
740 740
741 741 with reply
742 742
743 743 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
744 744 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
745 745 0 unread bytes
746 746 addchangegroup return: 1
747 747
748 748 $ cat ../rev-reply.hg2
749 749 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
750 750 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
751 751 adding manifests
752 752 adding file changes
753 753 added 0 changesets with 0 changes to 3 files
754 754 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
755 755
756 756 Real world exchange
757 757 =====================
758 758
759 759
760 760 clone --pull
761 761
762 762 $ cd ..
763 763 $ hg clone main other --pull --rev 9520eea781bc
764 764 adding changesets
765 765 adding manifests
766 766 adding file changes
767 767 added 2 changesets with 2 changes to 2 files
768 768 updating to branch default
769 769 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
770 770 $ hg -R other log -G
771 771 @ changeset: 1:9520eea781bc
772 772 | tag: tip
773 773 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
774 774 | date: Sat Apr 30 15:24:48 2011 +0200
775 775 | summary: E
776 776 |
777 777 o changeset: 0:cd010b8cd998
778 778 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
779 779 date: Sat Apr 30 15:24:48 2011 +0200
780 780 summary: A
781 781
782 782
783 783 pull
784 784
785 785 $ hg -R other pull -r 24b6387c8c8c
786 786 pulling from $TESTTMP/main (glob)
787 787 searching for changes
788 788 adding changesets
789 789 adding manifests
790 790 adding file changes
791 791 added 1 changesets with 1 changes to 1 files (+1 heads)
792 792 (run 'hg heads' to see heads, 'hg merge' to merge)
793 793
794 pull empty
795
796 $ hg -R other pull -r 24b6387c8c8c
797 pulling from $TESTTMP/main (glob)
798 no changes found
799
794 800 push
795 801
796 802 $ hg -R main push other --rev eea13746799a
797 803 pushing to other
798 804 searching for changes
799 805 remote: adding changesets
800 806 remote: adding manifests
801 807 remote: adding file changes
802 808 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
803 809
804 810 pull over ssh
805 811
806 812 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
807 813 pulling from ssh://user@dummy/main
808 814 searching for changes
809 815 adding changesets
810 816 adding manifests
811 817 adding file changes
812 818 added 1 changesets with 1 changes to 1 files (+1 heads)
813 819 (run 'hg heads' to see heads, 'hg merge' to merge)
814 820
815 821 pull over http
816 822
817 823 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
818 824 $ cat main.pid >> $DAEMON_PIDS
819 825
820 826 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
821 827 pulling from http://localhost:$HGPORT/
822 828 searching for changes
823 829 adding changesets
824 830 adding manifests
825 831 adding file changes
826 832 added 1 changesets with 1 changes to 1 files (+1 heads)
827 833 (run 'hg heads .' to see heads, 'hg merge' to merge)
828 834 $ cat main-error.log
829 835
830 836 push over ssh
831 837
832 838 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
833 839 pushing to ssh://user@dummy/other
834 840 searching for changes
835 841 remote: adding changesets
836 842 remote: adding manifests
837 843 remote: adding file changes
838 844 remote: added 1 changesets with 1 changes to 1 files
839 845
840 846 push over http
841 847
842 848 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
843 849 $ cat other.pid >> $DAEMON_PIDS
844 850
845 851 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
846 852 pushing to http://localhost:$HGPORT2/
847 853 searching for changes
848 854 remote: adding changesets
849 855 remote: adding manifests
850 856 remote: adding file changes
851 857 remote: added 1 changesets with 1 changes to 1 files
852 858 $ cat other-error.log
853 859
854 860 Check final content.
855 861
856 862 $ hg -R other log -G
857 863 o changeset: 7:32af7686d403
858 864 | tag: tip
859 865 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
860 866 | date: Sat Apr 30 15:24:48 2011 +0200
861 867 | summary: D
862 868 |
863 869 o changeset: 6:5fddd98957c8
864 870 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
865 871 | date: Sat Apr 30 15:24:48 2011 +0200
866 872 | summary: C
867 873 |
868 874 o changeset: 5:42ccdea3bb16
869 875 | parent: 0:cd010b8cd998
870 876 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
871 877 | date: Sat Apr 30 15:24:48 2011 +0200
872 878 | summary: B
873 879 |
874 880 | o changeset: 4:02de42196ebe
875 881 | | parent: 2:24b6387c8c8c
876 882 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
877 883 | | date: Sat Apr 30 15:24:48 2011 +0200
878 884 | | summary: H
879 885 | |
880 886 | | o changeset: 3:eea13746799a
881 887 | |/| parent: 2:24b6387c8c8c
882 888 | | | parent: 1:9520eea781bc
883 889 | | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
884 890 | | | date: Sat Apr 30 15:24:48 2011 +0200
885 891 | | | summary: G
886 892 | | |
887 893 | o | changeset: 2:24b6387c8c8c
888 894 |/ / parent: 0:cd010b8cd998
889 895 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
890 896 | | date: Sat Apr 30 15:24:48 2011 +0200
891 897 | | summary: F
892 898 | |
893 899 | @ changeset: 1:9520eea781bc
894 900 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
895 901 | date: Sat Apr 30 15:24:48 2011 +0200
896 902 | summary: E
897 903 |
898 904 o changeset: 0:cd010b8cd998
899 905 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
900 906 date: Sat Apr 30 15:24:48 2011 +0200
901 907 summary: A
902 908
903 909
904 910 Error Handling
905 911 ==============
906 912
907 913 Check that errors are properly returned to the client during push.
908 914
909 915 Setting up
910 916
911 917 $ cat > failpush.py << EOF
912 918 > """A small extension that makes push fail when using bundle2
913 919 >
914 920 > used to test error handling in bundle2
915 921 > """
916 922 >
917 923 > from mercurial import util
918 924 > from mercurial import bundle2
919 925 > from mercurial import exchange
920 926 > from mercurial import extensions
921 927 >
922 928 > def _pushbundle2failpart(orig, pushop, bundler):
923 929 > extradata = orig(pushop, bundler)
924 930 > reason = pushop.ui.config('failpush', 'reason', None)
925 931 > part = None
926 932 > if reason == 'abort':
927 933 > part = bundle2.bundlepart('test:abort')
928 934 > if reason == 'unknown':
929 935 > part = bundle2.bundlepart('TEST:UNKNOWN')
930 936 > if reason == 'race':
931 937 > # 20 Bytes of crap
932 938 > part = bundle2.bundlepart('b2x:check:heads', data='01234567890123456789')
933 939 > if part is not None:
934 940 > bundler.addpart(part)
935 941 > return extradata
936 942 >
937 943 > @bundle2.parthandler("test:abort")
938 944 > def handleabort(op, part):
939 945 > raise util.Abort('Abandon ship!', hint="don't panic")
940 946 >
941 947 > def uisetup(ui):
942 948 > extensions.wrapfunction(exchange, '_pushbundle2extraparts', _pushbundle2failpart)
943 949 >
944 950 > EOF
945 951
946 952 $ cd main
947 953 $ hg up tip
948 954 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
949 955 $ echo 'I' > I
950 956 $ hg add I
951 957 $ hg ci -m 'I'
952 958 $ hg id
953 959 e7ec4e813ba6 tip
954 960 $ cd ..
955 961
956 962 $ cat << EOF >> $HGRCPATH
957 963 > [extensions]
958 964 > failpush=$TESTTMP/failpush.py
959 965 > EOF
960 966
961 967 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
962 968 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
963 969 $ cat other.pid >> $DAEMON_PIDS
964 970
965 971 Doing the actual push: Abort error
966 972
967 973 $ cat << EOF >> $HGRCPATH
968 974 > [failpush]
969 975 > reason = abort
970 976 > EOF
971 977
972 978 $ hg -R main push other -r e7ec4e813ba6
973 979 pushing to other
974 980 searching for changes
975 981 abort: Abandon ship!
976 982 (don't panic)
977 983 [255]
978 984
979 985 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
980 986 pushing to ssh://user@dummy/other
981 987 searching for changes
982 988 abort: Abandon ship!
983 989 (don't panic)
984 990 [255]
985 991
986 992 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
987 993 pushing to http://localhost:$HGPORT2/
988 994 searching for changes
989 995 abort: Abandon ship!
990 996 (don't panic)
991 997 [255]
992 998
993 999
994 1000 Doing the actual push: unknown mandatory parts
995 1001
996 1002 $ cat << EOF >> $HGRCPATH
997 1003 > [failpush]
998 1004 > reason = unknown
999 1005 > EOF
1000 1006
1001 1007 $ hg -R main push other -r e7ec4e813ba6
1002 1008 pushing to other
1003 1009 searching for changes
1004 1010 abort: missing support for 'test:unknown'
1005 1011 [255]
1006 1012
1007 1013 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1008 1014 pushing to ssh://user@dummy/other
1009 1015 searching for changes
1010 1016 abort: missing support for "'test:unknown'"
1011 1017 [255]
1012 1018
1013 1019 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1014 1020 pushing to http://localhost:$HGPORT2/
1015 1021 searching for changes
1016 1022 abort: missing support for "'test:unknown'"
1017 1023 [255]
1018 1024
1019 1025 Doing the actual push: race
1020 1026
1021 1027 $ cat << EOF >> $HGRCPATH
1022 1028 > [failpush]
1023 1029 > reason = race
1024 1030 > EOF
1025 1031
1026 1032 $ hg -R main push other -r e7ec4e813ba6
1027 1033 pushing to other
1028 1034 searching for changes
1029 1035 abort: push failed:
1030 1036 'repository changed while pushing - please try again'
1031 1037 [255]
1032 1038
1033 1039 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1034 1040 pushing to ssh://user@dummy/other
1035 1041 searching for changes
1036 1042 abort: push failed:
1037 1043 'repository changed while pushing - please try again'
1038 1044 [255]
1039 1045
1040 1046 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1041 1047 pushing to http://localhost:$HGPORT2/
1042 1048 searching for changes
1043 1049 abort: push failed:
1044 1050 'repository changed while pushing - please try again'
1045 1051 [255]
1046 1052
1047 1053 Doing the actual push: hook abort
1048 1054
1049 1055 $ cat << EOF >> $HGRCPATH
1050 1056 > [failpush]
1051 1057 > reason =
1052 1058 > [hooks]
1053 1059 > b2x-pretransactionclose.failpush = false
1054 1060 > EOF
1055 1061
1056 1062 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1057 1063 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1058 1064 $ cat other.pid >> $DAEMON_PIDS
1059 1065
1060 1066 $ hg -R main push other -r e7ec4e813ba6
1061 1067 pushing to other
1062 1068 searching for changes
1063 1069 transaction abort!
1064 1070 rollback completed
1065 1071 abort: b2x-pretransactionclose.failpush hook exited with status 1
1066 1072 [255]
1067 1073
1068 1074 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1069 1075 pushing to ssh://user@dummy/other
1070 1076 searching for changes
1071 1077 abort: b2x-pretransactionclose.failpush hook exited with status 1
1072 1078 remote: transaction abort!
1073 1079 remote: rollback completed
1074 1080 [255]
1075 1081
1076 1082 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1077 1083 pushing to http://localhost:$HGPORT2/
1078 1084 searching for changes
1079 1085 abort: b2x-pretransactionclose.failpush hook exited with status 1
1080 1086 [255]
1081 1087
1082 1088
General Comments 0
You need to be logged in to leave comments. Login now