##// END OF EJS Templates
bundle2: pull obsmarkers relevant to the pulled set through bundle2...
Pierre-Yves David -
r22354:a89add6c default
parent child Browse files
Show More
@@ -1,1072 +1,1077
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13 13
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte header on ``fh`` and return a matching unbundler.

    ``fname`` is used only for error messages ("stream" when empty); when
    ``vfs`` is given the name is resolved relative to it.  Raises
    ``util.Abort`` for non-Mercurial data or unknown bundle versions.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        # headerless bundle: push the bytes back and assume HG10UN
        if header.startswith('\0') and not header.startswith('HG'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'

    magic = header[:2]
    version = header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        # compression algorithm follows the header unless already known
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    if version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None
    # pick the newest marker format both sides understand
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('B2X:OBSMARKERS', data=stream)
54 54
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.ret:
            return self.futureheads
        else:
            return self.fallbackheads
147 147
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        # local-to-local push: every local requirement must be understood
        # by the destination repository
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push: we must lock the remote repository ourselves
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            # bundle2 path is experimental and gated behind a config knob;
            # steps it completes are recorded in pushop.stepsdone so the
            # legacy calls below become no-ops for them
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
                and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    return pushop.ret
215 215
# list of steps to perform discovery before push, in execution order
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
223 223
def pushdiscovery(stepname):
    """decorator registering a discovery step to run before push

    The decorated function is stored in ``pushdiscoverymapping`` under
    ``stepname`` and the name is appended to ``pushdiscoveryorder``;
    registration order is execution order, so decoration order matters.

    Only use this decorator for brand new steps. To wrap a step from an
    extension, change the pushdiscovery dictionary directly."""
    def register(func):
        # refuse double registration of the same step name
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
239 239
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
245 245
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    unfi = pushop.repo.unfiltered()
    # common/incoming computation feeds the outgoing computation below
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
259 259
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing remote: only its public changesets need updating
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
294 294
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """select the obsolescence markers relevant to the pushed set"""
    # guards mirror the original and-chain, preserving evaluation order
    if not obsolete._enabled:
        return
    if not pushop.repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    repo = pushop.repo
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
305 305
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """select bookmarks whose remote position should be advanced"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict updates to bookmarks reachable from the pushed revs
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    comp = bookmarks.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
    # only bookmarks advanced locally (advsrc) are candidates for pushing
    for book, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        pushop.outbookmarks.append((book, dcid, scid))
323 323
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set before pushing it.

    Returns False when there is nothing to push, True otherwise.
    Unless --force is set, aborts when the push would propagate an
    obsolete/troubled head or create unexpected new remote heads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
362 362
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
370 370
def b2partsgenerator(stepname):
    """decorator registering a bundle2 part generator for push

    The decorated function is stored in ``b2partsgenmapping`` under
    ``stepname`` and the name is appended to ``b2partsgenorder``;
    registration order is execution order, so decoration order matters.

    Only use this decorator for brand new steps. To wrap a step from an
    extension, change the b2partsgenmapping dictionary directly."""
    def register(func):
        # refuse double registration of the same step name
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        b2partsgenorder.append(stepname)
        return func
    return register
386 386
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.ret`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        # server aborts if its heads changed since our discovery
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.ret = cgreplies['changegroup'][0]['return']
    return handlereply
412 412
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not 'b2x:pushkey' in b2caps:
        # server cannot handle pushkey parts; leave the step undone so the
        # legacy pushkey path in _pushsyncphase takes over
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    # one pushkey part per head to turn public on the remote
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        # check each part's reply and warn about ignored/failed updates
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
444 444
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the outgoing bundle2"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format in common: leave the step undone so the
        # legacy pushkey transport can handle it
        return
    pushop.stepsdone.add('obsmarkers')
    markers = pushop.outobsmarkers
    if markers:
        buildobsmarkerspart(bundler, markers)
455 455
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    # (docstring previously said "phase push" — copy-paste from _pushb2phases)
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'b2x:pushkey' not in b2caps:
        # server cannot handle pushkey parts; leave the step undone so the
        # legacy pushkey path in _pushbookmark takes over
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    # one pushkey part per bookmark to move on the remote
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        part2book.append((part.id, book))
    def handlereply(op):
        # report per-bookmark outcome from the server's reply parts
        for partid, book in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    pushop.ui.status(_("updating bookmark %s\n") % book)
                else:
                    pushop.ui.warn(_('updating bookmark %s failed!\n') % book)
    return handlereply
488 488
489 489
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
    bundler.newpart('b2x:replycaps', data=capsblob)
    replyhandlers = []
    # each registered generator may add parts and return a reply handler
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (only the replycaps part is present)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    try:
        op = bundle2.processbundle(pushop.repo, reply)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    # let each part generator inspect the server's answer
    for rephand in replyhandlers:
        rephand(op)
519 519
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        # already pushed (e.g. through bundle2)
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
567 567
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.ret:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        b2caps = bundle2.bundle2caps(pushop.remote)
        if 'b2x:pushkey' in b2caps:
            # server supports bundle2, let's do a batched push through it
            #
            # This will eventually be unified with the changesets bundle2 push
            bundler = bundle2.bundle20(pushop.ui, b2caps)
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
            bundler.newpart('b2x:replycaps', data=capsblob)
            part2node = []
            enc = pushkey.encode
            for newremotehead in outdated:
                part = bundler.newpart('b2x:pushkey')
                part.addparam('namespace', enc('phases'))
                part.addparam('key', enc(newremotehead.hex()))
                part.addparam('old', enc(str(phases.draft)))
                part.addparam('new', enc(str(phases.public)))
                part2node.append((part.id, newremotehead))
            stream = util.chunkbuffer(bundler.getchunks())
            try:
                reply = pushop.remote.unbundle(stream, ['force'], 'push')
                op = bundle2.processbundle(pushop.repo, reply)
            except error.BundleValueError, exc:
                raise util.Abort('missing support for %s' % exc)
            # inspect the reply part of each pushkey request
            for partid, node in part2node:
                partrep = op.records.getreplies(partid)
                results = partrep['pushkey']
                assert len(results) <= 1
                msg = None
                if not results:
                    msg = _('server ignored update of %s to public!\n') % node
                elif not int(results[0]['return']):
                    msg = _('updating %s to public failed!\n') % node
                if msg is not None:
                    pushop.ui.warn(msg)

        else:
            # fallback to independant pushkey command
            for newremotehead in outdated:
                r = pushop.remote.pushkey('phases',
                                          newremotehead.hex(),
                                          str(phases.draft),
                                          str(phases.public))
                if not r:
                    pushop.ui.warn(_('updating %s to public failed!\n')
                                   % newremotehead)
659 659
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        # we hold the local lock: phase changes go through a transaction
        tr = pushop.repo.transaction('push-phase-sync')
        try:
            phases.advanceboundary(pushop.repo, tr, phase, nodes)
            tr.close()
        finally:
            tr.release()
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
678 678
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    markers = pushop.outobsmarkers
    if not markers:
        return
    results = []
    remotedata = obsolete._pushkeyescape(markers)
    for key in sorted(remotedata, reverse=True):
        # reverse sort to ensure we end with dump0
        results.append(remote.pushkey('obsolete', key, '', remotedata[key]))
    if not all(results):
        # at least one pushkey call reported failure
        msg = _('failed to push some obsolete markers!\n')
        repo.ui.warn(msg)
697 697
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.ret == 0 or 'bookmarks' in pushop.stepsdone:
        # push failed outright, or bundle2 already handled bookmarks
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote
    for book, oldnode, newnode in pushop.outbookmarks:
        ok = remote.pushkey('bookmarks', book, oldnode, newnode)
        if ok:
            ui.status(_("updating bookmark %s\n") % book)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % book)
710 710
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
777 777
def pull(repo, remote, heads=None, force=False):
    """Pull changesets (limited by heads) from remote into the local repo.

    Returns the changegroup result code (``pullop.cgresult``): 0 when no
    changes were found, otherwise the value returned by the changegroup
    application.
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # local-to-local pull: every remote requirement must be understood
        # by this repository
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # bundle2 may complete some steps; the ones it handled are removed
        # from pullop.todosteps so the legacy calls below are skipped
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
806 806
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will grow to handle all
    discovery at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote,
        heads=pullop.heads, force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
817 817
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup
    pullop.todosteps.remove('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'b2x:listkeys' in remotecaps:
        kwargs['listkeys'] = ['phase']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete._enabled:
        # request the obsmarkers relevant to the pulled set when the
        # remote advertises a marker format we understand
        remoteversions = bundle2.obsmarkersversion(remotecaps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.todosteps.remove('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        assert len(op.records['changegroup']) == 1
        pullop.cgresult = op.records['changegroup'][0]['return']

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)
855 860
856 861 def _pullbundle2extraprepare(pullop, kwargs):
857 862 """hook function so that extensions can extend the getbundle call"""
858 863 pass
859 864
def _pullchangeset(pullop):
    """Pull a changegroup from the remote and add it to the local repo."""
    # Open the transaction as late as possible so that we do not create one
    # for nothing and do not break a future useful rollback call.
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    remote = pullop.remote
    if pullop.heads is None:
        if list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
        elif remote.capable('changegroupsubset'):
            # issue1320, avoid a race if remote changed after discovery
            pullop.heads = pullop.rheads

    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        bundle = remote.getbundle('pull', common=pullop.common,
                                  heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        bundle = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        bundle = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, bundle, 'pull',
                                                 remote.url())
891 896
def _pullphase(pullop):
    """Fetch phase information from the remote and apply it locally."""
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)
896 901
def _pullapplyphases(pullop, remotephases):
    """Apply phase movements deduced from the observed remote state."""
    pullop.todosteps.remove('phases')
    publishing = bool(remotephases.get('publishing', False))
    unfi = pullop.repo.unfiltered()
    getphase = unfi._phasecache.phase
    getrev = unfi.changelog.nodemap.get
    if remotephases and not publishing:
        # remote is new and unpublishing
        publicheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                      pullop.pulledsubset,
                                                      remotephases)
        draftheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets should be
        # seen as public.
        publicheads = pullop.pulledsubset
        draftheads = []

    # advance to public every head not already public locally
    topublic = [node for node in publicheads
                if getphase(unfi, getrev(node)) > phases.public]
    if topublic:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, phases.public, topublic)

    # advance to draft every head still beyond draft locally
    todraft = [node for node in draftheads
               if getphase(unfi, getrev(node)) > phases.draft]
    if todraft:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, phases.draft, todraft)
929 934
def _pullobsolete(pullop):
    """Pull obsolescence markers from the remote repository.

    `pullop.gettransaction` creates the pull transaction on demand; the
    transaction, when one was created here, is returned so the calling
    code knows it exists.

    Exists mostly to allow overriding for experimentation purposes."""
    pullop.todosteps.remove('obsmarkers')
    if not obsolete._enabled:
        return None
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' not in remoteobs:
        return None
    tr = pullop.gettransaction()
    for key in sorted(remoteobs, reverse=True):
        if key.startswith('dump'):
            data = base85.b85decode(remoteobs[key])
            pullop.repo.obsstore.mergemarkers(tr, data)
    pullop.repo.invalidatevolatilesets()
    return tr
951 956
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
958 963
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we want to use to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        cg = changegroup.getbundle(repo, source, heads=heads,
                                   common=common, bundlecaps=bundlecaps)
    elif bundlecaps is None or 'HG2X' not in bundlecaps:
        # A bundle10 has no container, so it cannot omit the changegroup.
        # (guard against bundlecaps=None before testing membership, matching
        # the check below)
        raise ValueError(_('request for bundle10 must include changegroup'))
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # plain bundle10 requested: no extra arguments can be honored
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    # one listkeys part per requested namespace
    listkeys = kwargs.get('listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
    _getbundleobsmarkerpart(bundler, repo, source, heads=heads, common=common,
                            bundlecaps=bundlecaps, **kwargs)
    # hook for extensions to contribute extra parts
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
1007 1012
1008 1013 def _getbundleobsmarkerpart(bundler, repo, source, heads=None, common=None,
1009 1014 bundlecaps=None, **kwargs):
1010 1015 if kwargs.get('obsmarkers', False):
1011 1016 if heads is None:
1012 1017 heads = repo.heads()
1013 1018 subset = [c.node() for c in repo.set('::%ln', heads)]
1014 1019 markers = repo.obsstore.relevantmarkers(subset)
1015 1020 buildobsmarkerspart(bundler, markers)
1016 1021
1017 1022 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
1018 1023 bundlecaps=None, **kwargs):
1019 1024 """hook function to let extensions add parts to the requested bundle"""
1020 1025 pass
1021 1026
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    # accepted when forced, when heads match exactly, or when the hashed
    # form of the heads matches
    if their_heads == ['force']:
        return
    if their_heads == heads:
        return
    if their_heads == ['hashed', heads_hash]:
        return
    # someone else committed/pushed/unbundled while we were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1035 1040
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # a 'params' attribute marks a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                cl = repo.unfiltered().changelog
                # expose pending (not yet committed) changes to the hook
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the failure so callers know it happened during bundle2
                exc.duringunbundle2 = True
                raise
        else:
            # legacy changegroup stream
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
@@ -1,1199 +1,1198
1 1
2 2 $ getmainid() {
3 3 > hg -R main log --template '{node}\n' --rev "$1"
4 4 > }
5 5
6 6 Create an extension to test bundle2 API
7 7
8 8 $ cat > bundle2.py << EOF
9 9 > """A small extension to test bundle2 implementation
10 10 >
11 11 > Current bundle2 implementation is far too limited to be used in any core
12 12 > code. We still need to be able to test it while it grow up.
13 13 > """
14 14 >
15 15 > import sys, os
16 16 > from mercurial import cmdutil
17 17 > from mercurial import util
18 18 > from mercurial import bundle2
19 19 > from mercurial import scmutil
20 20 > from mercurial import discovery
21 21 > from mercurial import changegroup
22 22 > from mercurial import error
23 23 > from mercurial import obsolete
24 24 >
25 25 > obsolete._enabled = True
26 26 >
27 27 > try:
28 28 > import msvcrt
29 29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 32 > except ImportError:
33 33 > pass
34 34 >
35 35 > cmdtable = {}
36 36 > command = cmdutil.command(cmdtable)
37 37 >
38 38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 41 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
42 42 >
43 43 > @bundle2.parthandler('test:song')
44 44 > def songhandler(op, part):
45 45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
46 46 > op.ui.write('The choir starts singing:\n')
47 47 > verses = 0
48 48 > for line in part.read().split('\n'):
49 49 > op.ui.write(' %s\n' % line)
50 50 > verses += 1
51 51 > op.records.add('song', {'verses': verses})
52 52 >
53 53 > @bundle2.parthandler('test:ping')
54 54 > def pinghandler(op, part):
55 55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
59 59 >
60 60 > @bundle2.parthandler('test:debugreply')
61 61 > def debugreply(op, part):
62 62 > """print data about the capacity of the bundle reply"""
63 63 > if op.reply is None:
64 64 > op.ui.write('debugreply: no reply\n')
65 65 > else:
66 66 > op.ui.write('debugreply: capabilities:\n')
67 67 > for cap in sorted(op.reply.capabilities):
68 68 > op.ui.write('debugreply: %r\n' % cap)
69 69 > for val in op.reply.capabilities[cap]:
70 70 > op.ui.write('debugreply: %r\n' % val)
71 71 >
72 72 > @command('bundle2',
73 73 > [('', 'param', [], 'stream level parameter'),
74 74 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 75 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
76 76 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
77 77 > ('', 'reply', False, 'produce a reply bundle'),
78 78 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
79 79 > ('r', 'rev', [], 'includes those changeset in the bundle'),],
80 80 > '[OUTPUTFILE]')
81 81 > def cmdbundle2(ui, repo, path=None, **opts):
 82  82 	 >     """write a bundle2 container on standard output"""
83 83 > bundler = bundle2.bundle20(ui)
84 84 > for p in opts['param']:
85 85 > p = p.split('=', 1)
86 86 > try:
87 87 > bundler.addparam(*p)
88 88 > except ValueError, exc:
89 89 > raise util.Abort('%s' % exc)
90 90 >
91 91 > if opts['reply']:
92 92 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
93 93 > bundler.newpart('b2x:replycaps', data=capsstring)
94 94 >
95 95 > if opts['pushrace']:
 96  96 	 >         # also serves to test the assignment of data outside of init
97 97 > part = bundler.newpart('b2x:check:heads')
98 98 > part.data = '01234567890123456789'
99 99 >
100 100 > revs = opts['rev']
101 101 > if 'rev' in opts:
102 102 > revs = scmutil.revrange(repo, opts['rev'])
103 103 > if revs:
104 104 > # very crude version of a changegroup part creation
105 105 > bundled = repo.revs('%ld::%ld', revs, revs)
106 106 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
107 107 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
108 108 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
109 109 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
110 110 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
111 111 >
112 112 > if opts['parts']:
113 113 > bundler.newpart('test:empty')
114 114 > # add a second one to make sure we handle multiple parts
115 115 > bundler.newpart('test:empty')
116 116 > bundler.newpart('test:song', data=ELEPHANTSSONG)
117 117 > bundler.newpart('test:debugreply')
118 118 > mathpart = bundler.newpart('test:math')
119 119 > mathpart.addparam('pi', '3.14')
120 120 > mathpart.addparam('e', '2.72')
121 121 > mathpart.addparam('cooking', 'raw', mandatory=False)
122 122 > mathpart.data = '42'
123 123 > # advisory known part with unknown mandatory param
124 124 > bundler.newpart('test:song', [('randomparam','')])
125 125 > if opts['unknown']:
126 126 > bundler.newpart('test:UNKNOWN', data='some random content')
127 127 > if opts['unknownparams']:
128 128 > bundler.newpart('test:SONG', [('randomparams', '')])
129 129 > if opts['parts']:
130 130 > bundler.newpart('test:ping')
131 131 >
132 132 > if path is None:
133 133 > file = sys.stdout
134 134 > else:
135 135 > file = open(path, 'wb')
136 136 >
137 137 > for chunk in bundler.getchunks():
138 138 > file.write(chunk)
139 139 >
140 140 > @command('unbundle2', [], '')
141 141 > def cmdunbundle2(ui, repo, replypath=None):
142 142 > """process a bundle2 stream from stdin on the current repo"""
143 143 > try:
144 144 > tr = None
145 145 > lock = repo.lock()
146 146 > tr = repo.transaction('processbundle')
147 147 > try:
148 148 > unbundler = bundle2.unbundle20(ui, sys.stdin)
149 149 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
150 150 > tr.close()
151 151 > except error.BundleValueError, exc:
152 152 > raise util.Abort('missing support for %s' % exc)
153 153 > except error.PushRaced, exc:
154 154 > raise util.Abort('push race: %s' % exc)
155 155 > finally:
156 156 > if tr is not None:
157 157 > tr.release()
158 158 > lock.release()
159 159 > remains = sys.stdin.read()
160 160 > ui.write('%i unread bytes\n' % len(remains))
161 161 > if op.records['song']:
162 162 > totalverses = sum(r['verses'] for r in op.records['song'])
163 163 > ui.write('%i total verses sung\n' % totalverses)
164 164 > for rec in op.records['changegroup']:
165 165 > ui.write('addchangegroup return: %i\n' % rec['return'])
166 166 > if op.reply is not None and replypath is not None:
167 167 > file = open(replypath, 'wb')
168 168 > for chunk in op.reply.getchunks():
169 169 > file.write(chunk)
170 170 >
171 171 > @command('statbundle2', [], '')
172 172 > def cmdstatbundle2(ui, repo):
173 173 > """print statistic on the bundle2 container read from stdin"""
174 174 > unbundler = bundle2.unbundle20(ui, sys.stdin)
175 175 > try:
176 176 > params = unbundler.params
177 177 > except error.BundleValueError, exc:
178 178 > raise util.Abort('unknown parameters: %s' % exc)
179 179 > ui.write('options count: %i\n' % len(params))
180 180 > for key in sorted(params):
181 181 > ui.write('- %s\n' % key)
182 182 > value = params[key]
183 183 > if value is not None:
184 184 > ui.write(' %s\n' % value)
185 185 > count = 0
186 186 > for p in unbundler.iterparts():
187 187 > count += 1
188 188 > ui.write(' :%s:\n' % p.type)
189 189 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
190 190 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
191 191 > ui.write(' payload: %i bytes\n' % len(p.read()))
192 192 > ui.write('parts count: %i\n' % count)
193 193 > EOF
194 194 $ cat >> $HGRCPATH << EOF
195 195 > [extensions]
196 196 > bundle2=$TESTTMP/bundle2.py
197 197 > [experimental]
198 198 > bundle2-exp=True
199 199 > [ui]
200 200 > ssh=python "$TESTDIR/dummyssh"
201 201 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
202 202 > [web]
203 203 > push_ssl = false
204 204 > allow_push = *
205 205 > [phases]
206 206 > publish=False
207 207 > EOF
208 208
209 209 The extension requires a repo (currently unused)
210 210
211 211 $ hg init main
212 212 $ cd main
213 213 $ touch a
214 214 $ hg add a
215 215 $ hg commit -m 'a'
216 216
217 217
218 218 Empty bundle
219 219 =================
220 220
221 221 - no option
222 222 - no parts
223 223
224 224 Test bundling
225 225
226 226 $ hg bundle2
227 227 HG2X\x00\x00\x00\x00 (no-eol) (esc)
228 228
229 229 Test unbundling
230 230
231 231 $ hg bundle2 | hg statbundle2
232 232 options count: 0
233 233 parts count: 0
234 234
235 235 Test old style bundle are detected and refused
236 236
237 237 $ hg bundle --all ../bundle.hg
238 238 1 changesets found
239 239 $ hg statbundle2 < ../bundle.hg
240 240 abort: unknown bundle version 10
241 241 [255]
242 242
243 243 Test parameters
244 244 =================
245 245
246 246 - some options
247 247 - no parts
248 248
249 249 advisory parameters, no value
250 250 -------------------------------
251 251
252 252 Simplest possible parameters form
253 253
254 254 Test generation simple option
255 255
256 256 $ hg bundle2 --param 'caution'
257 257 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
258 258
259 259 Test unbundling
260 260
261 261 $ hg bundle2 --param 'caution' | hg statbundle2
262 262 options count: 1
263 263 - caution
264 264 parts count: 0
265 265
266 266 Test generation multiple option
267 267
268 268 $ hg bundle2 --param 'caution' --param 'meal'
269 269 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
270 270
271 271 Test unbundling
272 272
273 273 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
274 274 options count: 2
275 275 - caution
276 276 - meal
277 277 parts count: 0
278 278
279 279 advisory parameters, with value
280 280 -------------------------------
281 281
282 282 Test generation
283 283
284 284 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
285 285 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
286 286
287 287 Test unbundling
288 288
289 289 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
290 290 options count: 3
291 291 - caution
292 292 - elephants
293 293 - meal
294 294 vegan
295 295 parts count: 0
296 296
297 297 parameter with special char in value
298 298 ---------------------------------------------------
299 299
300 300 Test generation
301 301
302 302 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
303 303 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
304 304
305 305 Test unbundling
306 306
307 307 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
308 308 options count: 2
309 309 - e|! 7/
310 310 babar%#==tutu
311 311 - simple
312 312 parts count: 0
313 313
314 314 Test unknown mandatory option
315 315 ---------------------------------------------------
316 316
317 317 $ hg bundle2 --param 'Gravity' | hg statbundle2
318 318 abort: unknown parameters: Stream Parameter - Gravity
319 319 [255]
320 320
321 321 Test debug output
322 322 ---------------------------------------------------
323 323
324 324 bundling debug
325 325
326 326 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
327 327 start emission of HG2X stream
328 328 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
329 329 start of parts
330 330 end of bundle
331 331
332 332 file content is ok
333 333
334 334 $ cat ../out.hg2
335 335 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
336 336
337 337 unbundling debug
338 338
339 339 $ hg statbundle2 --debug < ../out.hg2
340 340 start processing of HG2X stream
341 341 reading bundle2 stream parameters
342 342 ignoring unknown parameter 'e|! 7/'
343 343 ignoring unknown parameter 'simple'
344 344 options count: 2
345 345 - e|! 7/
346 346 babar%#==tutu
347 347 - simple
348 348 start extraction of bundle2 parts
349 349 part header size: 0
350 350 end of bundle2 stream
351 351 parts count: 0
352 352
353 353
354 354 Test buggy input
355 355 ---------------------------------------------------
356 356
357 357 empty parameter name
358 358
359 359 $ hg bundle2 --param '' --quiet
360 360 abort: empty parameter name
361 361 [255]
362 362
363 363 bad parameter name
364 364
365 365 $ hg bundle2 --param 42babar
366 366 abort: non letter first character: '42babar'
367 367 [255]
368 368
369 369
370 370 Test part
371 371 =================
372 372
373 373 $ hg bundle2 --parts ../parts.hg2 --debug
374 374 start emission of HG2X stream
375 375 bundle parameter:
376 376 start of parts
377 377 bundle part: "test:empty"
378 378 bundle part: "test:empty"
379 379 bundle part: "test:song"
380 380 bundle part: "test:debugreply"
381 381 bundle part: "test:math"
382 382 bundle part: "test:song"
383 383 bundle part: "test:ping"
384 384 end of bundle
385 385
386 386 $ cat ../parts.hg2
387 387 HG2X\x00\x00\x00\x11 (esc)
388 388 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
389 389 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
390 390 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
391 391 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
392 392
393 393
394 394 $ hg statbundle2 < ../parts.hg2
395 395 options count: 0
396 396 :test:empty:
397 397 mandatory: 0
398 398 advisory: 0
399 399 payload: 0 bytes
400 400 :test:empty:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 :test:song:
405 405 mandatory: 0
406 406 advisory: 0
407 407 payload: 178 bytes
408 408 :test:debugreply:
409 409 mandatory: 0
410 410 advisory: 0
411 411 payload: 0 bytes
412 412 :test:math:
413 413 mandatory: 2
414 414 advisory: 1
415 415 payload: 2 bytes
416 416 :test:song:
417 417 mandatory: 1
418 418 advisory: 0
419 419 payload: 0 bytes
420 420 :test:ping:
421 421 mandatory: 0
422 422 advisory: 0
423 423 payload: 0 bytes
424 424 parts count: 7
425 425
426 426 $ hg statbundle2 --debug < ../parts.hg2
427 427 start processing of HG2X stream
428 428 reading bundle2 stream parameters
429 429 options count: 0
430 430 start extraction of bundle2 parts
431 431 part header size: 17
432 432 part type: "test:empty"
433 433 part id: "0"
434 434 part parameters: 0
435 435 :test:empty:
436 436 mandatory: 0
437 437 advisory: 0
438 438 payload chunk size: 0
439 439 payload: 0 bytes
440 440 part header size: 17
441 441 part type: "test:empty"
442 442 part id: "1"
443 443 part parameters: 0
444 444 :test:empty:
445 445 mandatory: 0
446 446 advisory: 0
447 447 payload chunk size: 0
448 448 payload: 0 bytes
449 449 part header size: 16
450 450 part type: "test:song"
451 451 part id: "2"
452 452 part parameters: 0
453 453 :test:song:
454 454 mandatory: 0
455 455 advisory: 0
456 456 payload chunk size: 178
457 457 payload chunk size: 0
458 458 payload: 178 bytes
459 459 part header size: 22
460 460 part type: "test:debugreply"
461 461 part id: "3"
462 462 part parameters: 0
463 463 :test:debugreply:
464 464 mandatory: 0
465 465 advisory: 0
466 466 payload chunk size: 0
467 467 payload: 0 bytes
468 468 part header size: 43
469 469 part type: "test:math"
470 470 part id: "4"
471 471 part parameters: 3
472 472 :test:math:
473 473 mandatory: 2
474 474 advisory: 1
475 475 payload chunk size: 2
476 476 payload chunk size: 0
477 477 payload: 2 bytes
478 478 part header size: 29
479 479 part type: "test:song"
480 480 part id: "5"
481 481 part parameters: 1
482 482 :test:song:
483 483 mandatory: 1
484 484 advisory: 0
485 485 payload chunk size: 0
486 486 payload: 0 bytes
487 487 part header size: 16
488 488 part type: "test:ping"
489 489 part id: "6"
490 490 part parameters: 0
491 491 :test:ping:
492 492 mandatory: 0
493 493 advisory: 0
494 494 payload chunk size: 0
495 495 payload: 0 bytes
496 496 part header size: 0
497 497 end of bundle2 stream
498 498 parts count: 7
499 499
500 500 Test actual unbundling of test part
501 501 =======================================
502 502
503 503 Process the bundle
504 504
505 505 $ hg unbundle2 --debug < ../parts.hg2
506 506 start processing of HG2X stream
507 507 reading bundle2 stream parameters
508 508 start extraction of bundle2 parts
509 509 part header size: 17
510 510 part type: "test:empty"
511 511 part id: "0"
512 512 part parameters: 0
513 513 ignoring unsupported advisory part test:empty
514 514 payload chunk size: 0
515 515 part header size: 17
516 516 part type: "test:empty"
517 517 part id: "1"
518 518 part parameters: 0
519 519 ignoring unsupported advisory part test:empty
520 520 payload chunk size: 0
521 521 part header size: 16
522 522 part type: "test:song"
523 523 part id: "2"
524 524 part parameters: 0
525 525 found a handler for part 'test:song'
526 526 The choir starts singing:
527 527 payload chunk size: 178
528 528 payload chunk size: 0
529 529 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
530 530 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
531 531 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
532 532 part header size: 22
533 533 part type: "test:debugreply"
534 534 part id: "3"
535 535 part parameters: 0
536 536 found a handler for part 'test:debugreply'
537 537 debugreply: no reply
538 538 payload chunk size: 0
539 539 part header size: 43
540 540 part type: "test:math"
541 541 part id: "4"
542 542 part parameters: 3
543 543 ignoring unsupported advisory part test:math
544 544 payload chunk size: 2
545 545 payload chunk size: 0
546 546 part header size: 29
547 547 part type: "test:song"
548 548 part id: "5"
549 549 part parameters: 1
550 550 found a handler for part 'test:song'
551 551 ignoring unsupported advisory part test:song - randomparam
552 552 payload chunk size: 0
553 553 part header size: 16
554 554 part type: "test:ping"
555 555 part id: "6"
556 556 part parameters: 0
557 557 found a handler for part 'test:ping'
558 558 received ping request (id 6)
559 559 payload chunk size: 0
560 560 part header size: 0
561 561 end of bundle2 stream
562 562 0 unread bytes
563 563 3 total verses sung
564 564
565 565 Unbundle with an unknown mandatory part
566 566 (should abort)
567 567
568 568 $ hg bundle2 --parts --unknown ../unknown.hg2
569 569
570 570 $ hg unbundle2 < ../unknown.hg2
571 571 The choir starts singing:
572 572 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
573 573 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
574 574 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
575 575 debugreply: no reply
576 576 0 unread bytes
577 577 abort: missing support for test:unknown
578 578 [255]
579 579
580 580 Unbundle with an unknown mandatory part parameters
581 581 (should abort)
582 582
583 583 $ hg bundle2 --unknownparams ../unknown.hg2
584 584
585 585 $ hg unbundle2 < ../unknown.hg2
586 586 0 unread bytes
587 587 abort: missing support for test:song - randomparams
588 588 [255]
589 589
590 590 unbundle with a reply
591 591
592 592 $ hg bundle2 --parts --reply ../parts-reply.hg2
593 593 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
594 594 0 unread bytes
595 595 3 total verses sung
596 596
597 597 The reply is a bundle
598 598
599 599 $ cat ../reply.hg2
600 600 HG2X\x00\x00\x00\x1f (esc)
601 601 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
602 602 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
603 603 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
604 604 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
605 605 \x00\x00\x00\x00\x00\x1f (esc)
606 606 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
607 607 debugreply: 'city=!'
608 608 debugreply: 'celeste,ville'
609 609 debugreply: 'elephants'
610 610 debugreply: 'babar'
611 611 debugreply: 'celeste'
612 612 debugreply: 'ping-pong'
613 613 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
614 614 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
615 615 replying to ping request (id 7)
616 616 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
617 617
618 618 The reply is valid
619 619
620 620 $ hg statbundle2 < ../reply.hg2
621 621 options count: 0
622 622 :b2x:output:
623 623 mandatory: 0
624 624 advisory: 1
625 625 payload: 217 bytes
626 626 :b2x:output:
627 627 mandatory: 0
628 628 advisory: 1
629 629 payload: 201 bytes
630 630 :test:pong:
631 631 mandatory: 1
632 632 advisory: 0
633 633 payload: 0 bytes
634 634 :b2x:output:
635 635 mandatory: 0
636 636 advisory: 1
637 637 payload: 61 bytes
638 638 parts count: 4
639 639
640 640 Unbundle the reply to get the output:
641 641
642 642 $ hg unbundle2 < ../reply.hg2
643 643 remote: The choir starts singing:
644 644 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
645 645 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
646 646 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
647 647 remote: debugreply: capabilities:
648 648 remote: debugreply: 'city=!'
649 649 remote: debugreply: 'celeste,ville'
650 650 remote: debugreply: 'elephants'
651 651 remote: debugreply: 'babar'
652 652 remote: debugreply: 'celeste'
653 653 remote: debugreply: 'ping-pong'
654 654 remote: received ping request (id 7)
655 655 remote: replying to ping request (id 7)
656 656 0 unread bytes
657 657
658 658 Test push race detection
659 659
660 660 $ hg bundle2 --pushrace ../part-race.hg2
661 661
662 662 $ hg unbundle2 < ../part-race.hg2
663 663 0 unread bytes
664 664 abort: push race: repository changed while pushing - please try again
665 665 [255]
666 666
667 667 Support for changegroup
668 668 ===================================
669 669
670 670 $ hg unbundle $TESTDIR/bundles/rebase.hg
671 671 adding changesets
672 672 adding manifests
673 673 adding file changes
674 674 added 8 changesets with 7 changes to 7 files (+3 heads)
675 675 (run 'hg heads' to see heads, 'hg merge' to merge)
676 676
677 677 $ hg log -G
678 678 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
679 679 |
680 680 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
681 681 |/|
682 682 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
683 683 | |
684 684 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
685 685 |/
686 686 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
687 687 | |
688 688 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
689 689 | |
690 690 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
691 691 |/
692 692 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
693 693
694 694 @ 0:3903775176ed draft test a
695 695
696 696
697 697 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
698 698 4 changesets found
699 699 list of changesets:
700 700 32af7686d403cf45b5d95f2d70cebea587ac806a
701 701 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
702 702 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
703 703 02de42196ebee42ef284b6780a87cdc96e8eaab6
704 704 start emission of HG2X stream
705 705 bundle parameter:
706 706 start of parts
707 707 bundle part: "b2x:changegroup"
708 708 bundling: 1/4 changesets (25.00%)
709 709 bundling: 2/4 changesets (50.00%)
710 710 bundling: 3/4 changesets (75.00%)
711 711 bundling: 4/4 changesets (100.00%)
712 712 bundling: 1/4 manifests (25.00%)
713 713 bundling: 2/4 manifests (50.00%)
714 714 bundling: 3/4 manifests (75.00%)
715 715 bundling: 4/4 manifests (100.00%)
716 716 bundling: D 1/3 files (33.33%)
717 717 bundling: E 2/3 files (66.67%)
718 718 bundling: H 3/3 files (100.00%)
719 719 end of bundle
720 720
721 721 $ cat ../rev.hg2
722 722 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
723 723 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
724 724 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
725 725 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
726 726 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
727 727 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 728 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
729 729 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
730 730 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
731 731 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
732 732 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
733 733 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
734 734 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
735 735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
736 736 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
737 737 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
738 738 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
739 739 l\r (no-eol) (esc)
740 740 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
741 741 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
742 742 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
743 743 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
744 744
745 745 $ hg unbundle2 < ../rev.hg2
746 746 adding changesets
747 747 adding manifests
748 748 adding file changes
749 749 added 0 changesets with 0 changes to 3 files
750 750 0 unread bytes
751 751 addchangegroup return: 1
752 752
753 753 with reply
754 754
755 755 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
756 756 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
757 757 0 unread bytes
758 758 addchangegroup return: 1
759 759
760 760 $ cat ../rev-reply.hg2
761 761 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
762 762 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
763 763 adding manifests
764 764 adding file changes
765 765 added 0 changesets with 0 changes to 3 files
766 766 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
767 767
768 768 $ cd ..
769 769
770 770 Real world exchange
771 771 =====================
772 772
773 773 Add more obsolescence information
774 774
775 775 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
776 776 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
777 777
778 778 clone --pull
779 779
780 780 $ hg -R main phase --public cd010b8cd998
781 781 $ hg clone main other --pull --rev 9520eea781bc
782 782 adding changesets
783 783 adding manifests
784 784 adding file changes
785 785 added 2 changesets with 2 changes to 2 files
786 1 new obsolescence markers
786 787 updating to branch default
787 788 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
788 789 $ hg -R other log -G
789 790 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
790 791 |
791 792 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
792 793
793 794 $ hg -R other debugobsolete
794 795 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
795 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
796 796
797 797 pull
798 798
799 799 $ hg -R main phase --public 9520eea781bc
800 800 $ hg -R other pull -r 24b6387c8c8c
801 801 pulling from $TESTTMP/main (glob)
802 802 searching for changes
803 803 adding changesets
804 804 adding manifests
805 805 adding file changes
806 806 added 1 changesets with 1 changes to 1 files (+1 heads)
807 1 new obsolescence markers
807 808 (run 'hg heads' to see heads, 'hg merge' to merge)
808 809 $ hg -R other log -G
809 810 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
810 811 |
811 812 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
812 813 |/
813 814 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
814 815
815 816 $ hg -R other debugobsolete
816 817 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
817 818 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
818 819
819 820 pull empty (with phase movement)
820 821
821 822 $ hg -R main phase --public 24b6387c8c8c
822 823 $ hg -R other pull -r 24b6387c8c8c
823 824 pulling from $TESTTMP/main (glob)
824 825 no changes found
825 826 $ hg -R other log -G
826 827 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
827 828 |
828 829 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
829 830 |/
830 831 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
831 832
832 833 $ hg -R other debugobsolete
833 834 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
834 835 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
835 836
836 837 pull empty
837 838
838 839 $ hg -R other pull -r 24b6387c8c8c
839 840 pulling from $TESTTMP/main (glob)
840 841 no changes found
841 842 $ hg -R other log -G
842 843 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
843 844 |
844 845 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
845 846 |/
846 847 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
847 848
848 849 $ hg -R other debugobsolete
849 850 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
850 851 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
851 852
852 853 add extra data to test their exchange during push
853 854
854 855 $ hg -R main bookmark --rev eea13746799a book_eea1
855 856 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
856 857 $ hg -R main bookmark --rev 02de42196ebe book_02de
857 858 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
858 859 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
859 860 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
860 861 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
861 862 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
862 863 $ hg -R main bookmark --rev 32af7686d403 book_32af
863 864 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
864 865
865 866 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
866 867 $ hg -R other bookmark --rev cd010b8cd998 book_02de
867 868 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
868 869 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
869 870 $ hg -R other bookmark --rev cd010b8cd998 book_32af
870 871
871 872 $ hg -R main phase --public eea13746799a
872 873
873 874 push
874 875 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
875 876 pushing to other
876 877 searching for changes
877 878 remote: adding changesets
878 879 remote: adding manifests
879 880 remote: adding file changes
880 881 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
881 882 remote: 1 new obsolescence markers
882 883 updating bookmark book_eea1
883 884 exporting bookmark book_eea1
884 885 $ hg -R other log -G
885 886 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
886 887 |\
887 888 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
888 889 | |
889 890 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
890 891 |/
891 892 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
892 893
893 894 $ hg -R other debugobsolete
894 895 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
895 896 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
896 897 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
897 898
898 899 pull over ssh
899 900
900 901 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
901 902 pulling from ssh://user@dummy/main
902 903 searching for changes
903 904 adding changesets
904 905 adding manifests
905 906 adding file changes
906 907 added 1 changesets with 1 changes to 1 files (+1 heads)
908 1 new obsolescence markers
907 909 updating bookmark book_02de
908 910 (run 'hg heads' to see heads, 'hg merge' to merge)
909 911 importing bookmark book_02de
910 912 $ hg -R other debugobsolete
911 913 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
912 914 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
913 915 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
914 916 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
915 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
916 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
918 917
919 918 pull over http
920 919
921 920 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
922 921 $ cat main.pid >> $DAEMON_PIDS
923 922
924 923 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
925 924 pulling from http://localhost:$HGPORT/
926 925 searching for changes
927 926 adding changesets
928 927 adding manifests
929 928 adding file changes
930 929 added 1 changesets with 1 changes to 1 files (+1 heads)
930 1 new obsolescence markers
931 931 updating bookmark book_42cc
932 932 (run 'hg heads .' to see heads, 'hg merge' to merge)
933 933 importing bookmark book_42cc
934 934 $ cat main-error.log
935 935 $ hg -R other debugobsolete
936 936 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
937 937 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
938 938 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
939 939 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
940 940 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
941 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
942 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
943 941
944 942 push over ssh
945 943
946 944 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
947 945 pushing to ssh://user@dummy/other
948 946 searching for changes
949 947 remote: adding changesets
950 948 remote: adding manifests
951 949 remote: adding file changes
952 950 remote: added 1 changesets with 1 changes to 1 files
951 remote: 1 new obsolescence markers
953 952 updating bookmark book_5fdd
954 953 exporting bookmark book_5fdd
955 954 $ hg -R other log -G
956 955 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
957 956 |
958 957 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
959 958 |
960 959 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
961 960 | |
962 961 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
963 962 | |/|
964 963 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
965 964 |/ /
966 965 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
967 966 |/
968 967 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
969 968
970 969 $ hg -R other debugobsolete
971 970 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
972 971 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
973 972 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
974 973 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
975 974 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
976 975 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
977 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
978 976
979 977 push over http
980 978
981 979 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
982 980 $ cat other.pid >> $DAEMON_PIDS
983 981
984 982 $ hg -R main phase --public 32af7686d403
985 983 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
986 984 pushing to http://localhost:$HGPORT2/
987 985 searching for changes
988 986 remote: adding changesets
989 987 remote: adding manifests
990 988 remote: adding file changes
991 989 remote: added 1 changesets with 1 changes to 1 files
990 remote: 1 new obsolescence markers
992 991 updating bookmark book_32af
993 992 exporting bookmark book_32af
994 993 $ cat other-error.log
995 994
996 995 Check final content.
997 996
998 997 $ hg -R other log -G
999 998 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
1000 999 |
1001 1000 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
1002 1001 |
1003 1002 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
1004 1003 |
1005 1004 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
1006 1005 | |
1007 1006 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
1008 1007 | |/|
1009 1008 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
1010 1009 |/ /
1011 1010 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
1012 1011 |/
1013 1012 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
1014 1013
1015 1014 $ hg -R other debugobsolete
1016 1015 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1017 1016 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1018 1017 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1019 1018 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1020 1019 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1021 1020 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1022 1021 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1023 1022
1024 1023 Error Handling
1025 1024 ==============
1026 1025
1027 1026 Check that errors are properly returned to the client during push.
1028 1027
1029 1028 Setting up
1030 1029
1031 1030 $ cat > failpush.py << EOF
1032 1031 > """A small extension that makes push fails when using bundle2
1033 1032 >
1034 1033 > used to test error handling in bundle2
1035 1034 > """
1036 1035 >
1037 1036 > from mercurial import util
1038 1037 > from mercurial import bundle2
1039 1038 > from mercurial import exchange
1040 1039 > from mercurial import extensions
1041 1040 >
1042 1041 > def _pushbundle2failpart(pushop, bundler):
1043 1042 > reason = pushop.ui.config('failpush', 'reason', None)
1044 1043 > part = None
1045 1044 > if reason == 'abort':
1046 1045 > bundler.newpart('test:abort')
1047 1046 > if reason == 'unknown':
1048 1047 > bundler.newpart('TEST:UNKNOWN')
1049 1048 > if reason == 'race':
1050 1049 > # 20 Bytes of crap
1051 1050 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
1052 1051 >
1053 1052 > @bundle2.parthandler("test:abort")
1054 1053 > def handleabort(op, part):
1055 1054 > raise util.Abort('Abandon ship!', hint="don't panic")
1056 1055 >
1057 1056 > def uisetup(ui):
1058 1057 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
1059 1058 > exchange.b2partsgenorder.insert(0, 'failpart')
1060 1059 >
1061 1060 > EOF
1062 1061
1063 1062 $ cd main
1064 1063 $ hg up tip
1065 1064 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
1066 1065 $ echo 'I' > I
1067 1066 $ hg add I
1068 1067 $ hg ci -m 'I'
1069 1068 $ hg id
1070 1069 e7ec4e813ba6 tip
1071 1070 $ cd ..
1072 1071
1073 1072 $ cat << EOF >> $HGRCPATH
1074 1073 > [extensions]
1075 1074 > failpush=$TESTTMP/failpush.py
1076 1075 > EOF
1077 1076
1078 1077 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1079 1078 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1080 1079 $ cat other.pid >> $DAEMON_PIDS
1081 1080
1082 1081 Doing the actual push: Abort error
1083 1082
1084 1083 $ cat << EOF >> $HGRCPATH
1085 1084 > [failpush]
1086 1085 > reason = abort
1087 1086 > EOF
1088 1087
1089 1088 $ hg -R main push other -r e7ec4e813ba6
1090 1089 pushing to other
1091 1090 searching for changes
1092 1091 abort: Abandon ship!
1093 1092 (don't panic)
1094 1093 [255]
1095 1094
1096 1095 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1097 1096 pushing to ssh://user@dummy/other
1098 1097 searching for changes
1099 1098 abort: Abandon ship!
1100 1099 (don't panic)
1101 1100 [255]
1102 1101
1103 1102 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1104 1103 pushing to http://localhost:$HGPORT2/
1105 1104 searching for changes
1106 1105 abort: Abandon ship!
1107 1106 (don't panic)
1108 1107 [255]
1109 1108
1110 1109
1111 1110 Doing the actual push: unknown mandatory parts
1112 1111
1113 1112 $ cat << EOF >> $HGRCPATH
1114 1113 > [failpush]
1115 1114 > reason = unknown
1116 1115 > EOF
1117 1116
1118 1117 $ hg -R main push other -r e7ec4e813ba6
1119 1118 pushing to other
1120 1119 searching for changes
1121 1120 abort: missing support for test:unknown
1122 1121 [255]
1123 1122
1124 1123 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1125 1124 pushing to ssh://user@dummy/other
1126 1125 searching for changes
1127 1126 abort: missing support for test:unknown
1128 1127 [255]
1129 1128
1130 1129 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1131 1130 pushing to http://localhost:$HGPORT2/
1132 1131 searching for changes
1133 1132 abort: missing support for test:unknown
1134 1133 [255]
1135 1134
1136 1135 Doing the actual push: race
1137 1136
1138 1137 $ cat << EOF >> $HGRCPATH
1139 1138 > [failpush]
1140 1139 > reason = race
1141 1140 > EOF
1142 1141
1143 1142 $ hg -R main push other -r e7ec4e813ba6
1144 1143 pushing to other
1145 1144 searching for changes
1146 1145 abort: push failed:
1147 1146 'repository changed while pushing - please try again'
1148 1147 [255]
1149 1148
1150 1149 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1151 1150 pushing to ssh://user@dummy/other
1152 1151 searching for changes
1153 1152 abort: push failed:
1154 1153 'repository changed while pushing - please try again'
1155 1154 [255]
1156 1155
1157 1156 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1158 1157 pushing to http://localhost:$HGPORT2/
1159 1158 searching for changes
1160 1159 abort: push failed:
1161 1160 'repository changed while pushing - please try again'
1162 1161 [255]
1163 1162
1164 1163 Doing the actual push: hook abort
1165 1164
1166 1165 $ cat << EOF >> $HGRCPATH
1167 1166 > [failpush]
1168 1167 > reason =
1169 1168 > [hooks]
1170 1169 > b2x-pretransactionclose.failpush = false
1171 1170 > EOF
1172 1171
1173 1172 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1174 1173 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1175 1174 $ cat other.pid >> $DAEMON_PIDS
1176 1175
1177 1176 $ hg -R main push other -r e7ec4e813ba6
1178 1177 pushing to other
1179 1178 searching for changes
1180 1179 transaction abort!
1181 1180 rollback completed
1182 1181 abort: b2x-pretransactionclose.failpush hook exited with status 1
1183 1182 [255]
1184 1183
1185 1184 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1186 1185 pushing to ssh://user@dummy/other
1187 1186 searching for changes
1188 1187 abort: b2x-pretransactionclose.failpush hook exited with status 1
1189 1188 remote: transaction abort!
1190 1189 remote: rollback completed
1191 1190 [255]
1192 1191
1193 1192 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1194 1193 pushing to http://localhost:$HGPORT2/
1195 1194 searching for changes
1196 1195 abort: b2x-pretransactionclose.failpush hook exited with status 1
1197 1196 [255]
1198 1197
1199 1198
General Comments 0
You need to be logged in to leave comments. Login now