##// END OF EJS Templates
pull: add source information to the transaction...
Pierre-Yves David -
r22972:44b16b59 default
parent child Browse files
Show More
@@ -1,1257 +1,1259 b''
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 13
def readbundle(ui, fh, fname, vfs=None):
    """Return an unpacker for the bundle data readable on *fh*.

    Peeks at the first four bytes to identify the bundle format and
    dispatches to the matching unbundler: cg1 for HG10, bundle2 for HG2X.
    Aborts on anything that is not a Mercurial bundle.
    """
    hdr = changegroup.readexactly(fh, 4)

    compression = None
    if fname:
        # a real file name was provided; make it absolute if a vfs is at hand
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        if hdr.startswith('\0') and not hdr.startswith('HG'):
            # headerless stream: treat it as an uncompressed HG10 bundle
            fh = changegroup.headerlessfixup(fh, hdr)
            hdr = "HG10"
            compression = 'UN'

    magic, version = hdr[0:2], hdr[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if compression is None:
            # the two compression bytes follow the HG10 magic
            compression = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compression)
    if version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    Returns None without creating a part when markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker
    format.
    """
    if not markers:
        return None
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(supported)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    data = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('B2X:OBSMARKERS', data=data)
54 54
class pushoperation(object):
    """A object that represent a single push operation

    It purpose is to carry push related state and very common operation.

    A new should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, as (name, old-node, new-node) triples
        self.outbookmarks = []

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # each value is a (success message, failure message) pair
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
161 161
162 162
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
    if pushop.remote.local():
        # local-filesystem peer: refuse up front if it cannot read our repo
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push needs the remote repo locked directly
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
               and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            # the steps below check pushop.stepsdone and become no-ops for
            # anything already performed through bundle2
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    return pushop
230 230
# ordered list of step names for discovery before push
# (populated by the ``pushdiscovery`` decorator below)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
238 238
def pushdiscovery(stepname):
    """decorator registering a new discovery step for push

    The decorated function is recorded under *stepname* in the step ->
    function mapping and *stepname* is appended to the ordered step list,
    so registration order is significant (this may matter).

    Only usable for brand new steps; to wrap a step defined by an
    extension, modify the pushdiscoverymapping dictionary directly."""
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
254 254
def _pushdiscovery(pushop):
    """Run every registered push discovery step, in registration order"""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
260 260
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """figure out which changesets are missing from the remote"""
    unfi = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
274 274
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    # on a publishing server everything we push turns public, so the
    # extra "and public()" restriction only applies to non-publishing ones
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
309 309
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed changesets"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
320 320
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and record the needed updates

    Each planned update is appended to ``pushop.outbookmarks`` as a
    (name, old-remote-node, new-node) triple; an empty old value means
    "create on remote" and an empty new value means "delete on remote".
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # limit bookmark moves to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set(pushop.bookmarks)

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
    # bookmarks that advanced locally
    for b, scid, dcid in advsrc:
        explicit.discard(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        explicit.discard(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in advdst + diverge + differ:
        explicit.discard(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        explicit.discard(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))

    if explicit:
        # explicitly requested bookmarks that matched nothing above
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % sorted(explicit)[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
367 367
def _pushcheckoutgoing(pushop):
    """validate the outgoing set; return False when there is nothing to push

    Unless --force was given, aborts when the outgoing set contains
    obsolete or troubled changesets, and runs the standard remote head
    checks.
    """
    unfi = pushop.repo.unfiltered()
    outgoing = pushop.outgoing
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    if pushop.force:
        # forced push skips all safety checks
        return True
    # if repo.obsstore == False --> no obsolete
    # then, save the iteration
    if unfi.obsstore:
        # these messages are here for 80 char limit reason
        mso = _("push includes obsolete changeset: %s!")
        mst = {"unstable": _("push includes unstable changeset: %s!"),
               "bumped": _("push includes bumped changeset: %s!"),
               "divergent": _("push includes divergent changeset: %s!")}
        # If there is at least one obsolete or unstable changeset in
        # missing, at least one of the missingheads will be obsolete or
        # unstable. So checking heads only is ok.
        for node in outgoing.missingheads:
            ctx = unfi[node]
            if ctx.obsolete():
                raise util.Abort(mso % ctx)
            elif ctx.troubled():
                raise util.Abort(mst[ctx.troubles()[0]] % ctx)
    newbm = pushop.ui.configlist('bookmarks', 'pushing')
    discovery.checkheads(unfi, pushop.remote, outgoing,
                         pushop.remoteheads,
                         pushop.newbranch,
                         bool(pushop.incoming),
                         newbm)
    return True
402 402
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the ``b2partsgenerator`` decorator below)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
410 410
def b2partsgenerator(stepname):
    """decorator registering a bundle2 part generator for push

    The decorated function is recorded under *stepname* in the step ->
    function mapping and *stepname* is appended to the ordered step list,
    so registration order is significant (this may matter).

    Only usable for brand new steps; to wrap a step defined by an
    extension, modify the b2partsgenmapping dictionary directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        b2partsgenorder.append(stepname)
        return func
    return register
426 426
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """add a changegroup part to the bundle2 push

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        # nothing to send
        return
    repo = pushop.repo
    repo.prepushoutgoinghooks(repo, pushop.remote, pushop.outgoing)
    if not pushop.force:
        # Send known heads to the server for race detection.
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalchangegroup(repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangroup returns from server reply"""
        replies = op.records.getreplies(cgpart.id)
        assert len(replies['changegroup']) == 1
        pushop.cgresult = replies['changegroup'][0]['return']
    return handlereply
452 452
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one b2x:pushkey part per outdated phase root and returns a reply
    handler that reports heads the server refused (or ignored) to turn
    public.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # idiomatic membership test, consistent with _pushb2bookmarks below
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        # match each pushkey reply with the head it was about
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
484 484
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part to the bundle2 push, when markers are outgoing"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        # no obsmarker format in common; leave the step for other mechanisms
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, pushop.outobsmarkers)
495 495
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Adds one b2x:pushkey part per planned bookmark update and returns a
    reply handler that reports the outcome of each update with the
    matching ``bookmsgmap`` message.

    Note: the original docstring said "phase push" — a copy-paste error,
    fixed here.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty old -> creation, empty new -> deletion, otherwise a move
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                # consistently use the local ``ui`` alias (was pushop.ui.warn)
                ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
538 538
539 539
540 540 def _pushbundle2(pushop):
541 541 """push data to the remote using bundle2
542 542
543 543 The only currently supported type of data is changegroup but this will
544 544 evolve in the future."""
545 545 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
546 546 # create reply capability
547 547 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
548 548 bundler.newpart('b2x:replycaps', data=capsblob)
549 549 replyhandlers = []
550 550 for partgenname in b2partsgenorder:
551 551 partgen = b2partsgenmapping[partgenname]
552 552 ret = partgen(pushop, bundler)
553 553 if callable(ret):
554 554 replyhandlers.append(ret)
555 555 # do not push if nothing to push
556 556 if bundler.nbparts <= 1:
557 557 return
558 558 stream = util.chunkbuffer(bundler.getchunks())
559 559 try:
560 560 reply = pushop.remote.unbundle(stream, ['force'], 'push')
561 561 except error.BundleValueError, exc:
562 562 raise util.Abort('missing support for %s' % exc)
563 563 try:
564 564 op = bundle2.processbundle(pushop.repo, reply)
565 565 except error.BundleValueError, exc:
566 566 raise util.Abort('missing support for %s' % exc)
567 567 for rephand in replyhandlers:
568 568 rephand(op)
569 569
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Stores the remote's return code in ``pushop.cgresult``.  A no-op when
    the changesets step was already handled (e.g. via bundle2).
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    repo = pushop.repo
    remote = pushop.remote
    repo.prepushoutgoinghooks(repo, remote, pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    fastpathok = (pushop.revs is None
                  and not outgoing.excluded
                  and not repo.changelog.filteredrevs)
    if fastpathok:
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(repo, bundlecaps)
        cg = changegroup.getsubset(repo, outgoing, bundler, 'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = remote.unbundle(cg, remoteheads, repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = remote.addchangegroup(cg, 'push', repo.url())
618 618
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    First applies the remote's phase data to the local repo, then pushes
    our own outdated phase roots to the remote (through bundle2 pushkey
    parts when available, individual pushkey calls otherwise).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            # changeset push failed or was empty: use the fallback set
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        b2caps = bundle2.bundle2caps(pushop.remote)
        if 'b2x:pushkey' in b2caps:
            # server supports bundle2, let's do a batched push through it
            #
            # This will eventually be unified with the changesets bundle2 push
            bundler = bundle2.bundle20(pushop.ui, b2caps)
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
            bundler.newpart('b2x:replycaps', data=capsblob)
            part2node = []
            enc = pushkey.encode
            for newremotehead in outdated:
                part = bundler.newpart('b2x:pushkey')
                part.addparam('namespace', enc('phases'))
                part.addparam('key', enc(newremotehead.hex()))
                part.addparam('old', enc(str(phases.draft)))
                part.addparam('new', enc(str(phases.public)))
                part2node.append((part.id, newremotehead))
            stream = util.chunkbuffer(bundler.getchunks())
            try:
                reply = pushop.remote.unbundle(stream, ['force'], 'push')
                op = bundle2.processbundle(pushop.repo, reply)
            except error.BundleValueError, exc:
                raise util.Abort('missing support for %s' % exc)
            # report heads the server refused or ignored
            for partid, node in part2node:
                partrep = op.records.getreplies(partid)
                results = partrep['pushkey']
                assert len(results) <= 1
                msg = None
                if not results:
                    msg = _('server ignored update of %s to public!\n') % node
                elif not int(results[0]['return']):
                    msg = _('updating %s to public failed!\n') % node
                if msg is not None:
                    pushop.ui.warn(msg)

        else:
            # fallback to independant pushkey command
            for newremotehead in outdated:
                r = pushop.remote.pushkey('phases',
                                          newremotehead.hex(),
                                          str(phases.draft),
                                          str(phases.public))
                if not r:
                    pushop.ui.warn(_('updating %s to public failed!\n')
                                   % newremotehead)
710 710
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.locallocked:
        # repo is not locked, do not change any phases!
        # Only inform the user that phases should have been moved when
        # applicable.
        repo = pushop.repo
        skipped = [n for n in nodes if phase < repo[n].phase()]
        if skipped:
            phasestr = phases.phasenames[phase]
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
        return
    tr = pushop.repo.transaction('push-phase-sync')
    try:
        phases.advanceboundary(pushop.repo, tr, phase, nodes)
        tr.close()
    finally:
        tr.release()
729 729
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    results = []
    remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', remotedata[key]))
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
748 748
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # empty old -> creation, empty new -> deletion, otherwise a move
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
770 770
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=()):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
            # expose pull source information to hooks run from the transaction
            self._tr.hookargs['source'] = 'pull'
            self._tr.hookargs['url'] = self.remote.url()
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
841 843
842 844 def pull(repo, remote, heads=None, force=False, bookmarks=()):
843 845 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks)
844 846 if pullop.remote.local():
845 847 missing = set(pullop.remote.requirements) - pullop.repo.supported
846 848 if missing:
847 849 msg = _("required features are not"
848 850 " supported in the destination:"
849 851 " %s") % (', '.join(sorted(missing)))
850 852 raise util.Abort(msg)
851 853
852 854 pullop.remotebookmarks = remote.listkeys('bookmarks')
853 855 lock = pullop.repo.lock()
854 856 try:
855 857 _pulldiscovery(pullop)
856 858 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
857 859 and pullop.remote.capable('bundle2-exp')):
858 860 _pullbundle2(pullop)
859 861 _pullchangeset(pullop)
860 862 _pullphase(pullop)
861 863 _pullbookmarks(pullop)
862 864 _pullobsolete(pullop)
863 865 pullop.closetransaction()
864 866 finally:
865 867 pullop.releasetransaction()
866 868 lock.release()
867 869
868 870 return pullop
869 871
870 872 # list of steps to perform discovery before pull
871 873 pulldiscoveryorder = []
872 874
873 875 # Mapping between step name and function
874 876 #
875 877 # This exists to help extensions wrap steps if necessary
876 878 pulldiscoverymapping = {}
877 879
878 880 def pulldiscovery(stepname):
879 881 """decorator for function performing discovery before pull
880 882
881 883 The function is added to the step -> function mapping and appended to the
882 884 list of steps. Beware that decorated function will be added in order (this
883 885 may matter).
884 886
885 887 You can only use this decorator for a new step, if you want to wrap a step
886 888 from an extension, change the pulldiscovery dictionary directly."""
887 889 def dec(func):
888 890 assert stepname not in pulldiscoverymapping
889 891 pulldiscoverymapping[stepname] = func
890 892 pulldiscoveryorder.append(stepname)
891 893 return func
892 894 return dec
893 895
894 896 def _pulldiscovery(pullop):
895 897 """Run all discovery steps"""
896 898 for stepname in pulldiscoveryorder:
897 899 step = pulldiscoverymapping[stepname]
898 900 step(pullop)
899 901
900 902 @pulldiscovery('changegroup')
901 903 def _pulldiscoverychangegroup(pullop):
902 904 """discovery phase for the pull
903 905
904 906 Current handle changeset discovery only, will change handle all discovery
905 907 at some point."""
906 908 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
907 909 pullop.remote,
908 910 heads=pullop.heads,
909 911 force=pullop.force)
910 912 pullop.common, pullop.fetch, pullop.rheads = tmp
911 913
912 914 def _pullbundle2(pullop):
913 915 """pull data using bundle2
914 916
915 917 For now, the only supported data are changegroup."""
916 918 remotecaps = bundle2.bundle2caps(pullop.remote)
917 919 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
918 920 # pulling changegroup
919 921 pullop.stepsdone.add('changegroup')
920 922
921 923 kwargs['common'] = pullop.common
922 924 kwargs['heads'] = pullop.heads or pullop.rheads
923 925 kwargs['cg'] = pullop.fetch
924 926 if 'b2x:listkeys' in remotecaps:
925 927 kwargs['listkeys'] = ['phase', 'bookmarks']
926 928 if not pullop.fetch:
927 929 pullop.repo.ui.status(_("no changes found\n"))
928 930 pullop.cgresult = 0
929 931 else:
930 932 if pullop.heads is None and list(pullop.common) == [nullid]:
931 933 pullop.repo.ui.status(_("requesting all changes\n"))
932 934 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
933 935 remoteversions = bundle2.obsmarkersversion(remotecaps)
934 936 if obsolete.commonversion(remoteversions) is not None:
935 937 kwargs['obsmarkers'] = True
936 938 pullop.stepsdone.add('obsmarkers')
937 939 _pullbundle2extraprepare(pullop, kwargs)
938 940 if kwargs.keys() == ['format']:
939 941 return # nothing to pull
940 942 bundle = pullop.remote.getbundle('pull', **kwargs)
941 943 try:
942 944 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
943 945 except error.BundleValueError, exc:
944 946 raise util.Abort('missing support for %s' % exc)
945 947
946 948 if pullop.fetch:
947 949 changedheads = 0
948 950 pullop.cgresult = 1
949 951 for cg in op.records['changegroup']:
950 952 ret = cg['return']
951 953 # If any changegroup result is 0, return 0
952 954 if ret == 0:
953 955 pullop.cgresult = 0
954 956 break
955 957 if ret < -1:
956 958 changedheads += ret + 1
957 959 elif ret > 1:
958 960 changedheads += ret - 1
959 961 if changedheads > 0:
960 962 pullop.cgresult = 1 + changedheads
961 963 elif changedheads < 0:
962 964 pullop.cgresult = -1 + changedheads
963 965
964 966 # processing phases change
965 967 for namespace, value in op.records['listkeys']:
966 968 if namespace == 'phases':
967 969 _pullapplyphases(pullop, value)
968 970
969 971 # processing bookmark update
970 972 for namespace, value in op.records['listkeys']:
971 973 if namespace == 'bookmarks':
972 974 pullop.remotebookmarks = value
973 975 _pullbookmarks(pullop)
974 976
975 977 def _pullbundle2extraprepare(pullop, kwargs):
976 978 """hook function so that extensions can extend the getbundle call"""
977 979 pass
978 980
979 981 def _pullchangeset(pullop):
980 982 """pull changeset from unbundle into the local repo"""
981 983 # We delay the open of the transaction as late as possible so we
982 984 # don't open transaction for nothing or you break future useful
983 985 # rollback call
984 986 if 'changegroup' in pullop.stepsdone:
985 987 return
986 988 pullop.stepsdone.add('changegroup')
987 989 if not pullop.fetch:
988 990 pullop.repo.ui.status(_("no changes found\n"))
989 991 pullop.cgresult = 0
990 992 return
991 993 pullop.gettransaction()
992 994 if pullop.heads is None and list(pullop.common) == [nullid]:
993 995 pullop.repo.ui.status(_("requesting all changes\n"))
994 996 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
995 997 # issue1320, avoid a race if remote changed after discovery
996 998 pullop.heads = pullop.rheads
997 999
998 1000 if pullop.remote.capable('getbundle'):
999 1001 # TODO: get bundlecaps from remote
1000 1002 cg = pullop.remote.getbundle('pull', common=pullop.common,
1001 1003 heads=pullop.heads or pullop.rheads)
1002 1004 elif pullop.heads is None:
1003 1005 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1004 1006 elif not pullop.remote.capable('changegroupsubset'):
1005 1007 raise util.Abort(_("partial pull cannot be done because "
1006 1008 "other repository doesn't support "
1007 1009 "changegroupsubset."))
1008 1010 else:
1009 1011 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1010 1012 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
1011 1013 pullop.remote.url())
1012 1014
1013 1015 def _pullphase(pullop):
1014 1016 # Get remote phases data from remote
1015 1017 if 'phases' in pullop.stepsdone:
1016 1018 return
1017 1019 remotephases = pullop.remote.listkeys('phases')
1018 1020 _pullapplyphases(pullop, remotephases)
1019 1021
1020 1022 def _pullapplyphases(pullop, remotephases):
1021 1023 """apply phase movement from observed remote state"""
1022 1024 if 'phases' in pullop.stepsdone:
1023 1025 return
1024 1026 pullop.stepsdone.add('phases')
1025 1027 publishing = bool(remotephases.get('publishing', False))
1026 1028 if remotephases and not publishing:
1027 1029 # remote is new and unpublishing
1028 1030 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1029 1031 pullop.pulledsubset,
1030 1032 remotephases)
1031 1033 dheads = pullop.pulledsubset
1032 1034 else:
1033 1035 # Remote is old or publishing all common changesets
1034 1036 # should be seen as public
1035 1037 pheads = pullop.pulledsubset
1036 1038 dheads = []
1037 1039 unfi = pullop.repo.unfiltered()
1038 1040 phase = unfi._phasecache.phase
1039 1041 rev = unfi.changelog.nodemap.get
1040 1042 public = phases.public
1041 1043 draft = phases.draft
1042 1044
1043 1045 # exclude changesets already public locally and update the others
1044 1046 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1045 1047 if pheads:
1046 1048 tr = pullop.gettransaction()
1047 1049 phases.advanceboundary(pullop.repo, tr, public, pheads)
1048 1050
1049 1051 # exclude changesets already draft locally and update the others
1050 1052 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1051 1053 if dheads:
1052 1054 tr = pullop.gettransaction()
1053 1055 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1054 1056
1055 1057 def _pullbookmarks(pullop):
1056 1058 """process the remote bookmark information to update the local one"""
1057 1059 if 'bookmarks' in pullop.stepsdone:
1058 1060 return
1059 1061 pullop.stepsdone.add('bookmarks')
1060 1062 repo = pullop.repo
1061 1063 remotebookmarks = pullop.remotebookmarks
1062 1064 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1063 1065 pullop.remote.url(),
1064 1066 pullop.gettransaction,
1065 1067 explicit=pullop.explicitbookmarks)
1066 1068
1067 1069 def _pullobsolete(pullop):
1068 1070 """utility function to pull obsolete markers from a remote
1069 1071
1070 1072 The `gettransaction` is function that return the pull transaction, creating
1071 1073 one if necessary. We return the transaction to inform the calling code that
1072 1074 a new transaction have been created (when applicable).
1073 1075
1074 1076 Exists mostly to allow overriding for experimentation purpose"""
1075 1077 if 'obsmarkers' in pullop.stepsdone:
1076 1078 return
1077 1079 pullop.stepsdone.add('obsmarkers')
1078 1080 tr = None
1079 1081 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1080 1082 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1081 1083 remoteobs = pullop.remote.listkeys('obsolete')
1082 1084 if 'dump0' in remoteobs:
1083 1085 tr = pullop.gettransaction()
1084 1086 for key in sorted(remoteobs, reverse=True):
1085 1087 if key.startswith('dump'):
1086 1088 data = base85.b85decode(remoteobs[key])
1087 1089 pullop.repo.obsstore.mergemarkers(tr, data)
1088 1090 pullop.repo.invalidatevolatilesets()
1089 1091 return tr
1090 1092
1091 1093 def caps20to10(repo):
1092 1094 """return a set with appropriate options to use bundle20 during getbundle"""
1093 1095 caps = set(['HG2X'])
1094 1096 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1095 1097 caps.add('bundle2=' + urllib.quote(capsblob))
1096 1098 return caps
1097 1099
1098 1100 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1099 1101 getbundle2partsorder = []
1100 1102
1101 1103 # Mapping between step name and function
1102 1104 #
1103 1105 # This exists to help extensions wrap steps if necessary
1104 1106 getbundle2partsmapping = {}
1105 1107
1106 1108 def getbundle2partsgenerator(stepname):
1107 1109 """decorator for function generating bundle2 part for getbundle
1108 1110
1109 1111 The function is added to the step -> function mapping and appended to the
1110 1112 list of steps. Beware that decorated functions will be added in order
1111 1113 (this may matter).
1112 1114
1113 1115 You can only use this decorator for new steps, if you want to wrap a step
1114 1116 from an extension, attack the getbundle2partsmapping dictionary directly."""
1115 1117 def dec(func):
1116 1118 assert stepname not in getbundle2partsmapping
1117 1119 getbundle2partsmapping[stepname] = func
1118 1120 getbundle2partsorder.append(stepname)
1119 1121 return func
1120 1122 return dec
1121 1123
1122 1124 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1123 1125 **kwargs):
1124 1126 """return a full bundle (with potentially multiple kind of parts)
1125 1127
1126 1128 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
1127 1129 passed. For now, the bundle can contain only changegroup, but this will
1128 1130 changes when more part type will be available for bundle2.
1129 1131
1130 1132 This is different from changegroup.getchangegroup that only returns an HG10
1131 1133 changegroup bundle. They may eventually get reunited in the future when we
1132 1134 have a clearer idea of the API we what to query different data.
1133 1135
1134 1136 The implementation is at a very early stage and will get massive rework
1135 1137 when the API of bundle is refined.
1136 1138 """
1137 1139 # bundle10 case
1138 1140 if bundlecaps is None or 'HG2X' not in bundlecaps:
1139 1141 if bundlecaps and not kwargs.get('cg', True):
1140 1142 raise ValueError(_('request for bundle10 must include changegroup'))
1141 1143
1142 1144 if kwargs:
1143 1145 raise ValueError(_('unsupported getbundle arguments: %s')
1144 1146 % ', '.join(sorted(kwargs.keys())))
1145 1147 return changegroup.getchangegroup(repo, source, heads=heads,
1146 1148 common=common, bundlecaps=bundlecaps)
1147 1149
1148 1150 # bundle20 case
1149 1151 b2caps = {}
1150 1152 for bcaps in bundlecaps:
1151 1153 if bcaps.startswith('bundle2='):
1152 1154 blob = urllib.unquote(bcaps[len('bundle2='):])
1153 1155 b2caps.update(bundle2.decodecaps(blob))
1154 1156 bundler = bundle2.bundle20(repo.ui, b2caps)
1155 1157
1156 1158 for name in getbundle2partsorder:
1157 1159 func = getbundle2partsmapping[name]
1158 1160 kwargs['heads'] = heads
1159 1161 kwargs['common'] = common
1160 1162 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1161 1163 **kwargs)
1162 1164
1163 1165 return util.chunkbuffer(bundler.getchunks())
1164 1166
1165 1167 @getbundle2partsgenerator('changegroup')
1166 1168 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1167 1169 b2caps=None, heads=None, common=None, **kwargs):
1168 1170 """add a changegroup part to the requested bundle"""
1169 1171 cg = None
1170 1172 if kwargs.get('cg', True):
1171 1173 # build changegroup bundle here.
1172 1174 cg = changegroup.getchangegroup(repo, source, heads=heads,
1173 1175 common=common, bundlecaps=bundlecaps)
1174 1176
1175 1177 if cg:
1176 1178 bundler.newpart('b2x:changegroup', data=cg.getchunks())
1177 1179
1178 1180 @getbundle2partsgenerator('listkeys')
1179 1181 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1180 1182 b2caps=None, **kwargs):
1181 1183 """add parts containing listkeys namespaces to the requested bundle"""
1182 1184 listkeys = kwargs.get('listkeys', ())
1183 1185 for namespace in listkeys:
1184 1186 part = bundler.newpart('b2x:listkeys')
1185 1187 part.addparam('namespace', namespace)
1186 1188 keys = repo.listkeys(namespace).items()
1187 1189 part.data = pushkey.encodekeys(keys)
1188 1190
1189 1191 @getbundle2partsgenerator('obsmarkers')
1190 1192 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1191 1193 b2caps=None, heads=None, **kwargs):
1192 1194 """add an obsolescence markers part to the requested bundle"""
1193 1195 if kwargs.get('obsmarkers', False):
1194 1196 if heads is None:
1195 1197 heads = repo.heads()
1196 1198 subset = [c.node() for c in repo.set('::%ln', heads)]
1197 1199 markers = repo.obsstore.relevantmarkers(subset)
1198 1200 buildobsmarkerspart(bundler, markers)
1199 1201
1200 1202 @getbundle2partsgenerator('extra')
1201 1203 def _getbundleextrapart(bundler, repo, source, bundlecaps=None,
1202 1204 b2caps=None, **kwargs):
1203 1205 """hook function to let extensions add parts to the requested bundle"""
1204 1206 pass
1205 1207
1206 1208 def check_heads(repo, their_heads, context):
1207 1209 """check if the heads of a repo have been modified
1208 1210
1209 1211 Used by peer for unbundling.
1210 1212 """
1211 1213 heads = repo.heads()
1212 1214 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1213 1215 if not (their_heads == ['force'] or their_heads == heads or
1214 1216 their_heads == ['hashed', heads_hash]):
1215 1217 # someone else committed/pushed/unbundled while we
1216 1218 # were transferring data
1217 1219 raise error.PushRaced('repository changed while %s - '
1218 1220 'please try again' % context)
1219 1221
1220 1222 def unbundle(repo, cg, heads, source, url):
1221 1223 """Apply a bundle to a repo.
1222 1224
1223 1225 this function makes sure the repo is locked during the application and have
1224 1226 mechanism to check that no push race occurred between the creation of the
1225 1227 bundle and its application.
1226 1228
1227 1229 If the push was raced as PushRaced exception is raised."""
1228 1230 r = 0
1229 1231 # need a transaction when processing a bundle2 stream
1230 1232 tr = None
1231 1233 lock = repo.lock()
1232 1234 try:
1233 1235 check_heads(repo, heads, 'uploading changes')
1234 1236 # push can proceed
1235 1237 if util.safehasattr(cg, 'params'):
1236 1238 try:
1237 1239 tr = repo.transaction('unbundle')
1238 1240 tr.hookargs['source'] = source
1239 1241 tr.hookargs['url'] = url
1240 1242 tr.hookargs['bundle2-exp'] = '1'
1241 1243 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1242 1244 cl = repo.unfiltered().changelog
1243 1245 p = cl.writepending() and repo.root or ""
1244 1246 repo.hook('b2x-pretransactionclose', throw=True, pending=p,
1245 1247 **tr.hookargs)
1246 1248 tr.close()
1247 1249 repo.hook('b2x-transactionclose', **tr.hookargs)
1248 1250 except Exception, exc:
1249 1251 exc.duringunbundle2 = True
1250 1252 raise
1251 1253 else:
1252 1254 r = changegroup.addchangegroup(repo, cg, source, url)
1253 1255 finally:
1254 1256 if tr is not None:
1255 1257 tr.release()
1256 1258 lock.release()
1257 1259 return r
@@ -1,478 +1,478 b''
1 1 Test exchange of common information using bundle2
2 2
3 3
4 4 $ getmainid() {
5 5 > hg -R main log --template '{node}\n' --rev "$1"
6 6 > }
7 7
8 8 enable obsolescence
9 9
10 10 $ cat >> $HGRCPATH << EOF
11 11 > [experimental]
12 12 > evolution=createmarkers,exchange
13 13 > bundle2-exp=True
14 14 > [ui]
15 15 > ssh=python "$TESTDIR/dummyssh"
16 16 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
17 17 > [web]
18 18 > push_ssl = false
19 19 > allow_push = *
20 20 > [phases]
21 21 > publish=False
22 22 > [hooks]
23 23 > changegroup = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" changegroup"
24 24 > b2x-transactionclose = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
25 25 > EOF
26 26
27 27 The extension requires a repo (currently unused)
28 28
29 29 $ hg init main
30 30 $ cd main
31 31 $ touch a
32 32 $ hg add a
33 33 $ hg commit -m 'a'
34 34
35 35 $ hg unbundle $TESTDIR/bundles/rebase.hg
36 36 adding changesets
37 37 adding manifests
38 38 adding file changes
39 39 added 8 changesets with 7 changes to 7 files (+3 heads)
40 40 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=unbundle HG_URL=bundle:*/rebase.hg (glob)
41 41 (run 'hg heads' to see heads, 'hg merge' to merge)
42 42
43 43 $ cd ..
44 44
45 45 Real world exchange
46 46 =====================
47 47
48 48 Add more obsolescence information
49 49
50 50 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
51 51 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
52 52
53 53 clone --pull
54 54
55 55 $ hg -R main phase --public cd010b8cd998
56 56 $ hg clone main other --pull --rev 9520eea781bc
57 57 adding changesets
58 58 adding manifests
59 59 adding file changes
60 60 added 2 changesets with 2 changes to 2 files
61 61 1 new obsolescence markers
62 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=bundle2 HG_URL=bundle2
62 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
63 63 updating to branch default
64 64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 65 $ hg -R other log -G
66 66 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
67 67 |
68 68 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
69 69
70 70 $ hg -R other debugobsolete
71 71 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
72 72
73 73 pull
74 74
75 75 $ hg -R main phase --public 9520eea781bc
76 76 $ hg -R other pull -r 24b6387c8c8c
77 77 pulling from $TESTTMP/main (glob)
78 78 searching for changes
79 79 adding changesets
80 80 adding manifests
81 81 adding file changes
82 82 added 1 changesets with 1 changes to 1 files (+1 heads)
83 83 1 new obsolescence markers
84 changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=bundle2 HG_URL=bundle2
84 changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
85 85 (run 'hg heads' to see heads, 'hg merge' to merge)
86 86 $ hg -R other log -G
87 87 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
88 88 |
89 89 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
90 90 |/
91 91 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
92 92
93 93 $ hg -R other debugobsolete
94 94 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
95 95 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
96 96
97 97 pull empty (with phase movement)
98 98
99 99 $ hg -R main phase --public 24b6387c8c8c
100 100 $ hg -R other pull -r 24b6387c8c8c
101 101 pulling from $TESTTMP/main (glob)
102 102 no changes found
103 103 $ hg -R other log -G
104 104 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
105 105 |
106 106 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
107 107 |/
108 108 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
109 109
110 110 $ hg -R other debugobsolete
111 111 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
112 112 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
113 113
114 114 pull empty
115 115
116 116 $ hg -R other pull -r 24b6387c8c8c
117 117 pulling from $TESTTMP/main (glob)
118 118 no changes found
119 119 $ hg -R other log -G
120 120 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
121 121 |
122 122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
123 123 |/
124 124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
125 125
126 126 $ hg -R other debugobsolete
127 127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
129 129
130 130 add extra data to test their exchange during push
131 131
132 132 $ hg -R main bookmark --rev eea13746799a book_eea1
133 133 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
134 134 $ hg -R main bookmark --rev 02de42196ebe book_02de
135 135 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
136 136 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
137 137 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
138 138 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
139 139 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
140 140 $ hg -R main bookmark --rev 32af7686d403 book_32af
141 141 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
142 142
143 143 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
144 144 $ hg -R other bookmark --rev cd010b8cd998 book_02de
145 145 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
146 146 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
147 147 $ hg -R other bookmark --rev cd010b8cd998 book_32af
148 148
149 149 $ hg -R main phase --public eea13746799a
150 150
151 151 push
152 152 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
153 153 pushing to other
154 154 searching for changes
155 155 b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
156 156 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_SOURCE=push HG_URL=push
157 157 remote: adding changesets
158 158 remote: adding manifests
159 159 remote: adding file changes
160 160 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
161 161 remote: 1 new obsolescence markers
162 162 updating bookmark book_eea1
163 163 $ hg -R other log -G
164 164 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
165 165 |\
166 166 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
167 167 | |
168 168 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
169 169 |/
170 170 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
171 171
172 172 $ hg -R other debugobsolete
173 173 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
174 174 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
175 175 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
176 176
177 177 pull over ssh
178 178
179 179 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
180 180 pulling from ssh://user@dummy/main
181 181 searching for changes
182 182 adding changesets
183 183 adding manifests
184 184 adding file changes
185 185 added 1 changesets with 1 changes to 1 files (+1 heads)
186 186 1 new obsolescence markers
187 187 updating bookmark book_02de
188 changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=bundle2 HG_URL=bundle2
188 changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
189 189 (run 'hg heads' to see heads, 'hg merge' to merge)
190 190 $ hg -R other debugobsolete
191 191 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
192 192 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
193 193 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
194 194 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
195 195
196 196 pull over http
197 197
198 198 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
199 199 $ cat main.pid >> $DAEMON_PIDS
200 200
201 201 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
202 202 pulling from http://localhost:$HGPORT/
203 203 searching for changes
204 204 adding changesets
205 205 adding manifests
206 206 adding file changes
207 207 added 1 changesets with 1 changes to 1 files (+1 heads)
208 208 1 new obsolescence markers
209 209 updating bookmark book_42cc
210 changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=bundle2 HG_URL=bundle2
210 changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
211 211 (run 'hg heads .' to see heads, 'hg merge' to merge)
212 212 $ cat main-error.log
213 213 $ hg -R other debugobsolete
214 214 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
215 215 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
216 216 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
217 217 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
218 218 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
219 219
220 220 push over ssh
221 221
222 222 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
223 223 pushing to ssh://user@dummy/other
224 224 searching for changes
225 225 remote: adding changesets
226 226 remote: adding manifests
227 227 remote: adding file changes
228 228 remote: added 1 changesets with 1 changes to 1 files
229 229 remote: 1 new obsolescence markers
230 230 updating bookmark book_5fdd
231 231 remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
232 232 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
233 233 $ hg -R other log -G
234 234 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
235 235 |
236 236 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
237 237 |
238 238 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
239 239 | |
240 240 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
241 241 | |/|
242 242 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
243 243 |/ /
244 244 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
245 245 |/
246 246 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
247 247
248 248 $ hg -R other debugobsolete
249 249 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
250 250 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
251 251 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
252 252 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
253 253 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
254 254 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
255 255
256 256 push over http
257 257
258 258 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
259 259 $ cat other.pid >> $DAEMON_PIDS
260 260
261 261 $ hg -R main phase --public 32af7686d403
262 262 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
263 263 pushing to http://localhost:$HGPORT2/
264 264 searching for changes
265 265 remote: adding changesets
266 266 remote: adding manifests
267 267 remote: adding file changes
268 268 remote: added 1 changesets with 1 changes to 1 files
269 269 remote: 1 new obsolescence markers
270 270 updating bookmark book_32af
271 271 $ cat other-error.log
272 272
273 273 Check final content.
274 274
275 275 $ hg -R other log -G
276 276 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
277 277 |
278 278 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
279 279 |
280 280 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
281 281 |
282 282 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
283 283 | |
284 284 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
285 285 | |/|
286 286 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
287 287 |/ /
288 288 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
289 289 |/
290 290 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
291 291
292 292 $ hg -R other debugobsolete
293 293 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
294 294 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 295 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 296 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 297 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 298 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 299 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300 300
301 301 Error Handling
302 302 ==============
303 303
304 304 Check that errors are properly returned to the client during push.
305 305
306 306 Setting up
307 307
308 308 $ cat > failpush.py << EOF
309 309 > """A small extension that makes push fails when using bundle2
310 310 >
311 311 > used to test error handling in bundle2
312 312 > """
313 313 >
314 314 > from mercurial import util
315 315 > from mercurial import bundle2
316 316 > from mercurial import exchange
317 317 > from mercurial import extensions
318 318 >
319 319 > def _pushbundle2failpart(pushop, bundler):
320 320 > reason = pushop.ui.config('failpush', 'reason', None)
321 321 > part = None
322 322 > if reason == 'abort':
323 323 > bundler.newpart('test:abort')
324 324 > if reason == 'unknown':
325 325 > bundler.newpart('TEST:UNKNOWN')
326 326 > if reason == 'race':
327 327 > # 20 Bytes of crap
328 328 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
329 329 >
330 330 > @bundle2.parthandler("test:abort")
331 331 > def handleabort(op, part):
332 332 > raise util.Abort('Abandon ship!', hint="don't panic")
333 333 >
334 334 > def uisetup(ui):
335 335 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
336 336 > exchange.b2partsgenorder.insert(0, 'failpart')
337 337 >
338 338 > EOF
339 339
340 340 $ cd main
341 341 $ hg up tip
342 342 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
343 343 $ echo 'I' > I
344 344 $ hg add I
345 345 $ hg ci -m 'I'
346 346 $ hg id
347 347 e7ec4e813ba6 tip
348 348 $ cd ..
349 349
350 350 $ cat << EOF >> $HGRCPATH
351 351 > [extensions]
352 352 > failpush=$TESTTMP/failpush.py
353 353 > EOF
354 354
355 355 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
356 356 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
357 357 $ cat other.pid >> $DAEMON_PIDS
358 358
359 359 Doing the actual push: Abort error
360 360
361 361 $ cat << EOF >> $HGRCPATH
362 362 > [failpush]
363 363 > reason = abort
364 364 > EOF
365 365
366 366 $ hg -R main push other -r e7ec4e813ba6
367 367 pushing to other
368 368 searching for changes
369 369 abort: Abandon ship!
370 370 (don't panic)
371 371 [255]
372 372
373 373 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
374 374 pushing to ssh://user@dummy/other
375 375 searching for changes
376 376 abort: Abandon ship!
377 377 (don't panic)
378 378 [255]
379 379
380 380 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
381 381 pushing to http://localhost:$HGPORT2/
382 382 searching for changes
383 383 abort: Abandon ship!
384 384 (don't panic)
385 385 [255]
386 386
387 387
388 388 Doing the actual push: unknown mandatory parts
389 389
390 390 $ cat << EOF >> $HGRCPATH
391 391 > [failpush]
392 392 > reason = unknown
393 393 > EOF
394 394
395 395 $ hg -R main push other -r e7ec4e813ba6
396 396 pushing to other
397 397 searching for changes
398 398 abort: missing support for test:unknown
399 399 [255]
400 400
401 401 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
402 402 pushing to ssh://user@dummy/other
403 403 searching for changes
404 404 abort: missing support for test:unknown
405 405 [255]
406 406
407 407 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
408 408 pushing to http://localhost:$HGPORT2/
409 409 searching for changes
410 410 abort: missing support for test:unknown
411 411 [255]
412 412
413 413 Doing the actual push: race
414 414
415 415 $ cat << EOF >> $HGRCPATH
416 416 > [failpush]
417 417 > reason = race
418 418 > EOF
419 419
420 420 $ hg -R main push other -r e7ec4e813ba6
421 421 pushing to other
422 422 searching for changes
423 423 abort: push failed:
424 424 'repository changed while pushing - please try again'
425 425 [255]
426 426
427 427 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
428 428 pushing to ssh://user@dummy/other
429 429 searching for changes
430 430 abort: push failed:
431 431 'repository changed while pushing - please try again'
432 432 [255]
433 433
434 434 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
435 435 pushing to http://localhost:$HGPORT2/
436 436 searching for changes
437 437 abort: push failed:
438 438 'repository changed while pushing - please try again'
439 439 [255]
440 440
441 441 Doing the actual push: hook abort
442 442
443 443 $ cat << EOF >> $HGRCPATH
444 444 > [failpush]
445 445 > reason =
446 446 > [hooks]
447 447 > b2x-pretransactionclose.failpush = false
448 448 > EOF
449 449
450 450 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
451 451 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
452 452 $ cat other.pid >> $DAEMON_PIDS
453 453
454 454 $ hg -R main push other -r e7ec4e813ba6
455 455 pushing to other
456 456 searching for changes
457 457 transaction abort!
458 458 rollback completed
459 459 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=push HG_URL=push
460 460 abort: b2x-pretransactionclose.failpush hook exited with status 1
461 461 [255]
462 462
463 463 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
464 464 pushing to ssh://user@dummy/other
465 465 searching for changes
466 466 abort: b2x-pretransactionclose.failpush hook exited with status 1
467 467 remote: transaction abort!
468 468 remote: rollback completed
469 469 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
470 470 [255]
471 471
472 472 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
473 473 pushing to http://localhost:$HGPORT2/
474 474 searching for changes
475 475 abort: b2x-pretransactionclose.failpush hook exited with status 1
476 476 [255]
477 477
478 478
@@ -1,261 +1,261 b''
1 1 Create an extension to test bundle2 with multiple changegroups
2 2
3 3 $ cat > bundle2.py <<EOF
4 4 > """
5 5 > """
6 6 > from mercurial import changegroup, exchange
7 7 >
8 8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
9 9 > b2caps=None, heads=None, common=None,
10 10 > **kwargs):
11 11 > # Create two changegroups given the common changesets and heads for the
12 12 > # changegroup part we are being requested. Use the parent of each head
13 13 > # in 'heads' as intermediate heads for the first changegroup.
14 14 > intermediates = [repo[r].p1().node() for r in heads]
15 15 > cg = changegroup.getchangegroup(repo, source, heads=intermediates,
16 16 > common=common, bundlecaps=bundlecaps)
17 17 > bundler.newpart('b2x:output', data='changegroup1')
18 18 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
19 19 > cg = changegroup.getchangegroup(repo, source, heads=heads,
20 20 > common=common + intermediates,
21 21 > bundlecaps=bundlecaps)
22 22 > bundler.newpart('b2x:output', data='changegroup2')
23 23 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
24 24 >
25 25 > def _pull(repo, *args, **kwargs):
26 26 > pullop = _orig_pull(repo, *args, **kwargs)
27 27 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
28 28 > return pullop
29 29 >
30 30 > _orig_pull = exchange.pull
31 31 > exchange.pull = _pull
32 32 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
33 33 > EOF
34 34
35 35 $ cat >> $HGRCPATH << EOF
36 36 > [experimental]
37 37 > bundle2-exp=True
38 38 > [ui]
39 39 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
40 40 > EOF
41 41
42 42 Start with a simple repository with a single commit
43 43
44 44 $ hg init repo
45 45 $ cd repo
46 46 $ cat > .hg/hgrc << EOF
47 47 > [extensions]
48 48 > bundle2=$TESTTMP/bundle2.py
49 49 > EOF
50 50
51 51 $ echo A > A
52 52 $ hg commit -A -m A -q
53 53 $ cd ..
54 54
55 55 Clone
56 56
57 57 $ hg clone -q repo clone
58 58
59 59 Add two linear commits
60 60
61 61 $ cd repo
62 62 $ echo B > B
63 63 $ hg commit -A -m B -q
64 64 $ echo C > C
65 65 $ hg commit -A -m C -q
66 66
67 67 $ cd ../clone
68 68 $ cat >> .hg/hgrc <<EOF
69 69 > [hooks]
70 70 > pretxnchangegroup = sh -c "python \"$TESTDIR/printenv.py\" pretxnchangegroup"
71 71 > changegroup = sh -c "python \"$TESTDIR/printenv.py\" changegroup"
72 72 > incoming = sh -c "python \"$TESTDIR/printenv.py\" incoming"
73 73 > EOF
74 74
75 75 Pull the new commits in the clone
76 76
77 77 $ hg pull
78 78 pulling from $TESTTMP/repo (glob)
79 79 searching for changes
80 80 remote: changegroup1
81 81 adding changesets
82 82 adding manifests
83 83 adding file changes
84 84 added 1 changesets with 1 changes to 1 files
85 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
85 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
86 86 remote: changegroup2
87 87 adding changesets
88 88 adding manifests
89 89 adding file changes
90 90 added 1 changesets with 1 changes to 1 files
91 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
92 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=bundle2 HG_URL=bundle2
93 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=bundle2 HG_URL=bundle2
94 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
95 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
91 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
92 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
93 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
94 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
95 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
96 96 pullop.cgresult is 1
97 97 (run 'hg update' to get a working copy)
98 98 $ hg update
99 99 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
100 100 $ hg log -G
101 101 @ 2:f838bfaca5c7 public test C
102 102 |
103 103 o 1:27547f69f254 public test B
104 104 |
105 105 o 0:4a2df7238c3b public test A
106 106
107 107 Add more changesets with multiple heads to the original repository
108 108
109 109 $ cd ../repo
110 110 $ echo D > D
111 111 $ hg commit -A -m D -q
112 112 $ hg up -r 1
113 113 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
114 114 $ echo E > E
115 115 $ hg commit -A -m E -q
116 116 $ echo F > F
117 117 $ hg commit -A -m F -q
118 118 $ hg up -r 1
119 119 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
120 120 $ echo G > G
121 121 $ hg commit -A -m G -q
122 122 $ hg up -r 3
123 123 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
124 124 $ echo H > H
125 125 $ hg commit -A -m H -q
126 126 $ hg log -G
127 127 @ 7:5cd59d311f65 draft test H
128 128 |
129 129 | o 6:1d14c3ce6ac0 draft test G
130 130 | |
131 131 | | o 5:7f219660301f draft test F
132 132 | | |
133 133 | | o 4:8a5212ebc852 draft test E
134 134 | |/
135 135 o | 3:b3325c91a4d9 draft test D
136 136 | |
137 137 o | 2:f838bfaca5c7 draft test C
138 138 |/
139 139 o 1:27547f69f254 draft test B
140 140 |
141 141 o 0:4a2df7238c3b draft test A
142 142
143 143 New heads are reported during transfer and properly accounted for in
144 144 pullop.cgresult
145 145
146 146 $ cd ../clone
147 147 $ hg pull
148 148 pulling from $TESTTMP/repo (glob)
149 149 searching for changes
150 150 remote: changegroup1
151 151 adding changesets
152 152 adding manifests
153 153 adding file changes
154 154 added 2 changesets with 2 changes to 2 files (+1 heads)
155 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
155 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
156 156 remote: changegroup2
157 157 adding changesets
158 158 adding manifests
159 159 adding file changes
160 160 added 3 changesets with 3 changes to 3 files (+1 heads)
161 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
162 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=bundle2 HG_URL=bundle2
163 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=bundle2 HG_URL=bundle2
164 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=bundle2 HG_URL=bundle2
165 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
166 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
167 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
168 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
161 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
162 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
164 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
165 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
166 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
167 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
168 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
169 169 pullop.cgresult is 3
170 170 (run 'hg heads' to see heads, 'hg merge' to merge)
171 171 $ hg log -G
172 172 o 7:5cd59d311f65 public test H
173 173 |
174 174 | o 6:1d14c3ce6ac0 public test G
175 175 | |
176 176 | | o 5:7f219660301f public test F
177 177 | | |
178 178 | | o 4:8a5212ebc852 public test E
179 179 | |/
180 180 o | 3:b3325c91a4d9 public test D
181 181 | |
182 182 @ | 2:f838bfaca5c7 public test C
183 183 |/
184 184 o 1:27547f69f254 public test B
185 185 |
186 186 o 0:4a2df7238c3b public test A
187 187
188 188 Removing a head from the original repository by merging it
189 189
190 190 $ cd ../repo
191 191 $ hg merge -r 6 -q
192 192 $ hg commit -m Merge
193 193 $ echo I > I
194 194 $ hg commit -A -m H -q
195 195 $ hg log -G
196 196 @ 9:9d18e5bd9ab0 draft test H
197 197 |
198 198 o 8:71bd7b46de72 draft test Merge
199 199 |\
200 200 | o 7:5cd59d311f65 draft test H
201 201 | |
202 202 o | 6:1d14c3ce6ac0 draft test G
203 203 | |
204 204 | | o 5:7f219660301f draft test F
205 205 | | |
206 206 +---o 4:8a5212ebc852 draft test E
207 207 | |
208 208 | o 3:b3325c91a4d9 draft test D
209 209 | |
210 210 | o 2:f838bfaca5c7 draft test C
211 211 |/
212 212 o 1:27547f69f254 draft test B
213 213 |
214 214 o 0:4a2df7238c3b draft test A
215 215
216 216 Removed heads are reported during transfer and properly accounted for in
217 217 pullop.cgresult
218 218
219 219 $ cd ../clone
220 220 $ hg pull
221 221 pulling from $TESTTMP/repo (glob)
222 222 searching for changes
223 223 remote: changegroup1
224 224 adding changesets
225 225 adding manifests
226 226 adding file changes
227 227 added 1 changesets with 0 changes to 0 files (-1 heads)
228 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
228 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
229 229 remote: changegroup2
230 230 adding changesets
231 231 adding manifests
232 232 adding file changes
233 233 added 1 changesets with 1 changes to 1 files
234 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
235 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=bundle2 HG_URL=bundle2
236 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=bundle2 HG_URL=bundle2
237 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
238 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
234 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
235 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
236 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
237 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
238 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
239 239 pullop.cgresult is -2
240 240 (run 'hg update' to get a working copy)
241 241 $ hg log -G
242 242 o 9:9d18e5bd9ab0 public test H
243 243 |
244 244 o 8:71bd7b46de72 public test Merge
245 245 |\
246 246 | o 7:5cd59d311f65 public test H
247 247 | |
248 248 o | 6:1d14c3ce6ac0 public test G
249 249 | |
250 250 | | o 5:7f219660301f public test F
251 251 | | |
252 252 +---o 4:8a5212ebc852 public test E
253 253 | |
254 254 | o 3:b3325c91a4d9 public test D
255 255 | |
256 256 | @ 2:f838bfaca5c7 public test C
257 257 |/
258 258 o 1:27547f69f254 public test B
259 259 |
260 260 o 0:4a2df7238c3b public test A
261 261
General Comments 0
You need to be logged in to leave comments. Login now