push: gather all bookmark decisions together...
Pierre-Yves David -
r22651:b901645a default
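For orientation before reading the diff: the change gathers every bookmark push decision into a single discovery step (_pushdiscoverybookmarks), which fills pushop.outbookmarks and tracks bookmarks explicitly requested with -B. A minimal standalone sketch of that decision gathering follows; it is not part of the change itself, the category names mirror bookmarks.compare() as used in the diff, the ancestor filtering done for partial pushes is omitted, and the sample data at the end is made up for illustration.

# Sketch: turn a bookmark comparison into a list of outgoing bookmark updates.
# An empty old value means "export the bookmark", an empty new value means
# "delete it on the remote".
def gather_bookmark_decisions(comp, explicit):
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
    explicit = set(explicit)      # bookmarks named on the command line
    out = []
    for b, scid, dcid in advsrc:                     # remote can fast-forward
        explicit.discard(b)
        out.append((b, dcid, scid))
    for b, scid, dcid in addsrc:                     # new bookmark, export it
        explicit.discard(b)
        out.append((b, '', scid))
    for b, scid, dcid in advdst + diverge + differ:  # overwrite remote value
        explicit.discard(b)
        out.append((b, dcid, scid))
    for b, scid, dcid in adddst:                     # deleted locally, delete remotely
        explicit.discard(b)
        out.append((b, dcid, ''))
    # whatever is left in `explicit` exists neither locally nor remotely
    return sorted(out), explicit

comp = ([('new', 'aaa', None)], [], [('moved', 'bbb', 'ccc')], [], [], [], [])
print(gather_bookmark_decisions(comp, {'new', 'typo'}))

In the diff, any name left over is warned about and pushop.bkresult is set to 2; the updated test below still expects exit status [2] for 'hg push -B badname'.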
@@ -1,1170 +1,1195
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 13
14 14 def readbundle(ui, fh, fname, vfs=None):
15 15 header = changegroup.readexactly(fh, 4)
16 16
17 17 alg = None
18 18 if not fname:
19 19 fname = "stream"
20 20 if not header.startswith('HG') and header.startswith('\0'):
21 21 fh = changegroup.headerlessfixup(fh, header)
22 22 header = "HG10"
23 23 alg = 'UN'
24 24 elif vfs:
25 25 fname = vfs.join(fname)
26 26
27 27 magic, version = header[0:2], header[2:4]
28 28
29 29 if magic != 'HG':
30 30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 31 if version == '10':
32 32 if alg is None:
33 33 alg = changegroup.readexactly(fh, 2)
34 34 return changegroup.cg1unpacker(fh, alg)
35 35 elif version == '2X':
36 36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 37 else:
38 38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39 39
40 40 def buildobsmarkerspart(bundler, markers):
41 41 """add an obsmarker part to the bundler with <markers>
42 42
43 43 No part is created if markers is empty.
44 44 Raises ValueError if the bundler doesn't support any known obsmarker format.
45 45 """
46 46 if markers:
47 47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
48 48 version = obsolete.commonversion(remoteversions)
49 49 if version is None:
50 50 raise ValueError('bundler do not support common obsmarker format')
51 51 stream = obsolete.encodemarkers(markers, True, version=version)
52 52 return bundler.newpart('B2X:OBSMARKERS', data=stream)
53 53 return None
54 54
55 55 class pushoperation(object):
56 56 """A object that represent a single push operation
57 57
58 58 It purpose is to carry push related state and very common operation.
59 59
60 60 A new should be created at the beginning of each push and discarded
61 61 afterward.
62 62 """
63 63
64 64 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
65 65 bookmarks=()):
66 66 # repo we push from
67 67 self.repo = repo
68 68 self.ui = repo.ui
69 69 # repo we push to
70 70 self.remote = remote
71 71 # force option provided
72 72 self.force = force
73 73 # revs to be pushed (None is "all")
74 74 self.revs = revs
75 75 # bookmark explicitly pushed
76 76 self.bookmarks = bookmarks
77 77 # allow push of new branch
78 78 self.newbranch = newbranch
79 79 # did a local lock get acquired?
80 80 self.locallocked = None
81 81 # steps already performed
82 82 # (used to check what steps have already been performed through bundle2)
83 83 self.stepsdone = set()
84 84 # Integer version of the changegroup push result
85 85 # - None means nothing to push
86 86 # - 0 means HTTP error
87 87 # - 1 means we pushed and remote head count is unchanged *or*
88 88 # we have outgoing changesets but refused to push
89 89 # - other values as described by addchangegroup()
90 90 self.cgresult = None
91 91 # Boolean value for the bookmark push
92 92 self.bkresult = None
93 93 # discover.outgoing object (contains common and outgoing data)
94 94 self.outgoing = None
95 95 # all remote heads before the push
96 96 self.remoteheads = None
97 97 # testable as a boolean indicating if any nodes are missing locally.
98 98 self.incoming = None
99 99 # phase changes that must be pushed alongside the changesets
100 100 self.outdatedphases = None
101 101 # phase changes that must be pushed if the changeset push fails
102 102 self.fallbackoutdatedphases = None
103 103 # outgoing obsmarkers
104 104 self.outobsmarkers = set()
105 105 # outgoing bookmarks
106 106 self.outbookmarks = []
107 107
108 108 @util.propertycache
109 109 def futureheads(self):
110 110 """future remote heads if the changeset push succeeds"""
111 111 return self.outgoing.missingheads
112 112
113 113 @util.propertycache
114 114 def fallbackheads(self):
115 115 """future remote heads if the changeset push fails"""
116 116 if self.revs is None:
117 117 # no target to push, all common heads are relevant
118 118 return self.outgoing.commonheads
119 119 unfi = self.repo.unfiltered()
120 120 # I want cheads = heads(::missingheads and ::commonheads)
121 121 # (missingheads is revs with secret changeset filtered out)
122 122 #
123 123 # This can be expressed as:
124 124 # cheads = ( (missingheads and ::commonheads)
125 125 # + (commonheads and ::missingheads))
126 126 # )
127 127 #
128 128 # while trying to push we already computed the following:
129 129 # common = (::commonheads)
130 130 # missing = ((commonheads::missingheads) - commonheads)
131 131 #
132 132 # We can pick:
133 133 # * missingheads part of common (::commonheads)
134 134 common = set(self.outgoing.common)
135 135 nm = self.repo.changelog.nodemap
136 136 cheads = [node for node in self.revs if nm[node] in common]
137 137 # and
138 138 # * commonheads parents on missing
139 139 revset = unfi.set('%ln and parents(roots(%ln))',
140 140 self.outgoing.commonheads,
141 141 self.outgoing.missing)
142 142 cheads.extend(c.node() for c in revset)
143 143 return cheads
144 144
145 145 @property
146 146 def commonheads(self):
147 147 """set of all common heads after changeset bundle push"""
148 148 if self.cgresult:
149 149 return self.futureheads
150 150 else:
151 151 return self.fallbackheads
152 152
153 153 # mapping of messages used when pushing bookmarks
154 154 bookmsgmap = {'update': (_("updating bookmark %s\n"),
155 155 _('updating bookmark %s failed!\n')),
156 156 'export': (_("exporting bookmark %s\n"),
157 157 _('exporting bookmark %s failed!\n')),
158 158 'delete': (_("deleting remote bookmark %s\n"),
159 159 _('deleting remote bookmark %s failed!\n')),
160 160 }
161 161
162 162
163 163 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
164 164 '''Push outgoing changesets (limited by revs) from a local
165 165 repository to remote. Return an integer:
166 166 - None means nothing to push
167 167 - 0 means HTTP error
168 168 - 1 means we pushed and remote head count is unchanged *or*
169 169 we have outgoing changesets but refused to push
170 170 - other values as described by addchangegroup()
171 171 '''
172 172 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
173 173 if pushop.remote.local():
174 174 missing = (set(pushop.repo.requirements)
175 175 - pushop.remote.local().supported)
176 176 if missing:
177 177 msg = _("required features are not"
178 178 " supported in the destination:"
179 179 " %s") % (', '.join(sorted(missing)))
180 180 raise util.Abort(msg)
181 181
182 182 # there are two ways to push to remote repo:
183 183 #
184 184 # addchangegroup assumes local user can lock remote
185 185 # repo (local filesystem, old ssh servers).
186 186 #
187 187 # unbundle assumes local user cannot lock remote repo (new ssh
188 188 # servers, http servers).
189 189
190 190 if not pushop.remote.canpush():
191 191 raise util.Abort(_("destination does not support push"))
192 192 # get local lock as we might write phase data
193 193 locallock = None
194 194 try:
195 195 locallock = pushop.repo.lock()
196 196 pushop.locallocked = True
197 197 except IOError, err:
198 198 pushop.locallocked = False
199 199 if err.errno != errno.EACCES:
200 200 raise
201 201 # source repo cannot be locked.
202 202 # We do not abort the push, but just disable the local phase
203 203 # synchronisation.
204 204 msg = 'cannot lock source repository: %s\n' % err
205 205 pushop.ui.debug(msg)
206 206 try:
207 207 pushop.repo.checkpush(pushop)
208 208 lock = None
209 209 unbundle = pushop.remote.capable('unbundle')
210 210 if not unbundle:
211 211 lock = pushop.remote.lock()
212 212 try:
213 213 _pushdiscovery(pushop)
214 214 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
215 215 False)
216 216 and pushop.remote.capable('bundle2-exp')):
217 217 _pushbundle2(pushop)
218 218 _pushchangeset(pushop)
219 219 _pushsyncphase(pushop)
220 220 _pushobsolete(pushop)
221 221 _pushbookmark(pushop)
222 222 finally:
223 223 if lock is not None:
224 224 lock.release()
225 225 finally:
226 226 if locallock is not None:
227 227 locallock.release()
228 228
229 if pushop.bookmarks:
230 pushop.bkresult = bookmod.pushtoremote(repo.ui, repo, remote,
231 pushop.bookmarks)
232
233 229 return pushop
234 230
235 231 # list of steps to perform discovery before push
236 232 pushdiscoveryorder = []
237 233
238 234 # Mapping between step name and function
239 235 #
240 236 # This exists to help extensions wrap steps if necessary
241 237 pushdiscoverymapping = {}
242 238
243 239 def pushdiscovery(stepname):
244 240 """decorator for function performing discovery before push
245 241
246 242 The function is added to the step -> function mapping and appended to the
247 243 list of steps. Beware that decorated functions will be added in order (this
248 244 may matter).
249 245
250 246 You can only use this decorator for a new step; if you want to wrap a step
251 247 from an extension, change the pushdiscoverymapping dictionary directly."""
252 248 def dec(func):
253 249 assert stepname not in pushdiscoverymapping
254 250 pushdiscoverymapping[stepname] = func
255 251 pushdiscoveryorder.append(stepname)
256 252 return func
257 253 return dec
258 254
259 255 def _pushdiscovery(pushop):
260 256 """Run all discovery steps"""
261 257 for stepname in pushdiscoveryorder:
262 258 step = pushdiscoverymapping[stepname]
263 259 step(pushop)
264 260
265 261 @pushdiscovery('changeset')
266 262 def _pushdiscoverychangeset(pushop):
267 263 """discover the changeset that need to be pushed"""
268 264 unfi = pushop.repo.unfiltered()
269 265 fci = discovery.findcommonincoming
270 266 commoninc = fci(unfi, pushop.remote, force=pushop.force)
271 267 common, inc, remoteheads = commoninc
272 268 fco = discovery.findcommonoutgoing
273 269 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
274 270 commoninc=commoninc, force=pushop.force)
275 271 pushop.outgoing = outgoing
276 272 pushop.remoteheads = remoteheads
277 273 pushop.incoming = inc
278 274
279 275 @pushdiscovery('phase')
280 276 def _pushdiscoveryphase(pushop):
281 277 """discover the phase that needs to be pushed
282 278
283 279 (computed for both the success and failure cases of the changeset push)"""
284 280 outgoing = pushop.outgoing
285 281 unfi = pushop.repo.unfiltered()
286 282 remotephases = pushop.remote.listkeys('phases')
287 283 publishing = remotephases.get('publishing', False)
288 284 ana = phases.analyzeremotephases(pushop.repo,
289 285 pushop.fallbackheads,
290 286 remotephases)
291 287 pheads, droots = ana
292 288 extracond = ''
293 289 if not publishing:
294 290 extracond = ' and public()'
295 291 revset = 'heads((%%ln::%%ln) %s)' % extracond
296 292 # Get the list of all revs draft on remote but public here.
297 293 # XXX Beware that this revset breaks if droots is not strictly
298 294 # XXX roots; we may want to ensure it is, but that is costly
299 295 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
300 296 if not outgoing.missing:
301 297 future = fallback
302 298 else:
303 299 # add the changesets we are going to push as draft
304 300 #
305 301 # should not be necessary for a publishing server, but because of an
306 302 # issue fixed in xxxxx we have to do it anyway.
307 303 fdroots = list(unfi.set('roots(%ln + %ln::)',
308 304 outgoing.missing, droots))
309 305 fdroots = [f.node() for f in fdroots]
310 306 future = list(unfi.set(revset, fdroots, pushop.futureheads))
311 307 pushop.outdatedphases = future
312 308 pushop.fallbackoutdatedphases = fallback
313 309
314 310 @pushdiscovery('obsmarker')
315 311 def _pushdiscoveryobsmarkers(pushop):
316 312 if (obsolete._enabled
317 313 and pushop.repo.obsstore
318 314 and 'obsolete' in pushop.remote.listkeys('namespaces')):
319 315 repo = pushop.repo
320 316 # very naive computation that can be quite expensive on a big repo.
321 317 # However: evolution is currently slow on them anyway.
322 318 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
323 319 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
324 320
325 321 @pushdiscovery('bookmarks')
326 322 def _pushdiscoverybookmarks(pushop):
327 323 ui = pushop.ui
328 324 repo = pushop.repo.unfiltered()
329 325 remote = pushop.remote
330 326 ui.debug("checking for updated bookmarks\n")
331 327 ancestors = ()
332 328 if pushop.revs:
333 329 revnums = map(repo.changelog.rev, pushop.revs)
334 330 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
335 331 remotebookmark = remote.listkeys('bookmarks')
336 332
333 explicit = set(pushop.bookmarks)
334
337 335 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
338 336 addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
339 337 for b, scid, dcid in advsrc:
338 if b in explicit:
339 explicit.remove(b)
340 340 if not ancestors or repo[scid].rev() in ancestors:
341 341 pushop.outbookmarks.append((b, dcid, scid))
342 # search added bookmark
343 for b, scid, dcid in addsrc:
344 if b in explicit:
345 explicit.remove(b)
346 pushop.outbookmarks.append((b, '', scid))
347 # search for overwritten bookmark
348 for b, scid, dcid in advdst + diverge + differ:
349 if b in explicit:
350 explicit.remove(b)
351 pushop.outbookmarks.append((b, dcid, scid))
352 # search for bookmark to delete
353 for b, scid, dcid in adddst:
354 if b in explicit:
355 explicit.remove(b)
356 # treat as "deleted locally"
357 pushop.outbookmarks.append((b, dcid, ''))
358
359 if explicit:
360 explicit = sorted(explicit)
361 # we should probably list all of them
362 ui.warn(_('bookmark %s does not exist on the local '
363 'or remote repository!\n') % explicit[0])
364 pushop.bkresult = 2
365
366 pushop.outbookmarks.sort()
342 367
343 368 def _pushcheckoutgoing(pushop):
344 369 outgoing = pushop.outgoing
345 370 unfi = pushop.repo.unfiltered()
346 371 if not outgoing.missing:
347 372 # nothing to push
348 373 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
349 374 return False
350 375 # something to push
351 376 if not pushop.force:
352 377 # if repo.obsstore == False --> no obsolete
353 378 # then, save the iteration
354 379 if unfi.obsstore:
355 380 # these messages are defined here because of the 80-char line limit
356 381 mso = _("push includes obsolete changeset: %s!")
357 382 mst = {"unstable": _("push includes unstable changeset: %s!"),
358 383 "bumped": _("push includes bumped changeset: %s!"),
359 384 "divergent": _("push includes divergent changeset: %s!")}
360 385 # If we are to push and there is at least one
361 386 # obsolete or unstable changeset in missing, at
362 387 # least one of the missingheads will be obsolete or
363 388 # unstable. So checking heads only is ok
364 389 for node in outgoing.missingheads:
365 390 ctx = unfi[node]
366 391 if ctx.obsolete():
367 392 raise util.Abort(mso % ctx)
368 393 elif ctx.troubled():
369 394 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
370 395 newbm = pushop.ui.configlist('bookmarks', 'pushing')
371 396 discovery.checkheads(unfi, pushop.remote, outgoing,
372 397 pushop.remoteheads,
373 398 pushop.newbranch,
374 399 bool(pushop.incoming),
375 400 newbm)
376 401 return True
377 402
378 403 # List of names of steps to perform for an outgoing bundle2, order matters.
379 404 b2partsgenorder = []
380 405
381 406 # Mapping between step name and function
382 407 #
383 408 # This exists to help extensions wrap steps if necessary
384 409 b2partsgenmapping = {}
385 410
386 411 def b2partsgenerator(stepname):
387 412 """decorator for function generating bundle2 part
388 413
389 414 The function is added to the step -> function mapping and appended to the
390 415 list of steps. Beware that decorated functions will be added in order
391 416 (this may matter).
392 417
393 418 You can only use this decorator for new steps; if you want to wrap a step
394 419 from an extension, modify the b2partsgenmapping dictionary directly."""
395 420 def dec(func):
396 421 assert stepname not in b2partsgenmapping
397 422 b2partsgenmapping[stepname] = func
398 423 b2partsgenorder.append(stepname)
399 424 return func
400 425 return dec
401 426
402 427 @b2partsgenerator('changeset')
403 428 def _pushb2ctx(pushop, bundler):
404 429 """handle changegroup push through bundle2
405 430
406 431 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
407 432 """
408 433 if 'changesets' in pushop.stepsdone:
409 434 return
410 435 pushop.stepsdone.add('changesets')
411 436 # Send known heads to the server for race detection.
412 437 if not _pushcheckoutgoing(pushop):
413 438 return
414 439 pushop.repo.prepushoutgoinghooks(pushop.repo,
415 440 pushop.remote,
416 441 pushop.outgoing)
417 442 if not pushop.force:
418 443 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
419 444 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', pushop.outgoing)
420 445 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
421 446 def handlereply(op):
422 447 """extract addchangroup returns from server reply"""
423 448 cgreplies = op.records.getreplies(cgpart.id)
424 449 assert len(cgreplies['changegroup']) == 1
425 450 pushop.cgresult = cgreplies['changegroup'][0]['return']
426 451 return handlereply
427 452
428 453 @b2partsgenerator('phase')
429 454 def _pushb2phases(pushop, bundler):
430 455 """handle phase push through bundle2"""
431 456 if 'phases' in pushop.stepsdone:
432 457 return
433 458 b2caps = bundle2.bundle2caps(pushop.remote)
434 459 if not 'b2x:pushkey' in b2caps:
435 460 return
436 461 pushop.stepsdone.add('phases')
437 462 part2node = []
438 463 enc = pushkey.encode
439 464 for newremotehead in pushop.outdatedphases:
440 465 part = bundler.newpart('b2x:pushkey')
441 466 part.addparam('namespace', enc('phases'))
442 467 part.addparam('key', enc(newremotehead.hex()))
443 468 part.addparam('old', enc(str(phases.draft)))
444 469 part.addparam('new', enc(str(phases.public)))
445 470 part2node.append((part.id, newremotehead))
446 471 def handlereply(op):
447 472 for partid, node in part2node:
448 473 partrep = op.records.getreplies(partid)
449 474 results = partrep['pushkey']
450 475 assert len(results) <= 1
451 476 msg = None
452 477 if not results:
453 478 msg = _('server ignored update of %s to public!\n') % node
454 479 elif not int(results[0]['return']):
455 480 msg = _('updating %s to public failed!\n') % node
456 481 if msg is not None:
457 482 pushop.ui.warn(msg)
458 483 return handlereply
459 484
460 485 @b2partsgenerator('obsmarkers')
461 486 def _pushb2obsmarkers(pushop, bundler):
462 487 if 'obsmarkers' in pushop.stepsdone:
463 488 return
464 489 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
465 490 if obsolete.commonversion(remoteversions) is None:
466 491 return
467 492 pushop.stepsdone.add('obsmarkers')
468 493 if pushop.outobsmarkers:
469 494 buildobsmarkerspart(bundler, pushop.outobsmarkers)
470 495
471 496 @b2partsgenerator('bookmarks')
472 497 def _pushb2bookmarks(pushop, bundler):
473 498 """handle phase push through bundle2"""
474 499 if 'bookmarks' in pushop.stepsdone:
475 500 return
476 501 b2caps = bundle2.bundle2caps(pushop.remote)
477 502 if 'b2x:pushkey' not in b2caps:
478 503 return
479 504 pushop.stepsdone.add('bookmarks')
480 505 part2book = []
481 506 enc = pushkey.encode
482 507 for book, old, new in pushop.outbookmarks:
483 508 part = bundler.newpart('b2x:pushkey')
484 509 part.addparam('namespace', enc('bookmarks'))
485 510 part.addparam('key', enc(book))
486 511 part.addparam('old', enc(old))
487 512 part.addparam('new', enc(new))
488 513 action = 'update'
489 514 if not old:
490 515 action = 'export'
491 516 elif not new:
492 517 action = 'delete'
493 518 part2book.append((part.id, book, action))
494 519
495 520
496 521 def handlereply(op):
497 522 ui = pushop.ui
498 523 for partid, book, action in part2book:
499 524 partrep = op.records.getreplies(partid)
500 525 results = partrep['pushkey']
501 526 assert len(results) <= 1
502 527 if not results:
503 528 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
504 529 else:
505 530 ret = int(results[0]['return'])
506 531 if ret:
507 532 ui.status(bookmsgmap[action][0] % book)
508 533 else:
509 534 ui.warn(bookmsgmap[action][1] % book)
510 535 if pushop.bkresult is not None:
511 536 pushop.bkresult = 1
512 537 return handlereply
513 538
514 539
515 540 def _pushbundle2(pushop):
516 541 """push data to the remote using bundle2
517 542
518 543 The only currently supported type of data is changegroup but this will
519 544 evolve in the future."""
520 545 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
521 546 # create reply capability
522 547 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
523 548 bundler.newpart('b2x:replycaps', data=capsblob)
524 549 replyhandlers = []
525 550 for partgenname in b2partsgenorder:
526 551 partgen = b2partsgenmapping[partgenname]
527 552 ret = partgen(pushop, bundler)
528 553 if callable(ret):
529 554 replyhandlers.append(ret)
530 555 # do not push if nothing to push
531 556 if bundler.nbparts <= 1:
532 557 return
533 558 stream = util.chunkbuffer(bundler.getchunks())
534 559 try:
535 560 reply = pushop.remote.unbundle(stream, ['force'], 'push')
536 561 except error.BundleValueError, exc:
537 562 raise util.Abort('missing support for %s' % exc)
538 563 try:
539 564 op = bundle2.processbundle(pushop.repo, reply)
540 565 except error.BundleValueError, exc:
541 566 raise util.Abort('missing support for %s' % exc)
542 567 for rephand in replyhandlers:
543 568 rephand(op)
544 569
545 570 def _pushchangeset(pushop):
546 571 """Make the actual push of changeset bundle to remote repo"""
547 572 if 'changesets' in pushop.stepsdone:
548 573 return
549 574 pushop.stepsdone.add('changesets')
550 575 if not _pushcheckoutgoing(pushop):
551 576 return
552 577 pushop.repo.prepushoutgoinghooks(pushop.repo,
553 578 pushop.remote,
554 579 pushop.outgoing)
555 580 outgoing = pushop.outgoing
556 581 unbundle = pushop.remote.capable('unbundle')
557 582 # TODO: get bundlecaps from remote
558 583 bundlecaps = None
559 584 # create a changegroup from local
560 585 if pushop.revs is None and not (outgoing.excluded
561 586 or pushop.repo.changelog.filteredrevs):
562 587 # push everything,
563 588 # use the fast path, no race possible on push
564 589 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
565 590 cg = changegroup.getsubset(pushop.repo,
566 591 outgoing,
567 592 bundler,
568 593 'push',
569 594 fastpath=True)
570 595 else:
571 596 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
572 597 bundlecaps)
573 598
574 599 # apply changegroup to remote
575 600 if unbundle:
576 601 # local repo finds heads on server, finds out what
577 602 # revs it must push. once revs transferred, if server
578 603 # finds it has different heads (someone else won
579 604 # commit/push race), server aborts.
580 605 if pushop.force:
581 606 remoteheads = ['force']
582 607 else:
583 608 remoteheads = pushop.remoteheads
584 609 # ssh: return remote's addchangegroup()
585 610 # http: return remote's addchangegroup() or 0 for error
586 611 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
587 612 pushop.repo.url())
588 613 else:
589 614 # we return an integer indicating remote head count
590 615 # change
591 616 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
592 617 pushop.repo.url())
593 618
594 619 def _pushsyncphase(pushop):
595 620 """synchronise phase information locally and remotely"""
596 621 cheads = pushop.commonheads
597 622 # even when we don't push, exchanging phase data is useful
598 623 remotephases = pushop.remote.listkeys('phases')
599 624 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
600 625 and remotephases # server supports phases
601 626 and pushop.cgresult is None # nothing was pushed
602 627 and remotephases.get('publishing', False)):
603 628 # When:
604 629 # - this is a subrepo push
605 630 # - and remote support phase
606 631 # - and no changeset was pushed
607 632 # - and remote is publishing
608 633 # We may be in issue 3871 case!
609 634 # We drop the possible phase synchronisation done by
610 635 # courtesy to publish changesets possibly locally draft
611 636 # on the remote.
612 637 remotephases = {'publishing': 'True'}
613 638 if not remotephases: # old server or public only reply from non-publishing
614 639 _localphasemove(pushop, cheads)
615 640 # don't push any phase data as there is nothing to push
616 641 else:
617 642 ana = phases.analyzeremotephases(pushop.repo, cheads,
618 643 remotephases)
619 644 pheads, droots = ana
620 645 ### Apply remote phase on local
621 646 if remotephases.get('publishing', False):
622 647 _localphasemove(pushop, cheads)
623 648 else: # publish = False
624 649 _localphasemove(pushop, pheads)
625 650 _localphasemove(pushop, cheads, phases.draft)
626 651 ### Apply local phase on remote
627 652
628 653 if pushop.cgresult:
629 654 if 'phases' in pushop.stepsdone:
630 655 # phases already pushed through bundle2
631 656 return
632 657 outdated = pushop.outdatedphases
633 658 else:
634 659 outdated = pushop.fallbackoutdatedphases
635 660
636 661 pushop.stepsdone.add('phases')
637 662
638 663 # filter heads already turned public by the push
639 664 outdated = [c for c in outdated if c.node() not in pheads]
640 665 b2caps = bundle2.bundle2caps(pushop.remote)
641 666 if 'b2x:pushkey' in b2caps:
642 667 # server supports bundle2, let's do a batched push through it
643 668 #
644 669 # This will eventually be unified with the changesets bundle2 push
645 670 bundler = bundle2.bundle20(pushop.ui, b2caps)
646 671 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
647 672 bundler.newpart('b2x:replycaps', data=capsblob)
648 673 part2node = []
649 674 enc = pushkey.encode
650 675 for newremotehead in outdated:
651 676 part = bundler.newpart('b2x:pushkey')
652 677 part.addparam('namespace', enc('phases'))
653 678 part.addparam('key', enc(newremotehead.hex()))
654 679 part.addparam('old', enc(str(phases.draft)))
655 680 part.addparam('new', enc(str(phases.public)))
656 681 part2node.append((part.id, newremotehead))
657 682 stream = util.chunkbuffer(bundler.getchunks())
658 683 try:
659 684 reply = pushop.remote.unbundle(stream, ['force'], 'push')
660 685 op = bundle2.processbundle(pushop.repo, reply)
661 686 except error.BundleValueError, exc:
662 687 raise util.Abort('missing support for %s' % exc)
663 688 for partid, node in part2node:
664 689 partrep = op.records.getreplies(partid)
665 690 results = partrep['pushkey']
666 691 assert len(results) <= 1
667 692 msg = None
668 693 if not results:
669 694 msg = _('server ignored update of %s to public!\n') % node
670 695 elif not int(results[0]['return']):
671 696 msg = _('updating %s to public failed!\n') % node
672 697 if msg is not None:
673 698 pushop.ui.warn(msg)
674 699
675 700 else:
676 701 # fall back to independent pushkey command
677 702 for newremotehead in outdated:
678 703 r = pushop.remote.pushkey('phases',
679 704 newremotehead.hex(),
680 705 str(phases.draft),
681 706 str(phases.public))
682 707 if not r:
683 708 pushop.ui.warn(_('updating %s to public failed!\n')
684 709 % newremotehead)
685 710
686 711 def _localphasemove(pushop, nodes, phase=phases.public):
687 712 """move <nodes> to <phase> in the local source repo"""
688 713 if pushop.locallocked:
689 714 tr = pushop.repo.transaction('push-phase-sync')
690 715 try:
691 716 phases.advanceboundary(pushop.repo, tr, phase, nodes)
692 717 tr.close()
693 718 finally:
694 719 tr.release()
695 720 else:
696 721 # repo is not locked, do not change any phases!
697 722 # Informs the user that phases should have been moved when
698 723 # applicable.
699 724 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
700 725 phasestr = phases.phasenames[phase]
701 726 if actualmoves:
702 727 pushop.ui.status(_('cannot lock source repo, skipping '
703 728 'local %s phase update\n') % phasestr)
704 729
705 730 def _pushobsolete(pushop):
706 731 """utility function to push obsolete markers to a remote"""
707 732 if 'obsmarkers' in pushop.stepsdone:
708 733 return
709 734 pushop.ui.debug('try to push obsolete markers to remote\n')
710 735 repo = pushop.repo
711 736 remote = pushop.remote
712 737 pushop.stepsdone.add('obsmarkers')
713 738 if pushop.outobsmarkers:
714 739 rslts = []
715 740 remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
716 741 for key in sorted(remotedata, reverse=True):
717 742 # reverse sort to ensure we end with dump0
718 743 data = remotedata[key]
719 744 rslts.append(remote.pushkey('obsolete', key, '', data))
720 745 if [r for r in rslts if not r]:
721 746 msg = _('failed to push some obsolete markers!\n')
722 747 repo.ui.warn(msg)
723 748
724 749 def _pushbookmark(pushop):
725 750 """Update bookmark position on remote"""
726 751 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
727 752 return
728 753 pushop.stepsdone.add('bookmarks')
729 754 ui = pushop.ui
730 755 remote = pushop.remote
731 756
732 757 for b, old, new in pushop.outbookmarks:
733 758 action = 'update'
734 759 if not old:
735 760 action = 'export'
736 761 elif not new:
737 762 action = 'delete'
738 763 if remote.pushkey('bookmarks', b, old, new):
739 764 ui.status(bookmsgmap[action][0] % b)
740 765 else:
741 766 ui.warn(bookmsgmap[action][1] % b)
742 767 # discovery can have set the value from an invalid entry
743 768 if pushop.bkresult is not None:
744 769 pushop.bkresult = 1
745 770
746 771 class pulloperation(object):
747 772 """A object that represent a single pull operation
748 773
749 774 It purpose is to carry push related state and very common operation.
750 775
751 776 A new should be created at the beginning of each pull and discarded
752 777 afterward.
753 778 """
754 779
755 780 def __init__(self, repo, remote, heads=None, force=False):
756 781 # repo we pull into
757 782 self.repo = repo
758 783 # repo we pull from
759 784 self.remote = remote
760 785 # revision we try to pull (None is "all")
761 786 self.heads = heads
762 787 # do we force pull?
763 788 self.force = force
764 789 # the name of the pull transaction
765 790 self._trname = 'pull\n' + util.hidepassword(remote.url())
766 791 # hold the transaction once created
767 792 self._tr = None
768 793 # set of common changeset between local and remote before pull
769 794 self.common = None
770 795 # set of pulled heads
771 796 self.rheads = None
772 797 # list of missing changesets to fetch remotely
773 798 self.fetch = None
774 799 # result of changegroup pulling (used as return code by pull)
775 800 self.cgresult = None
776 801 # list of steps remaining to do (related to future bundle2 usage)
777 802 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
778 803
779 804 @util.propertycache
780 805 def pulledsubset(self):
781 806 """heads of the set of changeset target by the pull"""
782 807 # compute target subset
783 808 if self.heads is None:
784 809 # We pulled everything possible
785 810 # sync on everything common
786 811 c = set(self.common)
787 812 ret = list(self.common)
788 813 for n in self.rheads:
789 814 if n not in c:
790 815 ret.append(n)
791 816 return ret
792 817 else:
793 818 # We pulled a specific subset
794 819 # sync on this subset
795 820 return self.heads
796 821
797 822 def gettransaction(self):
798 823 """get appropriate pull transaction, creating it if needed"""
799 824 if self._tr is None:
800 825 self._tr = self.repo.transaction(self._trname)
801 826 return self._tr
802 827
803 828 def closetransaction(self):
804 829 """close transaction if created"""
805 830 if self._tr is not None:
806 831 self._tr.close()
807 832
808 833 def releasetransaction(self):
809 834 """release transaction if created"""
810 835 if self._tr is not None:
811 836 self._tr.release()
812 837
813 838 def pull(repo, remote, heads=None, force=False, bookmarks=()):
814 839 pullop = pulloperation(repo, remote, heads, force)
815 840 if pullop.remote.local():
816 841 missing = set(pullop.remote.requirements) - pullop.repo.supported
817 842 if missing:
818 843 msg = _("required features are not"
819 844 " supported in the destination:"
820 845 " %s") % (', '.join(sorted(missing)))
821 846 raise util.Abort(msg)
822 847
823 848 remotebookmarks = remote.listkeys('bookmarks')
824 849 lock = pullop.repo.lock()
825 850 try:
826 851 _pulldiscovery(pullop)
827 852 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
828 853 and pullop.remote.capable('bundle2-exp')):
829 854 _pullbundle2(pullop)
830 855 if 'changegroup' in pullop.todosteps:
831 856 _pullchangeset(pullop)
832 857 if 'phases' in pullop.todosteps:
833 858 _pullphase(pullop)
834 859 if 'obsmarkers' in pullop.todosteps:
835 860 _pullobsolete(pullop)
836 861 pullop.closetransaction()
837 862 finally:
838 863 pullop.releasetransaction()
839 864 lock.release()
840 865 bookmod.updatefromremote(repo.ui, repo, remotebookmarks, remote.url())
841 866 # update specified bookmarks
842 867 if bookmarks:
843 868 marks = repo._bookmarks
844 869 writer = repo.ui.status
845 870 if repo.ui.configbool('ui', 'quietbookmarkmove', False):
846 871 writer = repo.ui.debug
847 872 for b in bookmarks:
848 873 # explicit pull overrides local bookmark if any
849 874 writer(_("importing bookmark %s\n") % b)
850 875 marks[b] = repo[remotebookmarks[b]].node()
851 876 marks.write()
852 877
853 878 return pullop.cgresult
854 879
855 880 def _pulldiscovery(pullop):
856 881 """discovery phase for the pull
857 882
858 883 Current handle changeset discovery only, will change handle all discovery
859 884 at some point."""
860 885 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
861 886 pullop.remote,
862 887 heads=pullop.heads,
863 888 force=pullop.force)
864 889 pullop.common, pullop.fetch, pullop.rheads = tmp
865 890
866 891 def _pullbundle2(pullop):
867 892 """pull data using bundle2
868 893
869 894 For now, the only supported data is the changegroup."""
870 895 remotecaps = bundle2.bundle2caps(pullop.remote)
871 896 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
872 897 # pulling changegroup
873 898 pullop.todosteps.remove('changegroup')
874 899
875 900 kwargs['common'] = pullop.common
876 901 kwargs['heads'] = pullop.heads or pullop.rheads
877 902 kwargs['cg'] = pullop.fetch
878 903 if 'b2x:listkeys' in remotecaps:
879 904 kwargs['listkeys'] = ['phase']
880 905 if not pullop.fetch:
881 906 pullop.repo.ui.status(_("no changes found\n"))
882 907 pullop.cgresult = 0
883 908 else:
884 909 if pullop.heads is None and list(pullop.common) == [nullid]:
885 910 pullop.repo.ui.status(_("requesting all changes\n"))
886 911 if obsolete._enabled:
887 912 remoteversions = bundle2.obsmarkersversion(remotecaps)
888 913 if obsolete.commonversion(remoteversions) is not None:
889 914 kwargs['obsmarkers'] = True
890 915 pullop.todosteps.remove('obsmarkers')
891 916 _pullbundle2extraprepare(pullop, kwargs)
892 917 if kwargs.keys() == ['format']:
893 918 return # nothing to pull
894 919 bundle = pullop.remote.getbundle('pull', **kwargs)
895 920 try:
896 921 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
897 922 except error.BundleValueError, exc:
898 923 raise util.Abort('missing support for %s' % exc)
899 924
900 925 if pullop.fetch:
901 926 assert len(op.records['changegroup']) == 1
902 927 pullop.cgresult = op.records['changegroup'][0]['return']
903 928
904 929 # processing phases change
905 930 for namespace, value in op.records['listkeys']:
906 931 if namespace == 'phases':
907 932 _pullapplyphases(pullop, value)
908 933
909 934 def _pullbundle2extraprepare(pullop, kwargs):
910 935 """hook function so that extensions can extend the getbundle call"""
911 936 pass
912 937
913 938 def _pullchangeset(pullop):
914 939 """pull changeset from unbundle into the local repo"""
915 940 # We delay opening the transaction as late as possible so we
916 941 # don't open a transaction for nothing and don't break future useful
917 942 # rollback calls
918 943 pullop.todosteps.remove('changegroup')
919 944 if not pullop.fetch:
920 945 pullop.repo.ui.status(_("no changes found\n"))
921 946 pullop.cgresult = 0
922 947 return
923 948 pullop.gettransaction()
924 949 if pullop.heads is None and list(pullop.common) == [nullid]:
925 950 pullop.repo.ui.status(_("requesting all changes\n"))
926 951 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
927 952 # issue1320, avoid a race if remote changed after discovery
928 953 pullop.heads = pullop.rheads
929 954
930 955 if pullop.remote.capable('getbundle'):
931 956 # TODO: get bundlecaps from remote
932 957 cg = pullop.remote.getbundle('pull', common=pullop.common,
933 958 heads=pullop.heads or pullop.rheads)
934 959 elif pullop.heads is None:
935 960 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
936 961 elif not pullop.remote.capable('changegroupsubset'):
937 962 raise util.Abort(_("partial pull cannot be done because "
938 963 "other repository doesn't support "
939 964 "changegroupsubset."))
940 965 else:
941 966 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
942 967 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
943 968 pullop.remote.url())
944 969
945 970 def _pullphase(pullop):
946 971 # Get remote phases data from remote
947 972 remotephases = pullop.remote.listkeys('phases')
948 973 _pullapplyphases(pullop, remotephases)
949 974
950 975 def _pullapplyphases(pullop, remotephases):
951 976 """apply phase movement from observed remote state"""
952 977 pullop.todosteps.remove('phases')
953 978 publishing = bool(remotephases.get('publishing', False))
954 979 if remotephases and not publishing:
955 980 # remote is new and unpublishing
956 981 pheads, _dr = phases.analyzeremotephases(pullop.repo,
957 982 pullop.pulledsubset,
958 983 remotephases)
959 984 dheads = pullop.pulledsubset
960 985 else:
961 986 # Remote is old or publishing; all common changesets
962 987 # should be seen as public
963 988 pheads = pullop.pulledsubset
964 989 dheads = []
965 990 unfi = pullop.repo.unfiltered()
966 991 phase = unfi._phasecache.phase
967 992 rev = unfi.changelog.nodemap.get
968 993 public = phases.public
969 994 draft = phases.draft
970 995
971 996 # exclude changesets already public locally and update the others
972 997 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
973 998 if pheads:
974 999 tr = pullop.gettransaction()
975 1000 phases.advanceboundary(pullop.repo, tr, public, pheads)
976 1001
977 1002 # exclude changesets already draft locally and update the others
978 1003 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
979 1004 if dheads:
980 1005 tr = pullop.gettransaction()
981 1006 phases.advanceboundary(pullop.repo, tr, draft, dheads)
982 1007
983 1008 def _pullobsolete(pullop):
984 1009 """utility function to pull obsolete markers from a remote
985 1010
986 1011 `gettransaction` is a function that returns the pull transaction, creating
987 1012 one if necessary. We return the transaction to inform the calling code that
988 1013 a new transaction has been created (when applicable).
989 1014
990 1015 Exists mostly to allow overriding for experimentation purposes"""
991 1016 pullop.todosteps.remove('obsmarkers')
992 1017 tr = None
993 1018 if obsolete._enabled:
994 1019 pullop.repo.ui.debug('fetching remote obsolete markers\n')
995 1020 remoteobs = pullop.remote.listkeys('obsolete')
996 1021 if 'dump0' in remoteobs:
997 1022 tr = pullop.gettransaction()
998 1023 for key in sorted(remoteobs, reverse=True):
999 1024 if key.startswith('dump'):
1000 1025 data = base85.b85decode(remoteobs[key])
1001 1026 pullop.repo.obsstore.mergemarkers(tr, data)
1002 1027 pullop.repo.invalidatevolatilesets()
1003 1028 return tr
1004 1029
1005 1030 def caps20to10(repo):
1006 1031 """return a set with appropriate options to use bundle20 during getbundle"""
1007 1032 caps = set(['HG2X'])
1008 1033 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1009 1034 caps.add('bundle2=' + urllib.quote(capsblob))
1010 1035 return caps
1011 1036
1012 1037 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1013 1038 getbundle2partsorder = []
1014 1039
1015 1040 # Mapping between step name and function
1016 1041 #
1017 1042 # This exists to help extensions wrap steps if necessary
1018 1043 getbundle2partsmapping = {}
1019 1044
1020 1045 def getbundle2partsgenerator(stepname):
1021 1046 """decorator for function generating bundle2 part for getbundle
1022 1047
1023 1048 The function is added to the step -> function mapping and appended to the
1024 1049 list of steps. Beware that decorated functions will be added in order
1025 1050 (this may matter).
1026 1051
1027 1052 You can only use this decorator for new steps; if you want to wrap a step
1028 1053 from an extension, modify the getbundle2partsmapping dictionary directly."""
1029 1054 def dec(func):
1030 1055 assert stepname not in getbundle2partsmapping
1031 1056 getbundle2partsmapping[stepname] = func
1032 1057 getbundle2partsorder.append(stepname)
1033 1058 return func
1034 1059 return dec
1035 1060
1036 1061 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1037 1062 **kwargs):
1038 1063 """return a full bundle (with potentially multiple kind of parts)
1039 1064
1040 1065 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
1041 1066 passed. For now, the bundle can contain only changegroup, but this will
1042 1067 changes when more part type will be available for bundle2.
1043 1068
1044 1069 This is different from changegroup.getchangegroup that only returns an HG10
1045 1070 changegroup bundle. They may eventually get reunited in the future when we
1046 1071 have a clearer idea of the API we what to query different data.
1047 1072
1048 1073 The implementation is at a very early stage and will get massive rework
1049 1074 when the API of bundle is refined.
1050 1075 """
1051 1076 # bundle10 case
1052 1077 if bundlecaps is None or 'HG2X' not in bundlecaps:
1053 1078 if bundlecaps and not kwargs.get('cg', True):
1054 1079 raise ValueError(_('request for bundle10 must include changegroup'))
1055 1080
1056 1081 if kwargs:
1057 1082 raise ValueError(_('unsupported getbundle arguments: %s')
1058 1083 % ', '.join(sorted(kwargs.keys())))
1059 1084 return changegroup.getchangegroup(repo, source, heads=heads,
1060 1085 common=common, bundlecaps=bundlecaps)
1061 1086
1062 1087 # bundle20 case
1063 1088 b2caps = {}
1064 1089 for bcaps in bundlecaps:
1065 1090 if bcaps.startswith('bundle2='):
1066 1091 blob = urllib.unquote(bcaps[len('bundle2='):])
1067 1092 b2caps.update(bundle2.decodecaps(blob))
1068 1093 bundler = bundle2.bundle20(repo.ui, b2caps)
1069 1094
1070 1095 for name in getbundle2partsorder:
1071 1096 func = getbundle2partsmapping[name]
1072 1097 kwargs['heads'] = heads
1073 1098 kwargs['common'] = common
1074 1099 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1075 1100 **kwargs)
1076 1101
1077 1102 return util.chunkbuffer(bundler.getchunks())
1078 1103
1079 1104 @getbundle2partsgenerator('changegroup')
1080 1105 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1081 1106 b2caps=None, heads=None, common=None, **kwargs):
1082 1107 """add a changegroup part to the requested bundle"""
1083 1108 cg = None
1084 1109 if kwargs.get('cg', True):
1085 1110 # build changegroup bundle here.
1086 1111 cg = changegroup.getchangegroup(repo, source, heads=heads,
1087 1112 common=common, bundlecaps=bundlecaps)
1088 1113
1089 1114 if cg:
1090 1115 bundler.newpart('b2x:changegroup', data=cg.getchunks())
1091 1116
1092 1117 @getbundle2partsgenerator('listkeys')
1093 1118 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1094 1119 b2caps=None, **kwargs):
1095 1120 """add parts containing listkeys namespaces to the requested bundle"""
1096 1121 listkeys = kwargs.get('listkeys', ())
1097 1122 for namespace in listkeys:
1098 1123 part = bundler.newpart('b2x:listkeys')
1099 1124 part.addparam('namespace', namespace)
1100 1125 keys = repo.listkeys(namespace).items()
1101 1126 part.data = pushkey.encodekeys(keys)
1102 1127
1103 1128 @getbundle2partsgenerator('obsmarkers')
1104 1129 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1105 1130 b2caps=None, heads=None, **kwargs):
1106 1131 """add an obsolescence markers part to the requested bundle"""
1107 1132 if kwargs.get('obsmarkers', False):
1108 1133 if heads is None:
1109 1134 heads = repo.heads()
1110 1135 subset = [c.node() for c in repo.set('::%ln', heads)]
1111 1136 markers = repo.obsstore.relevantmarkers(subset)
1112 1137 buildobsmarkerspart(bundler, markers)
1113 1138
1114 1139 @getbundle2partsgenerator('extra')
1115 1140 def _getbundleextrapart(bundler, repo, source, bundlecaps=None,
1116 1141 b2caps=None, **kwargs):
1117 1142 """hook function to let extensions add parts to the requested bundle"""
1118 1143 pass
1119 1144
1120 1145 def check_heads(repo, their_heads, context):
1121 1146 """check if the heads of a repo have been modified
1122 1147
1123 1148 Used by peer for unbundling.
1124 1149 """
1125 1150 heads = repo.heads()
1126 1151 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1127 1152 if not (their_heads == ['force'] or their_heads == heads or
1128 1153 their_heads == ['hashed', heads_hash]):
1129 1154 # someone else committed/pushed/unbundled while we
1130 1155 # were transferring data
1131 1156 raise error.PushRaced('repository changed while %s - '
1132 1157 'please try again' % context)
1133 1158
1134 1159 def unbundle(repo, cg, heads, source, url):
1135 1160 """Apply a bundle to a repo.
1136 1161
1137 1162 This function makes sure the repo is locked during the application and has a
1138 1163 mechanism to check that no push race occurred between the creation of the
1139 1164 bundle and its application.
1140 1165
1141 1166 If the push was raced, a PushRaced exception is raised."""
1142 1167 r = 0
1143 1168 # need a transaction when processing a bundle2 stream
1144 1169 tr = None
1145 1170 lock = repo.lock()
1146 1171 try:
1147 1172 check_heads(repo, heads, 'uploading changes')
1148 1173 # push can proceed
1149 1174 if util.safehasattr(cg, 'params'):
1150 1175 try:
1151 1176 tr = repo.transaction('unbundle')
1152 1177 tr.hookargs['bundle2-exp'] = '1'
1153 1178 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1154 1179 cl = repo.unfiltered().changelog
1155 1180 p = cl.writepending() and repo.root or ""
1156 1181 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1157 1182 url=url, pending=p, **tr.hookargs)
1158 1183 tr.close()
1159 1184 repo.hook('b2x-transactionclose', source=source, url=url,
1160 1185 **tr.hookargs)
1161 1186 except Exception, exc:
1162 1187 exc.duringunbundle2 = True
1163 1188 raise
1164 1189 else:
1165 1190 r = changegroup.addchangegroup(repo, cg, source, url)
1166 1191 finally:
1167 1192 if tr is not None:
1168 1193 tr.release()
1169 1194 lock.release()
1170 1195 return r
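Before the test changes: both the bundle2 path (_pushb2bookmarks) and the plain pushkey path (_pushbookmark) above pick the user-facing message by classifying each outgoing bookmark from its old and new remote values. A tiny standalone sketch of that classification, with made-up sample values:

# Sketch: choose the bookmark action from the (old, new) remote values,
# mirroring the action selection in _pushb2bookmarks and _pushbookmark.
def bookmark_action(old, new):
    if not old:
        return 'export'   # bookmark is new on the remote
    if not new:
        return 'delete'   # bookmark was removed locally
    return 'update'       # normal move

assert bookmark_action('', 'abc123') == 'export'
assert bookmark_action('abc123', '') == 'delete'
assert bookmark_action('abc123', 'def456') == 'update'

The chosen action indexes into bookmsgmap to produce the "exporting bookmark" / "updating bookmark" / "deleting remote bookmark" lines that the test output below checks.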
@@ -1,446 +1,446
1 1 #require serve
2 2
3 3 $ cat << EOF >> $HGRCPATH
4 4 > [ui]
5 5 > logtemplate={rev}:{node|short} {desc|firstline}
6 6 > [phases]
7 7 > publish=False
8 8 > [extensions]
9 9 > EOF
10 10 $ cat > obs.py << EOF
11 11 > import mercurial.obsolete
12 12 > mercurial.obsolete._enabled = True
13 13 > EOF
14 14 $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
15 15
16 16 initialize
17 17
18 18 $ hg init a
19 19 $ cd a
20 20 $ echo 'test' > test
21 21 $ hg commit -Am'test'
22 22 adding test
23 23
24 24 set bookmarks
25 25
26 26 $ hg bookmark X
27 27 $ hg bookmark Y
28 28 $ hg bookmark Z
29 29
30 30 import bookmark by name
31 31
32 32 $ hg init ../b
33 33 $ cd ../b
34 34 $ hg book Y
35 35 $ hg book
36 36 * Y -1:000000000000
37 37 $ hg pull ../a
38 38 pulling from ../a
39 39 requesting all changes
40 40 adding changesets
41 41 adding manifests
42 42 adding file changes
43 43 added 1 changesets with 1 changes to 1 files
44 44 adding remote bookmark X
45 45 updating bookmark Y
46 46 adding remote bookmark Z
47 47 (run 'hg update' to get a working copy)
48 48 $ hg bookmarks
49 49 X 0:4e3505fd9583
50 50 * Y 0:4e3505fd9583
51 51 Z 0:4e3505fd9583
52 52 $ hg debugpushkey ../a namespaces
53 53 bookmarks
54 54 namespaces
55 55 obsolete
56 56 phases
57 57 $ hg debugpushkey ../a bookmarks
58 58 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
59 59 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
60 60 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
61 61 $ hg pull -B X ../a
62 62 pulling from ../a
63 63 no changes found
64 64 importing bookmark X
65 65 $ hg bookmark
66 66 X 0:4e3505fd9583
67 67 * Y 0:4e3505fd9583
68 68 Z 0:4e3505fd9583
69 69
70 70 export bookmark by name
71 71
72 72 $ hg bookmark W
73 73 $ hg bookmark foo
74 74 $ hg bookmark foobar
75 75 $ hg push -B W ../a
76 76 pushing to ../a
77 77 searching for changes
78 78 no changes found
79 79 exporting bookmark W
80 80 [1]
81 81 $ hg -R ../a bookmarks
82 82 W -1:000000000000
83 83 X 0:4e3505fd9583
84 84 Y 0:4e3505fd9583
85 85 * Z 0:4e3505fd9583
86 86
87 87 delete a remote bookmark
88 88
89 89 $ hg book -d W
90 90 $ hg push -B W ../a
91 91 pushing to ../a
92 92 searching for changes
93 93 no changes found
94 94 deleting remote bookmark W
95 95 [1]
96 96
97 97 push/pull name that doesn't exist
98 98
99 99 $ hg push -B badname ../a
100 100 pushing to ../a
101 101 searching for changes
102 bookmark badname does not exist on the local or remote repository!
102 103 no changes found
103 bookmark badname does not exist on the local or remote repository!
104 104 [2]
105 105 $ hg pull -B anotherbadname ../a
106 106 pulling from ../a
107 107 abort: remote bookmark anotherbadname not found!
108 108 [255]
109 109
110 110 divergent bookmarks
111 111
112 112 $ cd ../a
113 113 $ echo c1 > f1
114 114 $ hg ci -Am1
115 115 adding f1
116 116 $ hg book -f @
117 117 $ hg book -f X
118 118 $ hg book
119 119 @ 1:0d2164f0ce0d
120 120 * X 1:0d2164f0ce0d
121 121 Y 0:4e3505fd9583
122 122 Z 1:0d2164f0ce0d
123 123
124 124 $ cd ../b
125 125 $ hg up
126 126 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
127 127 updating bookmark foobar
128 128 $ echo c2 > f2
129 129 $ hg ci -Am2
130 130 adding f2
131 131 $ hg book -if @
132 132 $ hg book -if X
133 133 $ hg book
134 134 @ 1:9b140be10808
135 135 X 1:9b140be10808
136 136 Y 0:4e3505fd9583
137 137 Z 0:4e3505fd9583
138 138 foo -1:000000000000
139 139 * foobar 1:9b140be10808
140 140
141 141 $ hg pull --config paths.foo=../a foo
142 142 pulling from $TESTTMP/a (glob)
143 143 searching for changes
144 144 adding changesets
145 145 adding manifests
146 146 adding file changes
147 147 added 1 changesets with 1 changes to 1 files (+1 heads)
148 148 divergent bookmark @ stored as @foo
149 149 divergent bookmark X stored as X@foo
150 150 updating bookmark Z
151 151 (run 'hg heads' to see heads, 'hg merge' to merge)
152 152 $ hg book
153 153 @ 1:9b140be10808
154 154 @foo 2:0d2164f0ce0d
155 155 X 1:9b140be10808
156 156 X@foo 2:0d2164f0ce0d
157 157 Y 0:4e3505fd9583
158 158 Z 2:0d2164f0ce0d
159 159 foo -1:000000000000
160 160 * foobar 1:9b140be10808
161 161 $ hg push -f ../a
162 162 pushing to ../a
163 163 searching for changes
164 164 adding changesets
165 165 adding manifests
166 166 adding file changes
167 167 added 1 changesets with 1 changes to 1 files (+1 heads)
168 168 $ hg -R ../a book
169 169 @ 1:0d2164f0ce0d
170 170 * X 1:0d2164f0ce0d
171 171 Y 0:4e3505fd9583
172 172 Z 1:0d2164f0ce0d
173 173
174 174 revsets should not ignore divergent bookmarks
175 175
176 176 $ hg bookmark -fr 1 Z
177 177 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
178 178 0:4e3505fd9583 Y
179 179 1:9b140be10808 @ X Z foobar
180 180 2:0d2164f0ce0d @foo X@foo
181 181 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
182 182 2:0d2164f0ce0d @foo X@foo
183 183 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
184 184 2:0d2164f0ce0d @foo X@foo
185 185
186 186 update a remote bookmark from a non-head to a head
187 187
188 188 $ hg up -q Y
189 189 $ echo c3 > f2
190 190 $ hg ci -Am3
191 191 adding f2
192 192 created new head
193 193 $ hg push ../a
194 194 pushing to ../a
195 195 searching for changes
196 196 adding changesets
197 197 adding manifests
198 198 adding file changes
199 199 added 1 changesets with 1 changes to 1 files (+1 heads)
200 200 updating bookmark Y
201 201 $ hg -R ../a book
202 202 @ 1:0d2164f0ce0d
203 203 * X 1:0d2164f0ce0d
204 204 Y 3:f6fc62dde3c0
205 205 Z 1:0d2164f0ce0d
206 206
207 207 update a bookmark in the middle of a client pulling changes
208 208
209 209 $ cd ..
210 210 $ hg clone -q a pull-race
211 211 $ hg clone -q pull-race pull-race2
212 212 $ cd pull-race
213 213 $ hg up -q Y
214 214 $ echo c4 > f2
215 215 $ hg ci -Am4
216 216 $ echo c5 > f3
217 217 $ cat <<EOF > .hg/hgrc
218 218 > [hooks]
219 219 > outgoing.makecommit = hg ci -Am5; echo committed in pull-race
220 220 > EOF
221 221 $ cd ../pull-race2
222 222 $ hg pull
223 223 pulling from $TESTTMP/pull-race (glob)
224 224 searching for changes
225 225 adding changesets
226 226 adding f3
227 227 committed in pull-race
228 228 adding manifests
229 229 adding file changes
230 230 added 1 changesets with 1 changes to 1 files
231 231 updating bookmark Y
232 232 (run 'hg update' to get a working copy)
233 233 $ hg book
234 234 * @ 1:0d2164f0ce0d
235 235 X 1:0d2164f0ce0d
236 236 Y 4:b0a5eff05604
237 237 Z 1:0d2164f0ce0d
238 238 $ cd ../b
239 239
240 240 diverging a remote bookmark fails
241 241
242 242 $ hg up -q 4e3505fd9583
243 243 $ echo c4 > f2
244 244 $ hg ci -Am4
245 245 adding f2
246 246 created new head
247 247 $ echo c5 > f2
248 248 $ hg ci -Am5
249 249 $ hg log -G
250 250 @ 5:c922c0139ca0 5
251 251 |
252 252 o 4:4efff6d98829 4
253 253 |
254 254 | o 3:f6fc62dde3c0 3
255 255 |/
256 256 | o 2:0d2164f0ce0d 1
257 257 |/
258 258 | o 1:9b140be10808 2
259 259 |/
260 260 o 0:4e3505fd9583 test
261 261
262 262
263 263 $ hg book -f Y
264 264
265 265 $ cat <<EOF > ../a/.hg/hgrc
266 266 > [web]
267 267 > push_ssl = false
268 268 > allow_push = *
269 269 > EOF
270 270
271 271 $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid
272 272 $ cat ../hg2.pid >> $DAEMON_PIDS
273 273
274 274 $ hg push http://localhost:$HGPORT2/
275 275 pushing to http://localhost:$HGPORT2/
276 276 searching for changes
277 277 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
278 278 (merge or see "hg help push" for details about pushing new heads)
279 279 [255]
280 280 $ hg -R ../a book
281 281 @ 1:0d2164f0ce0d
282 282 * X 1:0d2164f0ce0d
283 283 Y 3:f6fc62dde3c0
284 284 Z 1:0d2164f0ce0d
285 285
286 286
287 287 Unrelated marker does not alter the decision
288 288
289 289 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
290 290 $ hg push http://localhost:$HGPORT2/
291 291 pushing to http://localhost:$HGPORT2/
292 292 searching for changes
293 293 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
294 294 (merge or see "hg help push" for details about pushing new heads)
295 295 [255]
296 296 $ hg -R ../a book
297 297 @ 1:0d2164f0ce0d
298 298 * X 1:0d2164f0ce0d
299 299 Y 3:f6fc62dde3c0
300 300 Z 1:0d2164f0ce0d
301 301
302 302 Update to a successor works
303 303
304 304 $ hg id --debug -r 3
305 305 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
306 306 $ hg id --debug -r 4
307 307 4efff6d98829d9c824c621afd6e3f01865f5439f
308 308 $ hg id --debug -r 5
309 309 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
310 310 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
311 311 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
312 312 $ hg push http://localhost:$HGPORT2/
313 313 pushing to http://localhost:$HGPORT2/
314 314 searching for changes
315 315 remote: adding changesets
316 316 remote: adding manifests
317 317 remote: adding file changes
318 318 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
319 319 updating bookmark Y
320 320 $ hg -R ../a book
321 321 @ 1:0d2164f0ce0d
322 322 * X 1:0d2164f0ce0d
323 323 Y 5:c922c0139ca0
324 324 Z 1:0d2164f0ce0d
325 325
326 326 hgweb
327 327
328 328 $ cat <<EOF > .hg/hgrc
329 329 > [web]
330 330 > push_ssl = false
331 331 > allow_push = *
332 332 > EOF
333 333
334 334 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
335 335 $ cat ../hg.pid >> $DAEMON_PIDS
336 336 $ cd ../a
337 337
338 338 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
339 339 bookmarks
340 340 namespaces
341 341 obsolete
342 342 phases
343 343 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
344 344 @ 9b140be1080824d768c5a4691a564088eede71f9
345 345 X 9b140be1080824d768c5a4691a564088eede71f9
346 346 Y c922c0139ca03858f655e4a2af4dd02796a63969
347 347 Z 9b140be1080824d768c5a4691a564088eede71f9
348 348 foo 0000000000000000000000000000000000000000
349 349 foobar 9b140be1080824d768c5a4691a564088eede71f9
350 350 $ hg out -B http://localhost:$HGPORT/
351 351 comparing with http://localhost:$HGPORT/
352 352 searching for changed bookmarks
353 353 no changed bookmarks found
354 354 [1]
355 355 $ hg push -B Z http://localhost:$HGPORT/
356 356 pushing to http://localhost:$HGPORT/
357 357 searching for changes
358 358 no changes found
359 exporting bookmark Z
359 updating bookmark Z
360 360 [1]
361 361 $ hg book -d Z
362 362 $ hg in -B http://localhost:$HGPORT/
363 363 comparing with http://localhost:$HGPORT/
364 364 searching for changed bookmarks
365 365 Z 0d2164f0ce0d
366 366 foo 000000000000
367 367 foobar 9b140be10808
368 368 $ hg pull -B Z http://localhost:$HGPORT/
369 369 pulling from http://localhost:$HGPORT/
370 370 no changes found
371 371 divergent bookmark @ stored as @1
372 372 divergent bookmark X stored as X@1
373 373 adding remote bookmark Z
374 374 adding remote bookmark foo
375 375 adding remote bookmark foobar
376 376 importing bookmark Z
377 377 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
378 378 requesting all changes
379 379 adding changesets
380 380 adding manifests
381 381 adding file changes
382 382 added 5 changesets with 5 changes to 3 files (+2 heads)
383 383 updating to bookmark @
384 384 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
385 385 $ hg -R cloned-bookmarks bookmarks
386 386 * @ 1:9b140be10808
387 387 X 1:9b140be10808
388 388 Y 4:c922c0139ca0
389 389 Z 2:0d2164f0ce0d
390 390 foo -1:000000000000
391 391 foobar 1:9b140be10808
392 392
393 393 $ cd ..
394 394
395 395 Pushing a bookmark should only push the changes required by that
396 396 bookmark, not all outgoing changes:
397 397 $ hg clone http://localhost:$HGPORT/ addmarks
398 398 requesting all changes
399 399 adding changesets
400 400 adding manifests
401 401 adding file changes
402 402 added 5 changesets with 5 changes to 3 files (+2 heads)
403 403 updating to bookmark @
404 404 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
405 405 $ cd addmarks
406 406 $ echo foo > foo
407 407 $ hg add foo
408 408 $ hg commit -m 'add foo'
409 409 $ echo bar > bar
410 410 $ hg add bar
411 411 $ hg commit -m 'add bar'
412 412 $ hg co "tip^"
413 413 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
414 414 (leaving bookmark @)
415 415 $ hg book add-foo
416 416 $ hg book -r tip add-bar
417 417 Note: this push *must* push only a single changeset, as that's the point
418 418 of this test.
419 419 $ hg push -B add-foo --traceback
420 420 pushing to http://localhost:$HGPORT/
421 421 searching for changes
422 422 remote: adding changesets
423 423 remote: adding manifests
424 424 remote: adding file changes
425 425 remote: added 1 changesets with 1 changes to 1 files
426 426 exporting bookmark add-foo
427 427
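A toy version of the changeset selection exercised above, assuming the
repo.revs() API with the bookmark() and ancestors() revsets; this is only
an illustration of the idea, not the code 'hg push -B' actually runs:

    def bookmarkpushrevs(repo, remoteheads, name):
        # everything the bookmark needs that the remote heads do not
        # already contain
        return repo.revs('ancestors(bookmark(%s)) - ancestors(%ln)',
                         name, remoteheads)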
428 428 pushing a new bookmark on a new head does not require -f if -B is specified
429 429
430 430 $ hg up -q X
431 431 $ hg book W
432 432 $ echo c5 > f2
433 433 $ hg ci -Am5
434 434 created new head
435 435 $ hg push -B W
436 436 pushing to http://localhost:$HGPORT/
437 437 searching for changes
438 438 remote: adding changesets
439 439 remote: adding manifests
440 440 remote: adding file changes
441 441 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
442 442 exporting bookmark W
443 443 $ hg -R ../b id -r W
444 444 cc978a373a53 tip W
445 445
446 446 $ cd ..
@@ -1,1198 +1,1195
1 1
2 2 $ getmainid() {
3 3 > hg -R main log --template '{node}\n' --rev "$1"
4 4 > }
5 5
6 6 Create an extension to test the bundle2 API
7 7
8 8 $ cat > bundle2.py << EOF
9 9 > """A small extension to test bundle2 implementation
10 10 >
11 11 > The current bundle2 implementation is far too limited to be used in any core
12 12 > code. We still need to be able to test it while it grows up.
13 13 > """
14 14 >
15 15 > import sys, os
16 16 > from mercurial import cmdutil
17 17 > from mercurial import util
18 18 > from mercurial import bundle2
19 19 > from mercurial import scmutil
20 20 > from mercurial import discovery
21 21 > from mercurial import changegroup
22 22 > from mercurial import error
23 23 > from mercurial import obsolete
24 24 >
25 25 > obsolete._enabled = True
26 26 >
27 27 > try:
28 28 > import msvcrt
29 29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 32 > except ImportError:
33 33 > pass
34 34 >
35 35 > cmdtable = {}
36 36 > command = cmdutil.command(cmdtable)
37 37 >
38 38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
42 42 >
43 43 > @bundle2.parthandler('test:song')
44 44 > def songhandler(op, part):
45 45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
46 46 > op.ui.write('The choir starts singing:\n')
47 47 > verses = 0
48 48 > for line in part.read().split('\n'):
49 49 > op.ui.write(' %s\n' % line)
50 50 > verses += 1
51 51 > op.records.add('song', {'verses': verses})
52 52 >
53 53 > @bundle2.parthandler('test:ping')
54 54 > def pinghandler(op, part):
55 55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
59 59 >
60 60 > @bundle2.parthandler('test:debugreply')
61 61 > def debugreply(op, part):
62 62 > """print data about the capacity of the bundle reply"""
63 63 > if op.reply is None:
64 64 > op.ui.write('debugreply: no reply\n')
65 65 > else:
66 66 > op.ui.write('debugreply: capabilities:\n')
67 67 > for cap in sorted(op.reply.capabilities):
68 68 > op.ui.write('debugreply: %r\n' % cap)
69 69 > for val in op.reply.capabilities[cap]:
70 70 > op.ui.write('debugreply: %r\n' % val)
71 71 >
72 72 > @command('bundle2',
73 73 > [('', 'param', [], 'stream level parameter'),
74 74 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 75 > ('', 'unknownparams', False, 'include unknown part parameters in the bundle'),
76 76 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
77 77 > ('', 'reply', False, 'produce a reply bundle'),
78 78 > ('', 'pushrace', False, 'include a check:heads part with unknown nodes'),
79 79 > ('r', 'rev', [], 'include those changesets in the bundle'),],
80 80 > '[OUTPUTFILE]')
81 81 > def cmdbundle2(ui, repo, path=None, **opts):
82 82 > """write a bundle2 container on standard ouput"""
83 83 > bundler = bundle2.bundle20(ui)
84 84 > for p in opts['param']:
85 85 > p = p.split('=', 1)
86 86 > try:
87 87 > bundler.addparam(*p)
88 88 > except ValueError, exc:
89 89 > raise util.Abort('%s' % exc)
90 90 >
91 91 > if opts['reply']:
92 92 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
93 93 > bundler.newpart('b2x:replycaps', data=capsstring)
94 94 >
95 95 > if opts['pushrace']:
96 96 > # also serves to test the assignment of data outside of init
97 97 > part = bundler.newpart('b2x:check:heads')
98 98 > part.data = '01234567890123456789'
99 99 >
100 100 > revs = opts['rev']
101 101 > if 'rev' in opts:
102 102 > revs = scmutil.revrange(repo, opts['rev'])
103 103 > if revs:
104 104 > # very crude version of a changegroup part creation
105 105 > bundled = repo.revs('%ld::%ld', revs, revs)
106 106 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
107 107 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
108 108 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
109 109 > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
110 110 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
111 111 >
112 112 > if opts['parts']:
113 113 > bundler.newpart('test:empty')
114 114 > # add a second one to make sure we handle multiple parts
115 115 > bundler.newpart('test:empty')
116 116 > bundler.newpart('test:song', data=ELEPHANTSSONG)
117 117 > bundler.newpart('test:debugreply')
118 118 > mathpart = bundler.newpart('test:math')
119 119 > mathpart.addparam('pi', '3.14')
120 120 > mathpart.addparam('e', '2.72')
121 121 > mathpart.addparam('cooking', 'raw', mandatory=False)
122 122 > mathpart.data = '42'
123 123 > # advisory known part with unknown mandatory param
124 124 > bundler.newpart('test:song', [('randomparam','')])
125 125 > if opts['unknown']:
126 126 > bundler.newpart('test:UNKNOWN', data='some random content')
127 127 > if opts['unknownparams']:
128 128 > bundler.newpart('test:SONG', [('randomparams', '')])
129 129 > if opts['parts']:
130 130 > bundler.newpart('test:ping')
131 131 >
132 132 > if path is None:
133 133 > file = sys.stdout
134 134 > else:
135 135 > file = open(path, 'wb')
136 136 >
137 137 > for chunk in bundler.getchunks():
138 138 > file.write(chunk)
139 139 >
140 140 > @command('unbundle2', [], '')
141 141 > def cmdunbundle2(ui, repo, replypath=None):
142 142 > """process a bundle2 stream from stdin on the current repo"""
143 143 > try:
144 144 > tr = None
145 145 > lock = repo.lock()
146 146 > tr = repo.transaction('processbundle')
147 147 > try:
148 148 > unbundler = bundle2.unbundle20(ui, sys.stdin)
149 149 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
150 150 > tr.close()
151 151 > except error.BundleValueError, exc:
152 152 > raise util.Abort('missing support for %s' % exc)
153 153 > except error.PushRaced, exc:
154 154 > raise util.Abort('push race: %s' % exc)
155 155 > finally:
156 156 > if tr is not None:
157 157 > tr.release()
158 158 > lock.release()
159 159 > remains = sys.stdin.read()
160 160 > ui.write('%i unread bytes\n' % len(remains))
161 161 > if op.records['song']:
162 162 > totalverses = sum(r['verses'] for r in op.records['song'])
163 163 > ui.write('%i total verses sung\n' % totalverses)
164 164 > for rec in op.records['changegroup']:
165 165 > ui.write('addchangegroup return: %i\n' % rec['return'])
166 166 > if op.reply is not None and replypath is not None:
167 167 > file = open(replypath, 'wb')
168 168 > for chunk in op.reply.getchunks():
169 169 > file.write(chunk)
170 170 >
171 171 > @command('statbundle2', [], '')
172 172 > def cmdstatbundle2(ui, repo):
173 173 > """print statistic on the bundle2 container read from stdin"""
174 174 > unbundler = bundle2.unbundle20(ui, sys.stdin)
175 175 > try:
176 176 > params = unbundler.params
177 177 > except error.BundleValueError, exc:
178 178 > raise util.Abort('unknown parameters: %s' % exc)
179 179 > ui.write('options count: %i\n' % len(params))
180 180 > for key in sorted(params):
181 181 > ui.write('- %s\n' % key)
182 182 > value = params[key]
183 183 > if value is not None:
184 184 > ui.write(' %s\n' % value)
185 185 > count = 0
186 186 > for p in unbundler.iterparts():
187 187 > count += 1
188 188 > ui.write(' :%s:\n' % p.type)
189 189 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
190 190 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
191 191 > ui.write(' payload: %i bytes\n' % len(p.read()))
192 192 > ui.write('parts count: %i\n' % count)
193 193 > EOF
194 194 $ cat >> $HGRCPATH << EOF
195 195 > [extensions]
196 196 > bundle2=$TESTTMP/bundle2.py
197 197 > [experimental]
198 198 > bundle2-exp=True
199 199 > [ui]
200 200 > ssh=python "$TESTDIR/dummyssh"
201 201 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
202 202 > [web]
203 203 > push_ssl = false
204 204 > allow_push = *
205 205 > [phases]
206 206 > publish=False
207 207 > EOF
208 208
209 209 The extension requires a repo (currently unused)
210 210
211 211 $ hg init main
212 212 $ cd main
213 213 $ touch a
214 214 $ hg add a
215 215 $ hg commit -m 'a'
216 216
217 217
218 218 Empty bundle
219 219 =================
220 220
221 221 - no option
222 222 - no parts
223 223
224 224 Test bundling
225 225
226 226 $ hg bundle2
227 227 HG2X\x00\x00\x00\x00 (no-eol) (esc)
228 228
229 229 Test unbundling
230 230
231 231 $ hg bundle2 | hg statbundle2
232 232 options count: 0
233 233 parts count: 0
234 234
235 235 Test that old style bundles are detected and refused
236 236
237 237 $ hg bundle --all ../bundle.hg
238 238 1 changesets found
239 239 $ hg statbundle2 < ../bundle.hg
240 240 abort: unknown bundle version 10
241 241 [255]
242 242
243 243 Test parameters
244 244 =================
245 245
246 246 - some options
247 247 - no parts
248 248
249 249 advisory parameters, no value
250 250 -------------------------------
251 251
252 252 Simplest possible parameter form
253 253
254 254 Test generation of a simple option
255 255
256 256 $ hg bundle2 --param 'caution'
257 257 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
258 258
259 259 Test unbundling
260 260
261 261 $ hg bundle2 --param 'caution' | hg statbundle2
262 262 options count: 1
263 263 - caution
264 264 parts count: 0
265 265
266 266 Test generation of multiple options
267 267
268 268 $ hg bundle2 --param 'caution' --param 'meal'
269 269 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
270 270
271 271 Test unbundling
272 272
273 273 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
274 274 options count: 2
275 275 - caution
276 276 - meal
277 277 parts count: 0
278 278
279 279 advisory parameters, with value
280 280 -------------------------------
281 281
282 282 Test generation
283 283
284 284 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
285 285 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
286 286
287 287 Test unbundling
288 288
289 289 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
290 290 options count: 3
291 291 - caution
292 292 - elephants
293 293 - meal
294 294 vegan
295 295 parts count: 0
296 296
297 297 parameter with special char in value
298 298 ---------------------------------------------------
299 299
300 300 Test generation
301 301
302 302 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
303 303 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
304 304
305 305 Test unbundling
306 306
307 307 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
308 308 options count: 2
309 309 - e|! 7/
310 310 babar%#==tutu
311 311 - simple
312 312 parts count: 0
313 313
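The dumps above fix the stream-level parameter layout: a two-byte
big-endian length right after the HG2X magic, then space-separated,
URL-quoted name[=value] entries. A rough decoder for just that block
(illustrative only, not the mercurial.bundle2 parser):

    import struct, urllib

    def readstreamparams(data):
        # data is a whole HG2X bundle as a byte string
        assert data[:4] == 'HG2X'
        size = struct.unpack('>H', data[4:6])[0]
        blob = data[6:6 + size]
        params = {}
        for entry in blob.split(' ') if blob else []:
            if '=' in entry:
                name, value = entry.split('=', 1)
                params[urllib.unquote(name)] = urllib.unquote(value)
            else:
                params[urllib.unquote(entry)] = None
        return params

Feeding it the last dump yields {'e|! 7/': 'babar%#==tutu', 'simple': None},
matching the statbundle2 output above.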
314 314 Test unknown mandatory option
315 315 ---------------------------------------------------
316 316
317 317 $ hg bundle2 --param 'Gravity' | hg statbundle2
318 318 abort: unknown parameters: Stream Parameter - Gravity
319 319 [255]
320 320
321 321 Test debug output
322 322 ---------------------------------------------------
323 323
324 324 bundling debug
325 325
326 326 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
327 327 start emission of HG2X stream
328 328 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
329 329 start of parts
330 330 end of bundle
331 331
332 332 file content is ok
333 333
334 334 $ cat ../out.hg2
335 335 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
336 336
337 337 unbundling debug
338 338
339 339 $ hg statbundle2 --debug < ../out.hg2
340 340 start processing of HG2X stream
341 341 reading bundle2 stream parameters
342 342 ignoring unknown parameter 'e|! 7/'
343 343 ignoring unknown parameter 'simple'
344 344 options count: 2
345 345 - e|! 7/
346 346 babar%#==tutu
347 347 - simple
348 348 start extraction of bundle2 parts
349 349 part header size: 0
350 350 end of bundle2 stream
351 351 parts count: 0
352 352
353 353
354 354 Test buggy input
355 355 ---------------------------------------------------
356 356
357 357 empty parameter name
358 358
359 359 $ hg bundle2 --param '' --quiet
360 360 abort: empty parameter name
361 361 [255]
362 362
363 363 bad parameter name
364 364
365 365 $ hg bundle2 --param 42babar
366 366 abort: non letter first character: '42babar'
367 367 [255]
368 368
369 369
370 370 Test part
371 371 =================
372 372
373 373 $ hg bundle2 --parts ../parts.hg2 --debug
374 374 start emission of HG2X stream
375 375 bundle parameter:
376 376 start of parts
377 377 bundle part: "test:empty"
378 378 bundle part: "test:empty"
379 379 bundle part: "test:song"
380 380 bundle part: "test:debugreply"
381 381 bundle part: "test:math"
382 382 bundle part: "test:song"
383 383 bundle part: "test:ping"
384 384 end of bundle
385 385
386 386 $ cat ../parts.hg2
387 387 HG2X\x00\x00\x00\x11 (esc)
388 388 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
389 389 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
390 390 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
391 391 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
392 392
393 393
394 394 $ hg statbundle2 < ../parts.hg2
395 395 options count: 0
396 396 :test:empty:
397 397 mandatory: 0
398 398 advisory: 0
399 399 payload: 0 bytes
400 400 :test:empty:
401 401 mandatory: 0
402 402 advisory: 0
403 403 payload: 0 bytes
404 404 :test:song:
405 405 mandatory: 0
406 406 advisory: 0
407 407 payload: 178 bytes
408 408 :test:debugreply:
409 409 mandatory: 0
410 410 advisory: 0
411 411 payload: 0 bytes
412 412 :test:math:
413 413 mandatory: 2
414 414 advisory: 1
415 415 payload: 2 bytes
416 416 :test:song:
417 417 mandatory: 1
418 418 advisory: 0
419 419 payload: 0 bytes
420 420 :test:ping:
421 421 mandatory: 0
422 422 advisory: 0
423 423 payload: 0 bytes
424 424 parts count: 7
425 425
426 426 $ hg statbundle2 --debug < ../parts.hg2
427 427 start processing of HG2X stream
428 428 reading bundle2 stream parameters
429 429 options count: 0
430 430 start extraction of bundle2 parts
431 431 part header size: 17
432 432 part type: "test:empty"
433 433 part id: "0"
434 434 part parameters: 0
435 435 :test:empty:
436 436 mandatory: 0
437 437 advisory: 0
438 438 payload chunk size: 0
439 439 payload: 0 bytes
440 440 part header size: 17
441 441 part type: "test:empty"
442 442 part id: "1"
443 443 part parameters: 0
444 444 :test:empty:
445 445 mandatory: 0
446 446 advisory: 0
447 447 payload chunk size: 0
448 448 payload: 0 bytes
449 449 part header size: 16
450 450 part type: "test:song"
451 451 part id: "2"
452 452 part parameters: 0
453 453 :test:song:
454 454 mandatory: 0
455 455 advisory: 0
456 456 payload chunk size: 178
457 457 payload chunk size: 0
458 458 payload: 178 bytes
459 459 part header size: 22
460 460 part type: "test:debugreply"
461 461 part id: "3"
462 462 part parameters: 0
463 463 :test:debugreply:
464 464 mandatory: 0
465 465 advisory: 0
466 466 payload chunk size: 0
467 467 payload: 0 bytes
468 468 part header size: 43
469 469 part type: "test:math"
470 470 part id: "4"
471 471 part parameters: 3
472 472 :test:math:
473 473 mandatory: 2
474 474 advisory: 1
475 475 payload chunk size: 2
476 476 payload chunk size: 0
477 477 payload: 2 bytes
478 478 part header size: 29
479 479 part type: "test:song"
480 480 part id: "5"
481 481 part parameters: 1
482 482 :test:song:
483 483 mandatory: 1
484 484 advisory: 0
485 485 payload chunk size: 0
486 486 payload: 0 bytes
487 487 part header size: 16
488 488 part type: "test:ping"
489 489 part id: "6"
490 490 part parameters: 0
491 491 :test:ping:
492 492 mandatory: 0
493 493 advisory: 0
494 494 payload chunk size: 0
495 495 payload: 0 bytes
496 496 part header size: 0
497 497 end of bundle2 stream
498 498 parts count: 7
499 499
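The raw dump and the --debug trace above also pin down the part header
layout: a type-length byte, the type name, a 32-bit part id, one-byte
mandatory and advisory parameter counts, a (key size, value size) byte
pair per parameter, then the concatenated keys and values. A sketch of a
decoder for one header blob (the bytes following the header-size field
seen in the dumps), again only an illustration of the layout:

    import struct

    def readpartheader(header):
        typelen = ord(header[0])
        parttype = header[1:1 + typelen]
        offset = 1 + typelen
        partid = struct.unpack('>I', header[offset:offset + 4])[0]
        offset += 4
        mancount, advcount = struct.unpack('>BB', header[offset:offset + 2])
        offset += 2
        nbparams = mancount + advcount
        sizes = struct.unpack('>' + 'BB' * nbparams,
                              header[offset:offset + 2 * nbparams])
        offset += 2 * nbparams
        params = []
        for i in xrange(nbparams):
            ksize, vsize = sizes[2 * i], sizes[2 * i + 1]
            params.append((header[offset:offset + ksize],
                           header[offset + ksize:offset + ksize + vsize]))
            offset += ksize + vsize
        return parttype, partid, params[:mancount], params[mancount:]

On the 43-byte "test:math" header this returns the two mandatory
parameters ('pi', '3.14') and ('e', '2.72') plus the advisory
('cooking', 'raw'), matching the counts reported above.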
500 500 Test actual unbundling of test part
501 501 =======================================
502 502
503 503 Process the bundle
504 504
505 505 $ hg unbundle2 --debug < ../parts.hg2
506 506 start processing of HG2X stream
507 507 reading bundle2 stream parameters
508 508 start extraction of bundle2 parts
509 509 part header size: 17
510 510 part type: "test:empty"
511 511 part id: "0"
512 512 part parameters: 0
513 513 ignoring unsupported advisory part test:empty
514 514 payload chunk size: 0
515 515 part header size: 17
516 516 part type: "test:empty"
517 517 part id: "1"
518 518 part parameters: 0
519 519 ignoring unsupported advisory part test:empty
520 520 payload chunk size: 0
521 521 part header size: 16
522 522 part type: "test:song"
523 523 part id: "2"
524 524 part parameters: 0
525 525 found a handler for part 'test:song'
526 526 The choir starts singing:
527 527 payload chunk size: 178
528 528 payload chunk size: 0
529 529 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
530 530 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
531 531 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
532 532 part header size: 22
533 533 part type: "test:debugreply"
534 534 part id: "3"
535 535 part parameters: 0
536 536 found a handler for part 'test:debugreply'
537 537 debugreply: no reply
538 538 payload chunk size: 0
539 539 part header size: 43
540 540 part type: "test:math"
541 541 part id: "4"
542 542 part parameters: 3
543 543 ignoring unsupported advisory part test:math
544 544 payload chunk size: 2
545 545 payload chunk size: 0
546 546 part header size: 29
547 547 part type: "test:song"
548 548 part id: "5"
549 549 part parameters: 1
550 550 found a handler for part 'test:song'
551 551 ignoring unsupported advisory part test:song - randomparam
552 552 payload chunk size: 0
553 553 part header size: 16
554 554 part type: "test:ping"
555 555 part id: "6"
556 556 part parameters: 0
557 557 found a handler for part 'test:ping'
558 558 received ping request (id 6)
559 559 payload chunk size: 0
560 560 part header size: 0
561 561 end of bundle2 stream
562 562 0 unread bytes
563 563 3 total verses sung
564 564
565 565 Unbundle with an unknown mandatory part
566 566 (should abort)
567 567
568 568 $ hg bundle2 --parts --unknown ../unknown.hg2
569 569
570 570 $ hg unbundle2 < ../unknown.hg2
571 571 The choir starts singing:
572 572 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
573 573 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
574 574 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
575 575 debugreply: no reply
576 576 0 unread bytes
577 577 abort: missing support for test:unknown
578 578 [255]
579 579
580 580 Unbundle with an unknown mandatory part parameter
581 581 (should abort)
582 582
583 583 $ hg bundle2 --unknownparams ../unknown.hg2
584 584
585 585 $ hg unbundle2 < ../unknown.hg2
586 586 0 unread bytes
587 587 abort: missing support for test:song - randomparams
588 588 [255]
589 589
590 590 unbundle with a reply
591 591
592 592 $ hg bundle2 --parts --reply ../parts-reply.hg2
593 593 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
594 594 0 unread bytes
595 595 3 total verses sung
596 596
597 597 The reply is a bundle
598 598
599 599 $ cat ../reply.hg2
600 600 HG2X\x00\x00\x00\x1f (esc)
601 601 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
602 602 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
603 603 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
604 604 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
605 605 \x00\x00\x00\x00\x00\x1f (esc)
606 606 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
607 607 debugreply: 'city=!'
608 608 debugreply: 'celeste,ville'
609 609 debugreply: 'elephants'
610 610 debugreply: 'babar'
611 611 debugreply: 'celeste'
612 612 debugreply: 'ping-pong'
613 613 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
614 614 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
615 615 replying to ping request (id 7)
616 616 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
617 617
618 618 The reply is valid
619 619
620 620 $ hg statbundle2 < ../reply.hg2
621 621 options count: 0
622 622 :b2x:output:
623 623 mandatory: 0
624 624 advisory: 1
625 625 payload: 217 bytes
626 626 :b2x:output:
627 627 mandatory: 0
628 628 advisory: 1
629 629 payload: 201 bytes
630 630 :test:pong:
631 631 mandatory: 1
632 632 advisory: 0
633 633 payload: 0 bytes
634 634 :b2x:output:
635 635 mandatory: 0
636 636 advisory: 1
637 637 payload: 61 bytes
638 638 parts count: 4
639 639
640 640 Unbundle the reply to get the output:
641 641
642 642 $ hg unbundle2 < ../reply.hg2
643 643 remote: The choir starts singing:
644 644 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
645 645 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
646 646 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
647 647 remote: debugreply: capabilities:
648 648 remote: debugreply: 'city=!'
649 649 remote: debugreply: 'celeste,ville'
650 650 remote: debugreply: 'elephants'
651 651 remote: debugreply: 'babar'
652 652 remote: debugreply: 'celeste'
653 653 remote: debugreply: 'ping-pong'
654 654 remote: received ping request (id 7)
655 655 remote: replying to ping request (id 7)
656 656 0 unread bytes
657 657
658 658 Test push race detection
659 659
660 660 $ hg bundle2 --pushrace ../part-race.hg2
661 661
662 662 $ hg unbundle2 < ../part-race.hg2
663 663 0 unread bytes
664 664 abort: push race: repository changed while pushing - please try again
665 665 [255]
666 666
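The b2x:check:heads part carries the pusher's view of the remote heads as
consecutive 20-byte binary nodes; the receiving side compares them with its
actual heads and flags a push race on mismatch, which is why the bogus
20-byte payload above aborts. An illustrative handler, not the one shipped
in mercurial.bundle2:

    from mercurial import error

    def checkheadshandler(op, part):
        # split the payload into 20-byte nodes and compare with reality
        data = part.read()
        expected = [data[i:i + 20] for i in range(0, len(data), 20)]
        if sorted(expected) != sorted(op.repo.heads()):
            raise error.PushRaced('repository changed while pushing - '
                                  'please try again')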
667 667 Support for changegroup
668 668 ===================================
669 669
670 670 $ hg unbundle $TESTDIR/bundles/rebase.hg
671 671 adding changesets
672 672 adding manifests
673 673 adding file changes
674 674 added 8 changesets with 7 changes to 7 files (+3 heads)
675 675 (run 'hg heads' to see heads, 'hg merge' to merge)
676 676
677 677 $ hg log -G
678 678 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
679 679 |
680 680 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
681 681 |/|
682 682 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
683 683 | |
684 684 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
685 685 |/
686 686 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
687 687 | |
688 688 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
689 689 | |
690 690 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
691 691 |/
692 692 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
693 693
694 694 @ 0:3903775176ed draft test a
695 695
696 696
697 697 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
698 698 4 changesets found
699 699 list of changesets:
700 700 32af7686d403cf45b5d95f2d70cebea587ac806a
701 701 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
702 702 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
703 703 02de42196ebee42ef284b6780a87cdc96e8eaab6
704 704 start emission of HG2X stream
705 705 bundle parameter:
706 706 start of parts
707 707 bundle part: "b2x:changegroup"
708 708 bundling: 1/4 changesets (25.00%)
709 709 bundling: 2/4 changesets (50.00%)
710 710 bundling: 3/4 changesets (75.00%)
711 711 bundling: 4/4 changesets (100.00%)
712 712 bundling: 1/4 manifests (25.00%)
713 713 bundling: 2/4 manifests (50.00%)
714 714 bundling: 3/4 manifests (75.00%)
715 715 bundling: 4/4 manifests (100.00%)
716 716 bundling: D 1/3 files (33.33%)
717 717 bundling: E 2/3 files (66.67%)
718 718 bundling: H 3/3 files (100.00%)
719 719 end of bundle
720 720
721 721 $ cat ../rev.hg2
722 722 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
723 723 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
724 724 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
725 725 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
726 726 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
727 727 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 728 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
729 729 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
730 730 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
731 731 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
732 732 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
733 733 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
734 734 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
735 735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
736 736 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
737 737 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
738 738 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
739 739 l\r (no-eol) (esc)
740 740 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
741 741 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
742 742 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
743 743 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
744 744
745 745 $ hg unbundle2 < ../rev.hg2
746 746 adding changesets
747 747 adding manifests
748 748 adding file changes
749 749 added 0 changesets with 0 changes to 3 files
750 750 0 unread bytes
751 751 addchangegroup return: 1
752 752
753 753 with reply
754 754
755 755 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
756 756 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
757 757 0 unread bytes
758 758 addchangegroup return: 1
759 759
760 760 $ cat ../rev-reply.hg2
761 761 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
762 762 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
763 763 adding manifests
764 764 adding file changes
765 765 added 0 changesets with 0 changes to 3 files
766 766 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
767 767
768 768 $ cd ..
769 769
770 770 Real world exchange
771 771 =====================
772 772
773 773 Add more obsolescence information
774 774
775 775 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
776 776 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
777 777
778 778 clone --pull
779 779
780 780 $ hg -R main phase --public cd010b8cd998
781 781 $ hg clone main other --pull --rev 9520eea781bc
782 782 adding changesets
783 783 adding manifests
784 784 adding file changes
785 785 added 2 changesets with 2 changes to 2 files
786 786 1 new obsolescence markers
787 787 updating to branch default
788 788 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
789 789 $ hg -R other log -G
790 790 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
791 791 |
792 792 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
793 793
794 794 $ hg -R other debugobsolete
795 795 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
796 796
797 797 pull
798 798
799 799 $ hg -R main phase --public 9520eea781bc
800 800 $ hg -R other pull -r 24b6387c8c8c
801 801 pulling from $TESTTMP/main (glob)
802 802 searching for changes
803 803 adding changesets
804 804 adding manifests
805 805 adding file changes
806 806 added 1 changesets with 1 changes to 1 files (+1 heads)
807 807 1 new obsolescence markers
808 808 (run 'hg heads' to see heads, 'hg merge' to merge)
809 809 $ hg -R other log -G
810 810 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
811 811 |
812 812 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
813 813 |/
814 814 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
815 815
816 816 $ hg -R other debugobsolete
817 817 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
818 818 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
819 819
820 820 pull empty (with phase movement)
821 821
822 822 $ hg -R main phase --public 24b6387c8c8c
823 823 $ hg -R other pull -r 24b6387c8c8c
824 824 pulling from $TESTTMP/main (glob)
825 825 no changes found
826 826 $ hg -R other log -G
827 827 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
828 828 |
829 829 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
830 830 |/
831 831 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
832 832
833 833 $ hg -R other debugobsolete
834 834 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
835 835 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
836 836
837 837 pull empty
838 838
839 839 $ hg -R other pull -r 24b6387c8c8c
840 840 pulling from $TESTTMP/main (glob)
841 841 no changes found
842 842 $ hg -R other log -G
843 843 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
844 844 |
845 845 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
846 846 |/
847 847 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
848 848
849 849 $ hg -R other debugobsolete
850 850 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
851 851 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
852 852
853 853 add extra data to test their exchange during push
854 854
855 855 $ hg -R main bookmark --rev eea13746799a book_eea1
856 856 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
857 857 $ hg -R main bookmark --rev 02de42196ebe book_02de
858 858 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
859 859 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
860 860 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
861 861 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
862 862 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
863 863 $ hg -R main bookmark --rev 32af7686d403 book_32af
864 864 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
865 865
866 866 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
867 867 $ hg -R other bookmark --rev cd010b8cd998 book_02de
868 868 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
869 869 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
870 870 $ hg -R other bookmark --rev cd010b8cd998 book_32af
871 871
872 872 $ hg -R main phase --public eea13746799a
873 873
874 874 push
875 875 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
876 876 pushing to other
877 877 searching for changes
878 878 remote: adding changesets
879 879 remote: adding manifests
880 880 remote: adding file changes
881 881 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
882 882 remote: 1 new obsolescence markers
883 883 updating bookmark book_eea1
884 exporting bookmark book_eea1
885 884 $ hg -R other log -G
886 885 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
887 886 |\
888 887 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
889 888 | |
890 889 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
891 890 |/
892 891 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
893 892
894 893 $ hg -R other debugobsolete
895 894 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
896 895 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
897 896 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
898 897
899 898 pull over ssh
900 899
901 900 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
902 901 pulling from ssh://user@dummy/main
903 902 searching for changes
904 903 adding changesets
905 904 adding manifests
906 905 adding file changes
907 906 added 1 changesets with 1 changes to 1 files (+1 heads)
908 907 1 new obsolescence markers
909 908 updating bookmark book_02de
910 909 importing bookmark book_02de
911 910 (run 'hg heads' to see heads, 'hg merge' to merge)
912 911 $ hg -R other debugobsolete
913 912 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
914 913 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
915 914 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
916 915 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 916
918 917 pull over http
919 918
920 919 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
921 920 $ cat main.pid >> $DAEMON_PIDS
922 921
923 922 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
924 923 pulling from http://localhost:$HGPORT/
925 924 searching for changes
926 925 adding changesets
927 926 adding manifests
928 927 adding file changes
929 928 added 1 changesets with 1 changes to 1 files (+1 heads)
930 929 1 new obsolescence markers
931 930 updating bookmark book_42cc
932 931 importing bookmark book_42cc
933 932 (run 'hg heads .' to see heads, 'hg merge' to merge)
934 933 $ cat main-error.log
935 934 $ hg -R other debugobsolete
936 935 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
937 936 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
938 937 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
939 938 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
940 939 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
941 940
942 941 push over ssh
943 942
944 943 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
945 944 pushing to ssh://user@dummy/other
946 945 searching for changes
947 946 remote: adding changesets
948 947 remote: adding manifests
949 948 remote: adding file changes
950 949 remote: added 1 changesets with 1 changes to 1 files
951 950 remote: 1 new obsolescence markers
952 951 updating bookmark book_5fdd
953 exporting bookmark book_5fdd
954 952 $ hg -R other log -G
955 953 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
956 954 |
957 955 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
958 956 |
959 957 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
960 958 | |
961 959 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
962 960 | |/|
963 961 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
964 962 |/ /
965 963 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
966 964 |/
967 965 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
968 966
969 967 $ hg -R other debugobsolete
970 968 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
971 969 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
972 970 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
973 971 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
974 972 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
975 973 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
976 974
977 975 push over http
978 976
979 977 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
980 978 $ cat other.pid >> $DAEMON_PIDS
981 979
982 980 $ hg -R main phase --public 32af7686d403
983 981 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
984 982 pushing to http://localhost:$HGPORT2/
985 983 searching for changes
986 984 remote: adding changesets
987 985 remote: adding manifests
988 986 remote: adding file changes
989 987 remote: added 1 changesets with 1 changes to 1 files
990 988 remote: 1 new obsolescence markers
991 989 updating bookmark book_32af
992 exporting bookmark book_32af
993 990 $ cat other-error.log
994 991
995 992 Check final content.
996 993
997 994 $ hg -R other log -G
998 995 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
999 996 |
1000 997 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
1001 998 |
1002 999 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
1003 1000 |
1004 1001 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
1005 1002 | |
1006 1003 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
1007 1004 | |/|
1008 1005 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
1009 1006 |/ /
1010 1007 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
1011 1008 |/
1012 1009 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
1013 1010
1014 1011 $ hg -R other debugobsolete
1015 1012 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1016 1013 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1017 1014 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1018 1015 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1019 1016 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1020 1017 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1021 1018 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1022 1019
1023 1020 Error Handling
1024 1021 ==============
1025 1022
1026 1023 Check that errors are properly returned to the client during push.
1027 1024
1028 1025 Setting up
1029 1026
1030 1027 $ cat > failpush.py << EOF
1031 1028 > """A small extension that makes push fails when using bundle2
1032 1029 >
1033 1030 > used to test error handling in bundle2
1034 1031 > """
1035 1032 >
1036 1033 > from mercurial import util
1037 1034 > from mercurial import bundle2
1038 1035 > from mercurial import exchange
1039 1036 > from mercurial import extensions
1040 1037 >
1041 1038 > def _pushbundle2failpart(pushop, bundler):
1042 1039 > reason = pushop.ui.config('failpush', 'reason', None)
1043 1040 > part = None
1044 1041 > if reason == 'abort':
1045 1042 > bundler.newpart('test:abort')
1046 1043 > if reason == 'unknown':
1047 1044 > bundler.newpart('TEST:UNKNOWN')
1048 1045 > if reason == 'race':
1049 1046 > # 20 Bytes of crap
1050 1047 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
1051 1048 >
1052 1049 > @bundle2.parthandler("test:abort")
1053 1050 > def handleabort(op, part):
1054 1051 > raise util.Abort('Abandon ship!', hint="don't panic")
1055 1052 >
1056 1053 > def uisetup(ui):
1057 1054 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
1058 1055 > exchange.b2partsgenorder.insert(0, 'failpart')
1059 1056 >
1060 1057 > EOF
1061 1058
1062 1059 $ cd main
1063 1060 $ hg up tip
1064 1061 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
1065 1062 $ echo 'I' > I
1066 1063 $ hg add I
1067 1064 $ hg ci -m 'I'
1068 1065 $ hg id
1069 1066 e7ec4e813ba6 tip
1070 1067 $ cd ..
1071 1068
1072 1069 $ cat << EOF >> $HGRCPATH
1073 1070 > [extensions]
1074 1071 > failpush=$TESTTMP/failpush.py
1075 1072 > EOF
1076 1073
1077 1074 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1078 1075 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1079 1076 $ cat other.pid >> $DAEMON_PIDS
1080 1077
1081 1078 Doing the actual push: Abort error
1082 1079
1083 1080 $ cat << EOF >> $HGRCPATH
1084 1081 > [failpush]
1085 1082 > reason = abort
1086 1083 > EOF
1087 1084
1088 1085 $ hg -R main push other -r e7ec4e813ba6
1089 1086 pushing to other
1090 1087 searching for changes
1091 1088 abort: Abandon ship!
1092 1089 (don't panic)
1093 1090 [255]
1094 1091
1095 1092 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1096 1093 pushing to ssh://user@dummy/other
1097 1094 searching for changes
1098 1095 abort: Abandon ship!
1099 1096 (don't panic)
1100 1097 [255]
1101 1098
1102 1099 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1103 1100 pushing to http://localhost:$HGPORT2/
1104 1101 searching for changes
1105 1102 abort: Abandon ship!
1106 1103 (don't panic)
1107 1104 [255]
1108 1105
1109 1106
1110 1107 Doing the actual push: unknown mandatory parts
1111 1108
1112 1109 $ cat << EOF >> $HGRCPATH
1113 1110 > [failpush]
1114 1111 > reason = unknown
1115 1112 > EOF
1116 1113
1117 1114 $ hg -R main push other -r e7ec4e813ba6
1118 1115 pushing to other
1119 1116 searching for changes
1120 1117 abort: missing support for test:unknown
1121 1118 [255]
1122 1119
1123 1120 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1124 1121 pushing to ssh://user@dummy/other
1125 1122 searching for changes
1126 1123 abort: missing support for test:unknown
1127 1124 [255]
1128 1125
1129 1126 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1130 1127 pushing to http://localhost:$HGPORT2/
1131 1128 searching for changes
1132 1129 abort: missing support for test:unknown
1133 1130 [255]
1134 1131
1135 1132 Doing the actual push: race
1136 1133
1137 1134 $ cat << EOF >> $HGRCPATH
1138 1135 > [failpush]
1139 1136 > reason = race
1140 1137 > EOF
1141 1138
1142 1139 $ hg -R main push other -r e7ec4e813ba6
1143 1140 pushing to other
1144 1141 searching for changes
1145 1142 abort: push failed:
1146 1143 'repository changed while pushing - please try again'
1147 1144 [255]
1148 1145
1149 1146 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1150 1147 pushing to ssh://user@dummy/other
1151 1148 searching for changes
1152 1149 abort: push failed:
1153 1150 'repository changed while pushing - please try again'
1154 1151 [255]
1155 1152
1156 1153 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1157 1154 pushing to http://localhost:$HGPORT2/
1158 1155 searching for changes
1159 1156 abort: push failed:
1160 1157 'repository changed while pushing - please try again'
1161 1158 [255]
1162 1159
1163 1160 Doing the actual push: hook abort
1164 1161
1165 1162 $ cat << EOF >> $HGRCPATH
1166 1163 > [failpush]
1167 1164 > reason =
1168 1165 > [hooks]
1169 1166 > b2x-pretransactionclose.failpush = false
1170 1167 > EOF
1171 1168
1172 1169 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1173 1170 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1174 1171 $ cat other.pid >> $DAEMON_PIDS
1175 1172
1176 1173 $ hg -R main push other -r e7ec4e813ba6
1177 1174 pushing to other
1178 1175 searching for changes
1179 1176 transaction abort!
1180 1177 rollback completed
1181 1178 abort: b2x-pretransactionclose.failpush hook exited with status 1
1182 1179 [255]
1183 1180
1184 1181 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1185 1182 pushing to ssh://user@dummy/other
1186 1183 searching for changes
1187 1184 abort: b2x-pretransactionclose.failpush hook exited with status 1
1188 1185 remote: transaction abort!
1189 1186 remote: rollback completed
1190 1187 [255]
1191 1188
1192 1189 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1193 1190 pushing to http://localhost:$HGPORT2/
1194 1191 searching for changes
1195 1192 abort: b2x-pretransactionclose.failpush hook exited with status 1
1196 1193 [255]
1197 1194
1198 1195
@@ -1,646 +1,644
1 1 commit hooks can see env vars
2 2
3 3 $ hg init a
4 4 $ cd a
5 5 $ cat > .hg/hgrc <<EOF
6 6 > [hooks]
7 7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
8 8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
9 9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
10 10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
11 11 > pretxncommit.tip = hg -q tip
12 12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
13 13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
14 14 > post-cat = python "$TESTDIR/printenv.py" post-cat
15 15 > EOF
16 16 $ echo a > a
17 17 $ hg add a
18 18 $ hg commit -m a
19 19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 21 0:cb9a9f314b8b
22 22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24 24
25 25 $ hg clone . ../b
26 26 updating to branch default
27 27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 28 $ cd ../b
29 29
30 30 changegroup hooks can see env vars
31 31
32 32 $ cat > .hg/hgrc <<EOF
33 33 > [hooks]
34 34 > prechangegroup = python "$TESTDIR/printenv.py" prechangegroup
35 35 > changegroup = python "$TESTDIR/printenv.py" changegroup
36 36 > incoming = python "$TESTDIR/printenv.py" incoming
37 37 > EOF
38 38
39 39 pretxncommit and commit hooks can see both parents of merge
40 40
41 41 $ cd ../a
42 42 $ echo b >> a
43 43 $ hg commit -m a1 -d "1 0"
44 44 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
45 45 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
46 46 1:ab228980c14d
47 47 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
48 48 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
49 49 $ hg update -C 0
50 50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
51 51 $ echo b > b
52 52 $ hg add b
53 53 $ hg commit -m b -d '1 0'
54 54 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
55 55 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
56 56 2:ee9deb46ab31
57 57 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
58 58 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
59 59 created new head
60 60 $ hg merge 1
61 61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 62 (branch merge, don't forget to commit)
63 63 $ hg commit -m merge -d '2 0'
64 64 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
65 65 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
66 66 3:07f3376c1e65
67 67 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
68 68 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
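The external hooks used above (the sh -c / printenv.py entries in .hg/hgrc) receive their context purely through HG_* environment variables: precommit sees only the parents, pretxncommit and commit additionally see HG_NODE, and HG_PARENT2 is set only for merges. Any executable can serve as such a hook, and a nonzero exit status from a pre* hook aborts the operation. As a minimal sketch (hypothetical file name and config key, not part of this test), a standalone script in the spirit of printenv.py that vetoes merge commits could look like:

  # checkparents.py - hypothetical external pretxncommit hook; wire it up with
  #   pretxncommit.checkparents = python "/path/to/checkparents.py"
  import os
  import sys

  node = os.environ.get('HG_NODE', '')        # changeset being committed
  parent1 = os.environ.get('HG_PARENT1', '')  # first parent
  parent2 = os.environ.get('HG_PARENT2', '')  # second parent, set only for merges

  print 'checking %s (parents %s %s)' % (node, parent1, parent2)
  if parent2:
      sys.exit(1)   # nonzero -> "abort: ... hook exited with status 1"
  sys.exit(0)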
69 69
70 70 test generic hooks
71 71
72 72 $ hg id
73 73 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
74 74 abort: pre-identify hook exited with status 1
75 75 [255]
76 76 $ hg cat b
77 77 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
78 78 b
79 79 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
80 80
81 81 $ cd ../b
82 82 $ hg pull ../a
83 83 pulling from ../a
84 84 searching for changes
85 85 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
86 86 adding changesets
87 87 adding manifests
88 88 adding file changes
89 89 added 3 changesets with 2 changes to 2 files
90 90 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 91 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 92 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
93 93 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
94 94 (run 'hg update' to get a working copy)
95 95
96 96 tag hooks can see env vars
97 97
98 98 $ cd ../a
99 99 $ cat >> .hg/hgrc <<EOF
100 100 > pretag = python "$TESTDIR/printenv.py" pretag
101 101 > tag = sh -c "HG_PARENT1= HG_PARENT2= python \"$TESTDIR/printenv.py\" tag"
102 102 > EOF
103 103 $ hg tag -d '3 0' a
104 104 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
105 105 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 106 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
107 107 4:539e4b31b6dc
108 108 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
109 109 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
110 110 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
111 111 $ hg tag -l la
112 112 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
113 113 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
114 114
115 115 pretag hook can forbid tagging
116 116
117 117 $ echo "pretag.forbid = python \"$TESTDIR/printenv.py\" pretag.forbid 1" >> .hg/hgrc
118 118 $ hg tag -d '4 0' fa
119 119 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
120 120 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
121 121 abort: pretag.forbid hook exited with status 1
122 122 [255]
123 123 $ hg tag -l fla
124 124 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
125 125 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
126 126 abort: pretag.forbid hook exited with status 1
127 127 [255]
128 128
129 129 pretxncommit hook can see the changeset and can roll back the
130 130 transaction; the changeset is gone afterwards
131 131
132 132 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
133 133 $ echo "pretxncommit.forbid1 = python \"$TESTDIR/printenv.py\" pretxncommit.forbid 1" >> .hg/hgrc
134 134 $ echo z > z
135 135 $ hg add z
136 136 $ hg -q tip
137 137 4:539e4b31b6dc
138 138 $ hg commit -m 'fail' -d '4 0'
139 139 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
140 140 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
141 141 5:6f611f8018c1
142 142 5:6f611f8018c1
143 143 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
144 144 transaction abort!
145 145 rollback completed
146 146 abort: pretxncommit.forbid1 hook exited with status 1
147 147 [255]
148 148 $ hg -q tip
149 149 4:539e4b31b6dc
150 150
151 151 precommit hook can prevent commit
152 152
153 153 $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
154 154 $ hg commit -m 'fail' -d '4 0'
155 155 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
156 156 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
157 157 abort: precommit.forbid hook exited with status 1
158 158 [255]
159 159 $ hg -q tip
160 160 4:539e4b31b6dc
161 161
162 162 preupdate hook can prevent update
163 163
164 164 $ echo "preupdate = python \"$TESTDIR/printenv.py\" preupdate" >> .hg/hgrc
165 165 $ hg update 1
166 166 preupdate hook: HG_PARENT1=ab228980c14d
167 167 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
168 168
169 169 update hook
170 170
171 171 $ echo "update = python \"$TESTDIR/printenv.py\" update" >> .hg/hgrc
172 172 $ hg update
173 173 preupdate hook: HG_PARENT1=539e4b31b6dc
174 174 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
175 175 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
176 176
177 177 pushkey hook
178 178
179 179 $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey" >> .hg/hgrc
180 180 $ cd ../b
181 181 $ hg bookmark -r null foo
182 182 $ hg push -B foo ../a
183 183 pushing to ../a
184 184 searching for changes
185 185 no changes found
186 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
186 187 exporting bookmark foo
187 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
188 188 [1]
189 189 $ cd ../a
190 190
191 191 listkeys hook
192 192
193 193 $ echo "listkeys = python \"$TESTDIR/printenv.py\" listkeys" >> .hg/hgrc
194 194 $ hg bookmark -r null bar
195 195 $ cd ../b
196 196 $ hg pull -B bar ../a
197 197 pulling from ../a
198 198 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
199 199 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
200 200 no changes found
201 201 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
202 202 adding remote bookmark bar
203 203 importing bookmark bar
204 204 $ cd ../a
205 205
206 206 test that prepushkey can prevent incoming keys
207 207
208 208 $ echo "prepushkey = python \"$TESTDIR/printenv.py\" prepushkey.forbid 1" >> .hg/hgrc
209 209 $ cd ../b
210 210 $ hg bookmark -r null baz
211 211 $ hg push -B baz ../a
212 212 pushing to ../a
213 213 searching for changes
214 214 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
215 215 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
216 216 no changes found
217 217 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
218 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
219 exporting bookmark baz
220 218 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
221 219 abort: prepushkey hook exited with status 1
222 220 [255]
223 221 $ cd ../a
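The prepushkey output above shows the key being pushed (HG_KEY), its namespace (HG_NAMESPACE) and the new value (HG_NEW); a nonzero exit rejects that key before it is written. A sketch of a server-side bookmark naming policy built on this (hypothetical file name, config key and 'feature/' prefix):

  # bookmarkpolicy.py - hypothetical prepushkey hook; wire it up with
  #   prepushkey.policy = python "/path/to/bookmarkpolicy.py"
  import os
  import sys

  namespace = os.environ.get('HG_NAMESPACE', '')
  key = os.environ.get('HG_KEY', '')

  if namespace == 'bookmarks' and not key.startswith('feature/'):
      print 'bookmark %s rejected by naming policy' % key
      sys.exit(1)   # "abort: prepushkey hook exited with status 1"
  sys.exit(0)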
224 222
225 223 test that prelistkeys can prevent listing keys
226 224
227 225 $ echo "prelistkeys = python \"$TESTDIR/printenv.py\" prelistkeys.forbid 1" >> .hg/hgrc
228 226 $ hg bookmark -r null quux
229 227 $ cd ../b
230 228 $ hg pull -B quux ../a
231 229 pulling from ../a
232 230 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
233 231 abort: prelistkeys hook exited with status 1
234 232 [255]
235 233 $ cd ../a
236 234 $ rm .hg/hgrc
237 235
238 236 prechangegroup hook can prevent incoming changes
239 237
240 238 $ cd ../b
241 239 $ hg -q tip
242 240 3:07f3376c1e65
243 241 $ cat > .hg/hgrc <<EOF
244 242 > [hooks]
245 243 > prechangegroup.forbid = python "$TESTDIR/printenv.py" prechangegroup.forbid 1
246 244 > EOF
247 245 $ hg pull ../a
248 246 pulling from ../a
249 247 searching for changes
250 248 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
251 249 abort: prechangegroup.forbid hook exited with status 1
252 250 [255]
253 251
254 252 pretxnchangegroup hook can see incoming changes and can roll back the
255 253 transaction; the incoming changes are gone afterwards
256 254
257 255 $ cat > .hg/hgrc <<EOF
258 256 > [hooks]
259 257 > pretxnchangegroup.forbid0 = hg tip -q
260 258 > pretxnchangegroup.forbid1 = python "$TESTDIR/printenv.py" pretxnchangegroup.forbid 1
261 259 > EOF
262 260 $ hg pull ../a
263 261 pulling from ../a
264 262 searching for changes
265 263 adding changesets
266 264 adding manifests
267 265 adding file changes
268 266 added 1 changesets with 1 changes to 1 files
269 267 4:539e4b31b6dc
270 268 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
271 269 transaction abort!
272 270 rollback completed
273 271 abort: pretxnchangegroup.forbid1 hook exited with status 1
274 272 [255]
275 273 $ hg -q tip
276 274 3:07f3376c1e65
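pretxnchangegroup runs inside the transaction once all incoming changesets have been added: HG_NODE is the first new changeset, HG_PENDING lets commands run from the hook (like the hg tip -q above) see the pending changes, and a nonzero exit rolls the whole changegroup back. A sketch of a gatekeeper that only accepts changegroups from trusted sources (hypothetical file name, config key and TRUSTED URL):

  # gatekeeper.py - hypothetical pretxnchangegroup hook; wire it up with
  #   pretxnchangegroup.gatekeeper = python "/path/to/gatekeeper.py"
  import os
  import sys

  TRUSTED = 'https://hg.example.com/'   # assumed trusted remote

  url = os.environ.get('HG_URL', '')
  source = os.environ.get('HG_SOURCE', '')

  print 'incoming changegroup from %s (source %s)' % (url, source)
  if not url.startswith(('file:', TRUSTED)):
      sys.exit(1)   # "transaction abort!" followed by "rollback completed"
  sys.exit(0)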
277 275
278 276 outgoing hooks can see env vars
279 277
280 278 $ rm .hg/hgrc
281 279 $ cat > ../a/.hg/hgrc <<EOF
282 280 > [hooks]
283 281 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
284 282 > outgoing = python "$TESTDIR/printenv.py" outgoing
285 283 > EOF
286 284 $ hg pull ../a
287 285 pulling from ../a
288 286 searching for changes
289 287 preoutgoing hook: HG_SOURCE=pull
290 288 adding changesets
291 289 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
292 290 adding manifests
293 291 adding file changes
294 292 added 1 changesets with 1 changes to 1 files
295 293 adding remote bookmark quux
296 294 (run 'hg update' to get a working copy)
297 295 $ hg rollback
298 296 repository tip rolled back to revision 3 (undo pull)
299 297
300 298 preoutgoing hook can prevent outgoing changes
301 299
302 300 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> ../a/.hg/hgrc
303 301 $ hg pull ../a
304 302 pulling from ../a
305 303 searching for changes
306 304 preoutgoing hook: HG_SOURCE=pull
307 305 preoutgoing.forbid hook: HG_SOURCE=pull
308 306 abort: preoutgoing.forbid hook exited with status 1
309 307 [255]
310 308
311 309 outgoing hooks work for local clones
312 310
313 311 $ cd ..
314 312 $ cat > a/.hg/hgrc <<EOF
315 313 > [hooks]
316 314 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
317 315 > outgoing = python "$TESTDIR/printenv.py" outgoing
318 316 > EOF
319 317 $ hg clone a c
320 318 preoutgoing hook: HG_SOURCE=clone
321 319 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
322 320 updating to branch default
323 321 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
324 322 $ rm -rf c
325 323
326 324 preoutgoing hook can prevent outgoing changes for local clones
327 325
328 326 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> a/.hg/hgrc
329 327 $ hg clone a zzz
330 328 preoutgoing hook: HG_SOURCE=clone
331 329 preoutgoing.forbid hook: HG_SOURCE=clone
332 330 abort: preoutgoing.forbid hook exited with status 1
333 331 [255]
334 332
335 333 $ cd "$TESTTMP/b"
336 334
337 335 $ cat > hooktests.py <<EOF
338 336 > from mercurial import util
339 337 >
340 338 > uncallable = 0
341 339 >
342 340 > def printargs(args):
343 341 > args.pop('ui', None)
344 342 > args.pop('repo', None)
345 343 > a = list(args.items())
346 344 > a.sort()
347 345 > print 'hook args:'
348 346 > for k, v in a:
349 347 > print ' ', k, v
350 348 >
351 349 > def passhook(**args):
352 350 > printargs(args)
353 351 >
354 352 > def failhook(**args):
355 353 > printargs(args)
356 354 > return True
357 355 >
358 356 > class LocalException(Exception):
359 357 > pass
360 358 >
361 359 > def raisehook(**args):
362 360 > raise LocalException('exception from hook')
363 361 >
364 362 > def aborthook(**args):
365 363 > raise util.Abort('raise abort from hook')
366 364 >
367 365 > def brokenhook(**args):
368 366 > return 1 + {}
369 367 >
370 368 > def verbosehook(ui, **args):
371 369 > ui.note('verbose output from hook\n')
372 370 >
373 371 > def printtags(ui, repo, **args):
374 372 > print sorted(repo.tags())
375 373 >
376 374 > class container:
377 375 > unreachable = 1
378 376 > EOF
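hooktests.py illustrates the in-process ("python:") hook convention: the hook is called with keyword arguments (ui, repo, hooktype, plus roughly the same values the external hooks above see as HG_* variables, lowercased and without the prefix), a pre* hook fails by returning a true value or raising util.Abort, and any other exception is reported as "raised an exception", as the tests below show. As a minimal sketch under those assumptions (function name, file path and config key are made up), a pretxncommit hook that insists on a non-empty commit message could look like:

  # msgcheck.py - hypothetical in-process hook; wire it up with
  #   pretxncommit.checkmsg = python:/path/to/msgcheck.py:checkmsg
  from mercurial import util

  def checkmsg(ui, repo, hooktype, node=None, **kwargs):
      # For pretxncommit the new changeset is passed as 'node' and is already
      # visible through the repo, so its description can be inspected.
      ctx = repo[node]
      if not ctx.description().strip():
          # A true return value (or util.Abort) fails the hook and rolls the
          # transaction back, just like pretxncommit.forbid1 earlier.
          raise util.Abort('commit message must not be empty')
      ui.note('commit message ok for %s\n' % node)
      return False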
379 377
380 378 test python hooks
381 379
382 380 #if windows
383 381 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
384 382 #else
385 383 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
386 384 #endif
387 385 $ export PYTHONPATH
388 386
389 387 $ echo '[hooks]' > ../a/.hg/hgrc
390 388 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
391 389 $ hg pull ../a 2>&1 | grep 'raised an exception'
392 390 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
393 391
394 392 $ echo '[hooks]' > ../a/.hg/hgrc
395 393 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
396 394 $ hg pull ../a 2>&1 | grep 'raised an exception'
397 395 error: preoutgoing.raise hook raised an exception: exception from hook
398 396
399 397 $ echo '[hooks]' > ../a/.hg/hgrc
400 398 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
401 399 $ hg pull ../a
402 400 pulling from ../a
403 401 searching for changes
404 402 error: preoutgoing.abort hook failed: raise abort from hook
405 403 abort: raise abort from hook
406 404 [255]
407 405
408 406 $ echo '[hooks]' > ../a/.hg/hgrc
409 407 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
410 408 $ hg pull ../a
411 409 pulling from ../a
412 410 searching for changes
413 411 hook args:
414 412 hooktype preoutgoing
415 413 source pull
416 414 abort: preoutgoing.fail hook failed
417 415 [255]
418 416
419 417 $ echo '[hooks]' > ../a/.hg/hgrc
420 418 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
421 419 $ hg pull ../a
422 420 pulling from ../a
423 421 searching for changes
424 422 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
425 423 [255]
426 424
427 425 $ echo '[hooks]' > ../a/.hg/hgrc
428 426 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
429 427 $ hg pull ../a
430 428 pulling from ../a
431 429 searching for changes
432 430 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
433 431 [255]
434 432
435 433 $ echo '[hooks]' > ../a/.hg/hgrc
436 434 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
437 435 $ hg pull ../a
438 436 pulling from ../a
439 437 searching for changes
440 438 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
441 439 [255]
442 440
443 441 $ echo '[hooks]' > ../a/.hg/hgrc
444 442 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
445 443 $ hg pull ../a
446 444 pulling from ../a
447 445 searching for changes
448 446 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
449 447 [255]
450 448
451 449 $ echo '[hooks]' > ../a/.hg/hgrc
452 450 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
453 451 $ hg pull ../a
454 452 pulling from ../a
455 453 searching for changes
456 454 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
457 455 [255]
458 456
459 457 $ echo '[hooks]' > ../a/.hg/hgrc
460 458 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
461 459 $ hg pull ../a
462 460 pulling from ../a
463 461 searching for changes
464 462 hook args:
465 463 hooktype preoutgoing
466 464 source pull
467 465 adding changesets
468 466 adding manifests
469 467 adding file changes
470 468 added 1 changesets with 1 changes to 1 files
471 469 adding remote bookmark quux
472 470 (run 'hg update' to get a working copy)
473 471
474 472 make sure --traceback works
475 473
476 474 $ echo '[hooks]' > .hg/hgrc
477 475 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
478 476
479 477 $ echo aa > a
480 478 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
481 479 Traceback (most recent call last):
482 480
483 481 $ cd ..
484 482 $ hg init c
485 483 $ cd c
486 484
487 485 $ cat > hookext.py <<EOF
488 486 > def autohook(**args):
489 487 > print "Automatically installed hook"
490 488 >
491 489 > def reposetup(ui, repo):
492 490 > repo.ui.setconfig("hooks", "commit.auto", autohook)
493 491 > EOF
494 492 $ echo '[extensions]' >> .hg/hgrc
495 493 $ echo 'hookext = hookext.py' >> .hg/hgrc
496 494
497 495 $ touch foo
498 496 $ hg add foo
499 497 $ hg ci -d '0 0' -m 'add foo'
500 498 Automatically installed hook
501 499 $ echo >> foo
502 500 $ hg ci --debug -d '0 0' -m 'change foo'
503 501 foo
504 502 calling hook commit.auto: hgext_hookext.autohook
505 503 Automatically installed hook
506 504 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
507 505
508 506 $ hg showconfig hooks
509 507 hooks.commit.auto=<function autohook at *> (glob)
510 508
511 509 test python hook configured with python:[file]:[hook] syntax
512 510
513 511 $ cd ..
514 512 $ mkdir d
515 513 $ cd d
516 514 $ hg init repo
517 515 $ mkdir hooks
518 516
519 517 $ cd hooks
520 518 $ cat > testhooks.py <<EOF
521 519 > def testhook(**args):
522 520 > print 'hook works'
523 521 > EOF
524 522 $ echo '[hooks]' > ../repo/.hg/hgrc
525 523 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
526 524
527 525 $ cd ../repo
528 526 $ hg commit -d '0 0'
529 527 hook works
530 528 nothing changed
531 529 [1]
532 530
533 531 $ echo '[hooks]' > .hg/hgrc
534 532 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
535 533 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
536 534
537 535 $ hg up null
538 536 loading update.ne hook failed:
539 537 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
540 538 [255]
541 539
542 540 $ hg id
543 541 loading pre-identify.npmd hook failed:
544 542 abort: No module named repo!
545 543 [255]
546 544
547 545 $ cd ../../b
548 546
549 547 make sure --traceback works on hook import failure
550 548
551 549 $ cat > importfail.py <<EOF
552 550 > import somebogusmodule
553 551 > # dereference something in the module to force demandimport to load it
554 552 > somebogusmodule.whatever
555 553 > EOF
556 554
557 555 $ echo '[hooks]' > .hg/hgrc
558 556 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
559 557
560 558 $ echo a >> a
561 559 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
562 560 exception from first failed import attempt:
563 561 Traceback (most recent call last):
564 562 ImportError: No module named somebogusmodule
565 563 exception from second failed import attempt:
566 564 Traceback (most recent call last):
567 565 ImportError: No module named hgext_importfail
568 566 Traceback (most recent call last):
569 567 Abort: precommit.importfail hook is invalid (import of "importfail" failed)
570 568 abort: precommit.importfail hook is invalid (import of "importfail" failed)
571 569
572 570 Issue1827: Hooks Update & Commit not completely post operation
573 571
574 572 commit and update hooks should run after command completion
575 573
576 574 $ echo '[hooks]' > .hg/hgrc
577 575 $ echo 'commit = hg id' >> .hg/hgrc
578 576 $ echo 'update = hg id' >> .hg/hgrc
579 577 $ echo bb > a
580 578 $ hg ci -ma
581 579 223eafe2750c tip
582 580 $ hg up 0
583 581 cb9a9f314b8b
584 582 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
585 583
586 584 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
587 585 that is passed to pre/post hooks
588 586
589 587 $ echo '[hooks]' > .hg/hgrc
590 588 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
591 589 $ hg id
592 590 cb9a9f314b8b
593 591 $ hg id --verbose
594 592 calling hook pre-identify: hooktests.verbosehook
595 593 verbose output from hook
596 594 cb9a9f314b8b
597 595
598 596 Ensure hooks can be prioritized
599 597
600 598 $ echo '[hooks]' > .hg/hgrc
601 599 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
602 600 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
603 601 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
604 602 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
605 603 $ hg id --verbose
606 604 calling hook pre-identify.b: hooktests.verbosehook
607 605 verbose output from hook
608 606 calling hook pre-identify.a: hooktests.verbosehook
609 607 verbose output from hook
610 608 calling hook pre-identify.c: hooktests.verbosehook
611 609 verbose output from hook
612 610 cb9a9f314b8b
613 611
614 612 new tags must be visible in pretxncommit (issue3210)
615 613
616 614 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
617 615 $ hg tag -f foo
618 616 ['a', 'foo', 'tip']
619 617
620 618 new commits must be visible in pretxnchangegroup (issue3428)
621 619
622 620 $ cd ..
623 621 $ hg init to
624 622 $ echo '[hooks]' >> to/.hg/hgrc
625 623 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
626 624 $ echo a >> to/a
627 625 $ hg --cwd to ci -Ama
628 626 adding a
629 627 $ hg clone to from
630 628 updating to branch default
631 629 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
632 630 $ echo aa >> from/a
633 631 $ hg --cwd from ci -mb
634 632 $ hg --cwd from push
635 633 pushing to $TESTTMP/to (glob)
636 634 searching for changes
637 635 adding changesets
638 636 adding manifests
639 637 adding file changes
640 638 added 1 changesets with 1 changes to 1 files
641 639 changeset: 1:9836a07b9b9d
642 640 tag: tip
643 641 user: test
644 642 date: Thu Jan 01 00:00:00 1970 +0000
645 643 summary: b
646 644